org.apache.hadoop.fs.s3.S3Exception with org.jets3t.service.impl.rest.httpclient.RestS3Service: Source Code Examples

The following examples show how org.apache.hadoop.fs.s3.S3Exception is used together with org.jets3t.service.impl.rest.httpclient.RestS3Service. You can also follow the links to view the full source on GitHub.
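
Nearly all of the examples share the same basic pattern: build a JetS3t AWSCredentials object, construct a RestS3Service from it, and translate any S3ServiceException into an IOException or an org.apache.hadoop.fs.s3.S3Exception. Below is a minimal, self-contained sketch of that pattern; the key values are placeholders, and the bucket listing is only there to exercise the service.

import java.io.IOException;

import org.apache.hadoop.fs.s3.S3Exception;
import org.jets3t.service.S3ServiceException;
import org.jets3t.service.impl.rest.httpclient.RestS3Service;
import org.jets3t.service.model.S3Bucket;
import org.jets3t.service.security.AWSCredentials;

public class RestS3ServiceSketch {
  public static void main(String[] args) throws IOException {
    // Placeholder credentials; the examples below read them from Hadoop configuration,
    // the URI user info, or a properties file.
    AWSCredentials awsCredentials = new AWSCredentials("ACCESS_KEY", "SECRET_KEY");
    try {
      RestS3Service s3Service = new RestS3Service(awsCredentials);
      for (S3Bucket bucket : s3Service.listAllBuckets()) {
        System.out.println(bucket.getName());
      }
    } catch (S3ServiceException e) {
      // The same translation the Hadoop stores below use: unwrap a nested
      // IOException, otherwise rethrow as S3Exception.
      if (e.getCause() instanceof IOException) {
        throw (IOException) e.getCause();
      }
      throw new S3Exception(e);
    }
  }
}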

Example 1  Project: hadoop   File: Jets3tNativeFileSystemStore.java
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    handleException(e);
  }
  multipartEnabled =
      conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
  multipartBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024),
      MAX_PART_SIZE);
  multipartCopyBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
      MAX_PART_SIZE);
  serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");

  bucket = new S3Bucket(uri.getHost());
}
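
The fs.s3n.* keys read above are typically set in core-site.xml, but the equivalent programmatic setup is sketched below; the values, including the "AES256" server-side-encryption algorithm, are illustrative assumptions rather than defaults.

import org.apache.hadoop.conf.Configuration;

public class S3NativeMultipartConfigSketch {
  public static Configuration sketch() {
    Configuration conf = new Configuration();
    // Enable multipart uploads and use 64 MB blocks for both uploads and copies.
    conf.setBoolean("fs.s3n.multipart.uploads.enabled", true);
    conf.setLong("fs.s3n.multipart.uploads.block.size", 64L * 1024 * 1024);
    conf.setLong("fs.s3n.multipart.copy.block.size", 64L * 1024 * 1024);
    // Server-side encryption algorithm passed through to S3 ("AES256" assumed here).
    conf.set("fs.s3n.server-side-encryption-algorithm", "AES256");
    return conf;
  }
}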
 
Example 2  Project: hadoop   File: Jets3tFileSystemStore.java
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt(
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_KEY,
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_DEFAULT
      );
}
 
Example 3  Project: big-c   File: Jets3tNativeFileSystemStore.java
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    handleException(e);
  }
  multipartEnabled =
      conf.getBoolean("fs.s3n.multipart.uploads.enabled", false);
  multipartBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.uploads.block.size", 64 * 1024 * 1024),
      MAX_PART_SIZE);
  multipartCopyBlockSize = Math.min(
      conf.getLong("fs.s3n.multipart.copy.block.size", MAX_PART_SIZE),
      MAX_PART_SIZE);
  serverSideEncryptionAlgorithm = conf.get("fs.s3n.server-side-encryption-algorithm");

  bucket = new S3Bucket(uri.getHost());
}
 
Example 4  Project: big-c   File: Jets3tFileSystemStore.java
@Override
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt(
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_KEY,
                     S3FileSystemConfigKeys.S3_STREAM_BUFFER_SIZE_DEFAULT
      );
}
 
Example 5  Project: red5-examples   File: S3FilenameGenerator.java
public static List<String> getBucketList() {
	logger.debug("Get the bucket list");
	List<String> bucketList = new ArrayList<String>(3);
	try {
		S3Service s3Service = new RestS3Service(awsCredentials); 
		S3Bucket[] buckets = s3Service.listAllBuckets();
		for (S3Bucket bucket: buckets) {
			logger.debug("Bucket: {}", bucket.getName());
			bucketList.add(bucket.getName());
		}
		logger.debug("Bucket count: {}", buckets.length);
	} catch (S3ServiceException e) {
		logger.error("Error during bucket listing", e);
	}
	return bucketList;
}
 
Example 6  Project: RDFS   File: Jets3tNativeFileSystemStore.java
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example 7  Project: RDFS   File: Jets3tFileSystemStore.java
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt("io.file.buffer.size", 4096);
}
 
Example 8  Project: suro   File: TestGrantAcl.java
@Test
public void test() throws Exception {
    RestS3Service s3Service = mock(RestS3Service.class);
    AccessControlList acl = new AccessControlList();
    doReturn(acl).when(s3Service).getObjectAcl("bucket", "key");
    doNothing().when(s3Service).putObjectAcl("bucket", "key", acl);

    GrantAcl grantAcl = new GrantAcl(s3Service, "1,2,3", 1);
    S3Object obj = new S3Object("key");
    obj.setBucketName("bucket");
    obj.setAcl(GSAccessControlList.REST_CANNED_BUCKET_OWNER_FULL_CONTROL);
    assertTrue(grantAcl.grantAcl(obj));

    Set<GrantAndPermission> grants = new HashSet<GrantAndPermission>(Arrays.asList(acl.getGrantAndPermissions()));
    assertEquals(grants.size(), 3);
    Set<GrantAndPermission> grantSet = new HashSet<GrantAndPermission>();
    for (int i = 1; i <= 3; ++i) {
        grantSet.add(new GrantAndPermission(new CanonicalGrantee(Integer.toString(i)), Permission.PERMISSION_READ));
    }
    // The excerpt builds grantSet but never uses it; the intended final check is
    // presumably a comparison against the applied grants (assumed completion):
    assertEquals(grants, grantSet);
}
 
Example 9  Project: hadoop-gpu   File: Jets3tNativeFileSystemStore.java
public void initialize(URI uri, Configuration conf) throws IOException {
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example 10  Project: hadoop-gpu   File: Jets3tFileSystemStore.java
public void initialize(URI uri, Configuration conf) throws IOException {
  
  this.conf = conf;
  
  S3Credentials s3Credentials = new S3Credentials();
  s3Credentials.initialize(uri, conf);
  try {
    AWSCredentials awsCredentials =
      new AWSCredentials(s3Credentials.getAccessKey(),
          s3Credentials.getSecretAccessKey());
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());

  this.bufferSize = conf.getInt("io.file.buffer.size", 4096);
}
 
Example 11  Project: cyberduck   File: S3PresignedUrlProvider.java
/**
 * Generates a signed URL string that will grant access to an S3 resource (bucket or object) to whoever uses the URL
 * up until the time specified.
 *
 * @param host   Hostname
 * @param bucket the name of the bucket to include in the URL, must be a valid bucket name.
 * @param key    the name of the object to include in the URL, if null only the bucket name is used.
 * @param method HTTP method
 * @param expiry Milliseconds
 * @return a URL signed in such a way as to grant access to an S3 resource to whoever uses it.
 */
public String create(final Host host, final String user, final String secret,
                     final String bucket, String region, final String key,
                     final String method, final long expiry) {
    final S3Protocol.AuthenticationHeaderSignatureVersion signature;
    if(StringUtils.isBlank(region)) {
        // Only for AWS
        if(S3Session.isAwsHostname(host.getHostname())) {
            // Region is required for AWS4-HMAC-SHA256 signature
            region = "us-east-1";
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.getDefault(host.getProtocol());
        }
        else {
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.AWS2;
        }
    }
    else {
        // Only for AWS
        if(S3Session.isAwsHostname(host.getHostname())) {
            // Region is required for AWS4-HMAC-SHA256 signature
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.getDefault(host.getProtocol());
        }
        else {
            signature = S3Protocol.AuthenticationHeaderSignatureVersion.AWS2;
        }
    }
    return new RestS3Service(new AWSCredentials(StringUtils.strip(user), StringUtils.strip(secret))) {
        @Override
        public String getEndpoint() {
            return host.getHostname();
        }
    }.createSignedUrlUsingSignatureVersion(
        signature.toString(),
        region, method, bucket, key, null, null, expiry / 1000, false, true, false);
}
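
The expiry argument is an absolute point in time in milliseconds since the epoch; create() divides it by 1000 before handing it to JetS3t. A hedged usage sketch follows; the provider and host parameters and the bucket, region and key values are placeholders for illustration, not taken from the Cyberduck sources.

// Sketch only: presign a GET URL valid for one hour from now.
static String oneHourGetUrl(final S3PresignedUrlProvider provider, final Host host,
                            final String user, final String secret) {
    // Absolute expiry time in milliseconds; converted to seconds inside create().
    final long expiry = System.currentTimeMillis() + java.util.concurrent.TimeUnit.HOURS.toMillis(1);
    return provider.create(host, user, secret,
        "my-bucket", "eu-west-1", "path/to/object", "GET", expiry);
}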
 
Example 12  Project: cc-warc-examples   File: S3ReaderTest.java
public static void main(String[] args) throws IOException, S3ServiceException {
	// We're accessing a publicly available bucket so don't need to fill in our credentials
	S3Service s3s = new RestS3Service(null);
	
	// Let's grab a file out of the CommonCrawl S3 bucket
	String fn = "common-crawl/crawl-data/CC-MAIN-2013-48/segments/1386163035819/warc/CC-MAIN-20131204131715-00000-ip-10-33-133-15.ec2.internal.warc.gz";
	S3Object f = s3s.getObject("aws-publicdatasets", fn, null, null, null, null, null, null);
	
	// The file name identifies the ArchiveReader and indicates if it should be decompressed
	ArchiveReader ar = WARCReaderFactory.get(fn, f.getDataInputStream(), true);
	
	// Once we have an ArchiveReader, we can work through each of the records it contains
	int i = 0;
	for(ArchiveRecord r : ar) {
		// The header file contains information such as the type of record, size, creation time, and URL
		System.out.println("Header: " + r.getHeader());
		System.out.println("URL: " + r.getHeader().getUrl());
		System.out.println();
		
		// If we want to read the contents of the record, we can use the ArchiveRecord as an InputStream
		// Create a byte array that is as long as the record's stated length
		byte[] rawData = new byte[r.available()];
		r.read(rawData);
		// Note: potential optimization would be to have a large buffer only allocated once
		
		// Why don't we convert it to a string and print the start of it? Let's hope it's text!
		String content = new String(rawData);
		System.out.println(content.substring(0, Math.min(500, content.length())));
		System.out.println((content.length() > 500 ? "..." : ""));
		
		// Pretty printing to make the output more readable 
		System.out.println("=-=-=-=-=-=-=-=-=");
		if (i++ > 4) break; 
	}
}
 
Example 13  Project: red5-examples   File: S3FilenameGenerator.java
public static void createBucket() {
	logger.debug("Create bucket");
	try {
		S3Service s3Service = new RestS3Service(awsCredentials);
		S3Bucket bucket = s3Service.createBucket(bucketName);
		logger.debug("Created bucket: {}", bucket.getName());
	} catch (S3ServiceException e) {
		logger.error("Error creating bucket", e);
	}		
}
 
Example 14  Project: red5-examples   File: S3FilenameGenerator.java
public static void upload(String sessionId, String name) {
	logger.debug("Upload - session id: {} name: {}", sessionId, name);
	try {
		// find the file
		StringBuilder sb = new StringBuilder(recordPath);
		sb.append(sessionId);
		sb.append('/');
		sb.append(name);
		sb.append(".flv");
		String filePath = sb.toString();
		logger.debug("File path: {}", filePath);
		File file = new File(filePath);
		if (file.exists()) {
			S3Service s3Service = new RestS3Service(awsCredentials);
			S3Bucket bucket = s3Service.createBucket(bucketName);
			S3Object sob = new S3Object(sessionId + "/" + name + ".flv");
			// force bucket name
			sob.setBucketName(bucketName);
			// point at file
			sob.setDataInputFile(file);
			// set type
			sob.setContentType("video/x-flv");
			// set auth / acl
			sob.setAcl(AccessControlList.REST_CANNED_PUBLIC_READ);				
			logger.debug("Pre-upload: {}", sob);
			sob = s3Service.putObject(bucket, sob);
			logger.debug("Post-upload: {}", sob);						
		} else {
			logger.warn("File was not found");
		}
		file = null;
	} catch (S3ServiceException e) {
		logger.error("Error during upload", e);
	}		
}
 
Example 15  Project: tutorials   File: JetS3tLiveTest.java
@BeforeClass
public static void connectS3() throws Exception {

    // Replace with your keys
    String awsAccessKey = "your access key";
    String awsSecretKey = "your secret key";

    // Create credentials
    AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);

    // Create service
    s3Service = new RestS3Service(awsCredentials);
}
 
Example 16  Project: suro   File: S3Consumer.java
@JsonCreator
public S3Consumer(
        @JsonProperty("id") String id,
        @JsonProperty("s3Endpoint") String s3Endpoint,
        @JsonProperty("notice") Notice notice,
        @JsonProperty("recvTimeout") long timeout,
        @JsonProperty("concurrentDownload") int concurrentDownload,
        @JsonProperty("downloadPath") String downloadPath,
        @JsonProperty("recordParser") RecordParser recordParser,
        @JacksonInject AWSCredentialsProvider credentialProvider,
        @JacksonInject MessageRouter router,
        @JacksonInject ObjectMapper jsonMapper,
        @JacksonInject RestS3Service s3Service
) {
    this.id = id;
    this.s3Endpoint = s3Endpoint == null ? "s3.amazonaws.com" : s3Endpoint;
    this.notice = notice;
    this.timeout = timeout == 0 ? 1000 : timeout;
    this.concurrentDownload = concurrentDownload == 0 ? 5 : concurrentDownload;
    this.recordParser = recordParser;
    this.downloadPath = downloadPath == null ? "/logs/suro-s3consumer/" + id : downloadPath;

    this.credentialsProvider = credentialProvider;
    this.router = router;
    this.jsonMapper = jsonMapper;

    this.s3Service = s3Service;

    Preconditions.checkNotNull(notice, "notice is needed");
    Preconditions.checkNotNull(recordParser, "recordParser is needed");
}
 
Example 17  Project: hadoop   File: MigrationTool.java
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
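
As the error messages spell out, initialize() resolves credentials either from the user-info part of the s3 URI or from the fs.s3.* configuration properties. A short sketch of both forms; the values are placeholders, and a secret key containing '/' would need URL-encoding in the URI form.

import java.net.URI;

import org.apache.hadoop.conf.Configuration;

public class MigrationToolCredentialsSketch {
  public static void main(String[] args) {
    // Option 1: credentials embedded as the username/password of the s3 URL.
    URI withUserInfo = URI.create("s3://ACCESS_KEY:SECRET_KEY@mybucket/");
    System.out.println(withUserInfo.getUserInfo());

    // Option 2: credentials supplied through the configuration properties
    // that initialize() falls back to.
    Configuration conf = new Configuration();
    conf.set("fs.s3.awsAccessKeyId", "ACCESS_KEY");
    conf.set("fs.s3.awsSecretAccessKey", "SECRET_KEY");
  }
}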
 
Example 18  Project: big-c   File: MigrationTool.java
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example 19  Project: lumongo   File: IndexCommonCrawl.java
public static void main(String[] args) throws Exception {
	
	if (args.length != 4) {
		System.err.println("usage: awsPropertiesFile prefix lumongoServers indexName");
		System.err.println("usage: aws.properties 2010/09/25/9 10.0.0.1,10.0.0.2 ccrawl");
		System.exit(1);
	}
	
	LogUtil.loadLogConfig();
	
	String propFileName = args[0];
	String prefix = args[1];
	final String[] serverNames = args[2].split(",");
	final String indexName = args[3];
	
	final LumongoPoolConfig clientConfig = new LumongoPoolConfig();
	for (String serverName : serverNames) {
		clientConfig.addMember(serverName);
	}
	
	File propFile = new File(propFileName);
	
	PropertiesReader pr = new PropertiesReader(propFile);
	
	String awsAccessKey = pr.getString("awsAccessKey");
	String awsSecretKey = pr.getString("awsSecretKey");
	
	final AWSCredentials awsCredentials = new AWSCredentials(awsAccessKey, awsSecretKey);
	
	RestS3Service s3Service = new RestS3Service(awsCredentials);
	s3Service.setRequesterPaysEnabled(true);
	
	System.out.println("Fetching files list for prefix <" + prefix + ">");
	System.out.println("This can take awhile ...");
	
	S3Object[] objects = s3Service.listObjects("aws-publicdatasets", "common-crawl/crawl-002/" + prefix, null);
	System.out.println("Fetched info for <" + objects.length + "> files");
	
	lumongoWorkPool = new LumongoWorkPool(clientConfig);
	
	IndexConfig indexConfig = new IndexConfig(CONTENTS);
	indexConfig.addFieldConfig(FieldConfigBuilder.create(URL, FieldType.STRING).indexAs(DefaultAnalyzers.LC_KEYWORD));
	indexConfig.addFieldConfig(FieldConfigBuilder.create(TEXT_CONTENTS, FieldType.STRING).indexAs(DefaultAnalyzers.STANDARD));
	indexConfig.addFieldConfig(FieldConfigBuilder.create(TITLE, FieldType.STRING).indexAs(DefaultAnalyzers.STANDARD));
	
	CreateOrUpdateIndex createOrUpdateIndex = new CreateOrUpdateIndex(indexName, 16, indexConfig);
	
	lumongoWorkPool.createOrUpdateIndex(createOrUpdateIndex);
	
	ExecutorService pool = Executors.newFixedThreadPool(16);
	
	for (S3Object object : objects) {
		final String key = object.getKey();
		
		pool.execute(new Runnable() {
			@Override
			public void run() {
				try {
					handleFile(indexName, awsCredentials, key);
				}
				catch (Exception e) {
					log.error(e.getClass().getSimpleName() + ": ", e);
				}
			}
		});
		
	}
	
	pool.shutdown();
	lumongoWorkPool.shutdown();
	
	while (!pool.isTerminated()) {
		pool.awaitTermination(1, TimeUnit.MINUTES);
	}
	
}
 
Example 20  Project: red5-examples   File: S3FilenameGenerator.java
@SuppressWarnings("deprecation")
public String generateFilename(IScope scope, String name, String extension, GenerationType type) {
   	logger.debug("Get stream directory: scope={}, name={}, type={}", new Object[]{scope, name, type.toString()});		
	StringBuilder path = new StringBuilder();
	// get the session id
	IConnection conn = Red5.getConnectionLocal();
	if (conn.hasAttribute("sessionId")) {
		String sessionId = conn.getStringAttribute("sessionId");
		path.append(sessionId);
		path.append('/');
	}
	// add resources name
	path.append(name);
	// add extension if we have one
	if (extension != null) {
		// add extension
		path.append(extension);
	}
	// determine whether its playback or record
	if (type.equals(GenerationType.PLAYBACK)) {
		logger.debug("Playback path used");
		// look on s3 for the file first	
		boolean found = false;
		try {
			S3Service s3Service = new RestS3Service(awsCredentials);
			S3Bucket bucket = s3Service.getBucket(bucketName);
			String objectKey = path.toString();
			S3Object file = s3Service.getObject(bucket, objectKey);
			if (file != null) {
				S3Object details = s3Service.getObjectDetails(bucket, objectKey);
				logger.debug("Details - key: {} content type: {}", details.getKey(), details.getContentType()); 
				path.insert(0, bucket.getLocation());
				// set found flag
				found = true;
			}
		} catch (S3ServiceException e) {
			logger.warn("Error looking up media file", e);
		}
		// use local path
		if (!found) {
			logger.debug("File was not found on S3, using local playback location");
			path.insert(0, playbackPath);
		}
	} else {
		logger.debug("Record path used");
		path.insert(0, recordPath);
	}

	String fileName = path.toString();
	logger.debug("Generated filename: {}", fileName);
	return fileName;
}
 
Example 21  Project: RDFS   File: MigrationTool.java
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}
 
Example 22  Project: suro   File: GrantAcl.java
public GrantAcl(RestS3Service s3Service, String s3Acl, int s3AclRetries) {
    this.s3Service = s3Service;
    this.s3Acl = s3Acl;
    this.s3AclRetries = s3AclRetries;
}
 
Example 23  Project: suro   File: TestS3FileSink.java
private Injector getInjector() {
    return Guice.createInjector(
        new SuroSinkPlugin(),
        new AbstractModule() {
                @Override
                protected void configure() {
                    bind(ObjectMapper.class).to(DefaultObjectMapper.class);
                    bind(AWSCredentialsProvider.class)
                        .toInstance(new AWSCredentialsProvider() {
                            @Override
                            public AWSCredentials getCredentials() {
                                return new AWSCredentials() {
                                    @Override
                                    public String getAWSAccessKeyId() {
                                        return "accessKey";
                                    }

                                    @Override
                                    public String getAWSSecretKey() {
                                        return "secretKey";
                                    }
                                };
                            }

                            @Override
                            public void refresh() {
                            }
                        });

                    MultipartUtils mpUtils = mock(MultipartUtils.class);
                    try {
                        doAnswer(new Answer() {
                            @Override
                            public Object answer(InvocationOnMock invocation) throws Throwable {
                                Thread.sleep(1000);
                                return null;
                            }
                        }).when(mpUtils).uploadObjects(
                            any(String.class),
                            any(RestS3Service.class),
                            any(List.class),
                            any(S3ServiceEventListener.class));

                        bind(MultipartUtils.class).toInstance(mpUtils);
                    } catch (Exception e) {
                        Assert.fail(e.getMessage());
                    }
                    bind(SpaceChecker.class).toInstance(mock(SpaceChecker.class));
                }
        }
    );
}
 
Example 24  Project: hadoop-gpu   File: MigrationTool.java
public void initialize(URI uri) throws IOException {
  try {
    String accessKey = null;
    String secretAccessKey = null;
    String userInfo = uri.getUserInfo();
    if (userInfo != null) {
      int index = userInfo.indexOf(':');
      if (index != -1) {
        accessKey = userInfo.substring(0, index);
        secretAccessKey = userInfo.substring(index + 1);
      } else {
        accessKey = userInfo;
      }
    }
    if (accessKey == null) {
      accessKey = getConf().get("fs.s3.awsAccessKeyId");
    }
    if (secretAccessKey == null) {
      secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
    }
    if (accessKey == null && secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID and Secret Access Key " +
                                         "must be specified as the username " +
                                         "or password (respectively) of a s3 URL, " +
                                         "or by setting the " +
                                         "fs.s3.awsAccessKeyId or " +                         
                                         "fs.s3.awsSecretAccessKey properties (respectively).");
    } else if (accessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Access Key ID must be specified " +
                                         "as the username of a s3 URL, or by setting the " +
                                         "fs.s3.awsAccessKeyId property.");
    } else if (secretAccessKey == null) {
      throw new IllegalArgumentException("AWS " +
                                         "Secret Access Key must be specified " +
                                         "as the password of a s3 URL, or by setting the " +
                                         "fs.s3.awsSecretAccessKey property.");         
    }
    AWSCredentials awsCredentials =
      new AWSCredentials(accessKey, secretAccessKey);
    this.s3Service = new RestS3Service(awsCredentials);
  } catch (S3ServiceException e) {
    if (e.getCause() instanceof IOException) {
      throw (IOException) e.getCause();
    }
    throw new S3Exception(e);
  }
  bucket = new S3Bucket(uri.getHost());
}