下面列出了如何使用 com.google.api.client.http.AbstractInputStreamContent API 类的实例代码及写法,也可以点击链接跳转到 GitHub 查看源代码。
// Verifies that executing a RepositoryDoc with inline content delegates to
// IndexingService.indexItemAndContent with the default (UNSPECIFIED) request mode,
// and that the doc still exposes its item and content afterwards.
@Test
public void testItemAndContent() throws IOException, InterruptedException {
  Item item = new Item().setName("id1").setAcl(getCustomerAcl());
  AbstractInputStreamContent content = ByteArrayContent.fromString("", "golden");
  RepositoryDoc doc =
      new RepositoryDoc.Builder().setItem(item).setContent(content, ContentFormat.TEXT).build();
  // Stub the indexing call to return an already-completed future.
  SettableFuture<Item> updateFuture = SettableFuture.create();
  doAnswer(
          invocation -> {
            updateFuture.set(new Item());
            return updateFuture;
          })
      .when(mockIndexingService)
      .indexItemAndContent(
          any(), any(), any(), eq(ContentFormat.TEXT), eq(RequestMode.UNSPECIFIED));
  doc.execute(mockIndexingService);
  InOrder inOrder = inOrder(mockIndexingService);
  inOrder
      .verify(mockIndexingService)
      .indexItemAndContent(item, content, null, ContentFormat.TEXT, RequestMode.UNSPECIFIED);
  assertEquals("id1", doc.getItem().getName());
  assertEquals(content, doc.getContent());
}
/** Two docs built from equal items and the same content instance are equal and hash alike. */
@Test
public void testEqualsWithContent() {
  AbstractInputStreamContent sharedContent = ByteArrayContent.fromString(null, "golden");
  RepositoryDoc first =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(sharedContent, ContentFormat.TEXT)
          .build();
  RepositoryDoc second =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(sharedContent, ContentFormat.TEXT)
          .build();
  assertEquals(first, second);
  assertEquals(first.hashCode(), second.hashCode());
}
/**
 * Docs built over different content must not be equal.
 *
 * <p>The equals/hashCode contract does not require unequal objects to produce different hash
 * codes, so this test intentionally asserts only on equality: a hashCode-inequality assertion
 * could fail spuriously on a legitimate hash collision.
 */
@Test
public void testNotEqualsWithContent() {
  AbstractInputStreamContent content1 = ByteArrayContent.fromString(null, "golden");
  RepositoryDoc doc1 =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(content1, ContentFormat.TEXT)
          .build();
  AbstractInputStreamContent content2 = ByteArrayContent.fromString(null, "golden2");
  RepositoryDoc doc2 =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(content2, ContentFormat.TEXT)
          .build();
  assertNotEquals(doc1, doc2);
}
/** Docs with identical items, content, and content hash are equal with equal hash codes. */
@Test
public void testEqualsWithContentHash() {
  AbstractInputStreamContent payload = ByteArrayContent.fromString(null, "golden");
  // The same content instance always yields the same hash string.
  String hashValue = Integer.toString(Objects.hash(payload));
  RepositoryDoc first =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(payload, hashValue, ContentFormat.TEXT)
          .build();
  RepositoryDoc second =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(payload, hashValue, ContentFormat.TEXT)
          .build();
  assertEquals(first, second);
  assertEquals(first.hashCode(), second.hashCode());
}
/**
 * A doc carrying an explicit content hash must not equal one built without a hash.
 *
 * <p>hashCode inequality is deliberately not asserted: by the equals/hashCode contract,
 * unequal objects may legally share a hash code, so such an assertion would be flaky.
 */
@Test
public void testNotEqualsWithContentHash() {
  AbstractInputStreamContent content = ByteArrayContent.fromString(null, "golden");
  RepositoryDoc doc1 =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(content, Integer.toString(Objects.hash(content)), ContentFormat.TEXT)
          .build();
  RepositoryDoc doc2 =
      new RepositoryDoc.Builder()
          .setItem(new Item().setName("id1"))
          .setContent(content, ContentFormat.TEXT)
          .build();
  assertNotEquals(doc1, doc2);
}
/**
 * Uploads {@code content} to the given resource asynchronously.
 *
 * <p>Builds a media upload request, routes it through the media uploader, and executes it on
 * the background executor.
 *
 * @param resourceName name of the upload resource; must be non-empty
 * @param content stream content to upload; must be non-null
 * @return a future that completes when the upload request has executed
 * @throws IOException if building the upload request fails
 */
@Override
public ListenableFuture<Void> uploadContent(
    String resourceName, AbstractInputStreamContent content) throws IOException {
  checkArgument(!Strings.isNullOrEmpty(resourceName), "resource name can not be empty");
  checkNotNull(content, "content can not be null");
  // Reject uploads unless the service is in its running state.
  checkState(isRunning(), "upload service not running to accept upload requests");
  Upload upload =
      service.media().upload(resourceName, new Media().setResourceName(resourceName), content);
  // Configure media-upload behavior on the request before it is executed.
  mediaUploader.enableMediaUploader(upload);
  // Execute the (blocking) upload on the executor; the future yields null on success.
  return executorService.submit(
      () -> {
        upload.executeUnparsed();
        return null;
      });
}
// Verifies that a RepositoryDoc configured with RequestMode.SYNCHRONOUS passes that mode
// through to IndexingService.indexItemAndContent.
@Test
public void testItemAndContentSynchronous() throws IOException, InterruptedException {
  Item item = new Item().setName("id1").setAcl(getCustomerAcl());
  AbstractInputStreamContent content = ByteArrayContent.fromString("", "golden");
  RepositoryDoc doc =
      new RepositoryDoc.Builder()
          .setItem(item)
          .setContent(content, ContentFormat.TEXT)
          .setRequestMode(RequestMode.SYNCHRONOUS)
          .build();
  // Stub the indexing call to return an already-completed future.
  SettableFuture<Item> updateFuture = SettableFuture.create();
  doAnswer(
          invocation -> {
            updateFuture.set(new Item());
            return updateFuture;
          })
      .when(mockIndexingService)
      .indexItemAndContent(
          any(), any(), any(), eq(ContentFormat.TEXT), eq(RequestMode.SYNCHRONOUS));
  doc.execute(mockIndexingService);
  InOrder inOrder = inOrder(mockIndexingService);
  inOrder
      .verify(mockIndexingService)
      .indexItemAndContent(item, content, null, ContentFormat.TEXT, RequestMode.SYNCHRONOUS);
  assertEquals("id1", doc.getItem().getName());
  assertEquals(content, doc.getContent());
}
// Verifies that a RepositoryDoc configured with RequestMode.ASYNCHRONOUS passes that mode
// through to IndexingService.indexItemAndContent.
@Test
public void testItemAndContentNotIncrement() throws IOException, InterruptedException {
  Item item = new Item().setName("id1").setAcl(getCustomerAcl());
  AbstractInputStreamContent content = ByteArrayContent.fromString("", "golden");
  RepositoryDoc doc =
      new RepositoryDoc.Builder()
          .setItem(item)
          .setContent(content, ContentFormat.TEXT)
          .setRequestMode(RequestMode.ASYNCHRONOUS)
          .build();
  // Stub the indexing call to return an already-completed future.
  SettableFuture<Item> updateFuture = SettableFuture.create();
  doAnswer(
          invocation -> {
            updateFuture.set(new Item());
            return updateFuture;
          })
      .when(mockIndexingService)
      .indexItemAndContent(
          any(), any(), any(), eq(ContentFormat.TEXT), eq(RequestMode.ASYNCHRONOUS));
  doc.execute(mockIndexingService);
  InOrder inOrder = inOrder(mockIndexingService);
  inOrder
      .verify(mockIndexingService)
      .indexItemAndContent(item, content, null, ContentFormat.TEXT, RequestMode.ASYNCHRONOUS);
}
// Verifies that executing a doc with child ids both indexes the item's content and pushes
// each child id (including ids needing URL-escaping) to its queue.
@Test
public void testItemContentAndChildLinks() throws IOException, InterruptedException {
  Item item = new Item().setName("id1").setAcl(getCustomerAcl());
  AbstractInputStreamContent content = ByteArrayContent.fromString("", "golden");
  PushItem pushItem1 = new PushItem().setQueue("queue1");
  PushItem pushItem2 = new PushItem().setQueue("queue1");
  RepositoryDoc doc =
      new RepositoryDoc.Builder()
          .setItem(item)
          .setContent(content, ContentFormat.TEXT)
          .addChildId("id1", pushItem1)
          .addChildId("id2+b;more", pushItem2)
          .build();
  // Stub the indexing call to return an already-completed future.
  SettableFuture<Item> updateFuture = SettableFuture.create();
  doAnswer(
          invocation -> {
            updateFuture.set(new Item());
            return updateFuture;
          })
      .when(mockIndexingService)
      .indexItemAndContent(
          any(), any(), any(), eq(ContentFormat.TEXT), eq(RequestMode.UNSPECIFIED));
  // Stub push(...) with a completed future as well.
  SettableFuture<Item> pushFuture = SettableFuture.create();
  doAnswer(
          invocation -> {
            pushFuture.set(new Item());
            return pushFuture;
          })
      .when(mockIndexingService)
      .push(any(), any());
  doc.execute(mockIndexingService);
  verify(mockIndexingService)
      .indexItemAndContent(item, content, null, ContentFormat.TEXT, RequestMode.UNSPECIFIED);
  // Child ids are pushed verbatim, one push per child.
  verify(mockIndexingService).push("id1", pushItem1);
  verify(mockIndexingService).push("id2+b;more", pushItem2);
}
/** Wraps the given mock item's string form as RAW content inside a RepositoryDoc. */
private RepositoryDoc getItem(MockItem createItem) {
  String serialized = createItem.getItem().toString();
  AbstractInputStreamContent itemContent = ByteArrayContent.fromString("", serialized);
  return new RepositoryDoc.Builder()
      .setItem(createItem.getItem())
      .setContent(itemContent, ContentFormat.RAW)
      .build();
}
/**
 * Launch a job, but do not wait for it to complete.
 *
 * @throws BigqueryJobFailureException
 */
private Job launchJob(Job job, @Nullable AbstractInputStreamContent data) {
  // A job that already has a status has been launched before.
  verify(job.getStatus() == null);
  try {
    // The overload with media content is only valid when there is data to attach.
    if (data == null) {
      return bigquery.jobs().insert(getProjectId(), job).execute();
    }
    return bigquery.jobs().insert(getProjectId(), job, data).execute();
  } catch (IOException e) {
    throw BigqueryJobFailureException.create(e);
  }
}
/** Runs job and returns a future that yields {@code result} when {@code job} is completed. */
private <T> ListenableFuture<T> runJobToCompletion(
    final Job job,
    final T result,
    @Nullable final AbstractInputStreamContent data) {
  // runJob blocks on the executor until the job finishes (or throws); the caller-supplied
  // result then becomes the future's value.
  return service.submit(
      () -> {
        runJob(job, data);
        return result;
      });
}
/**
 * Creates or overwrites a file in Google Drive.
 *
 * <p>If a non-trashed, non-folder file with the same name already exists in the destination
 * folder, it is deleted first (when overwrite is enabled) or the call fails. The new file's
 * content comes either from a local file path or from an in-memory byte array.
 *
 * @param parameters source/destination description of the put operation
 * @return the created Drive file with id, parents, and name populated
 * @throws IOException if more than one name match exists, if the file exists and overwrite is
 *     disabled, or on any Drive API failure
 */
public File putResource(GoogleDrivePutParameters parameters) throws IOException {
  String folderId = parameters.getDestinationFolderId();
  File putFile = new File();
  putFile.setParents(Collections.singletonList(folderId));
  // Query for an existing, non-trashed, non-folder file with the same name in the folder.
  Files.List fileRequest = drive.files().list()
      .setQ(format(QUERY_NOTTRASHED_NAME_NOTMIME_INPARENTS, parameters.getResourceName(), MIME_TYPE_FOLDER, folderId));
  LOG.debug("[putResource] `{}` Exists in `{}` ? with `{}`.", parameters.getResourceName(),
      parameters.getDestinationFolderId(), fileRequest.getQ());
  FileList existingFiles = fileRequest.execute();
  if (existingFiles.getFiles().size() > 1) {
    // Ambiguous target: refuse rather than guess which file to replace.
    throw new IOException(messages.getMessage("error.file.more.than.one", parameters.getResourceName()));
  }
  if (existingFiles.getFiles().size() == 1) {
    if (!parameters.isOverwriteIfExist()) {
      throw new IOException(messages.getMessage("error.file.already.exist", parameters.getResourceName()));
    }
    LOG.debug("[putResource] {} will be overwritten...", parameters.getResourceName());
    // Overwrite is implemented as delete-then-create, so the replacement gets a new file id.
    drive.files().delete(existingFiles.getFiles().get(0).getId()).execute();
  }
  putFile.setName(parameters.getResourceName());
  String metadata = "id,parents,name";
  if (!StringUtils.isEmpty(parameters.getFromLocalFilePath())) {
    // Reading content from local fileName
    FileContent fContent = new FileContent(null, new java.io.File(parameters.getFromLocalFilePath()));
    putFile = drive.files().create(putFile, fContent).setFields(metadata).execute();
    //
  } else if (parameters.getFromBytes() != null) {
    // Content supplied directly as bytes; content type is left unspecified (null).
    AbstractInputStreamContent content = new ByteArrayContent(null, parameters.getFromBytes());
    putFile = drive.files().create(putFile, content).setFields(metadata).execute();
  }
  // NOTE(review): if neither a local path nor bytes were supplied, the unsaved putFile stub
  // is returned as-is — confirm callers never hit that case.
  return putFile;
}
// Configures the Put properties under test and a mocked Drive service: the file-existence
// query returns no match and create(...) returns a stubbed file.
@Before
public void setUp() throws Exception {
  super.setUp();
  properties = new GoogleDrivePutProperties("test");
  properties.connection.setupProperties();
  properties.connection.setupLayout();
  properties.schemaMain.setupProperties();
  properties.schemaMain.setupLayout();
  properties.setupProperties();
  properties.setupLayout();
  properties = (GoogleDrivePutProperties) setupConnectionWithAccessToken(properties);
  properties.uploadMode.setValue(UploadMode.UPLOAD_LOCAL_FILE);
  properties.fileName.setValue(FILE_PUT_NAME);
  properties.localFilePath.setValue("c:/Users/undx/brasil.jpg");
  properties.overwrite.setValue(true);
  properties.destinationFolder.setValue("root");
  // Spy the runtime so getDriveService() can be redirected to the mocked Drive client.
  testRuntime = spy(GoogleDrivePutRuntime.class);
  doReturn(drive).when(testRuntime).getDriveService();
  when(drive.files().list().setQ(anyString()).execute()).thenReturn(emptyFileList);
  //
  // Stub the create(...) chain to return a file with known id and parent.
  File putFile = new File();
  putFile.setId(PUT_FILE_ID);
  putFile.setParents(Collections.singletonList(PUT_FILE_PARENT_ID));
  when(drive.files().create(any(File.class), any(AbstractInputStreamContent.class)).setFields(anyString()).execute())
      .thenReturn(putFile);
}
// Configures the Put properties, a writer over a fresh write operation, and a mocked Drive
// service whose existence query returns no match and whose create(...) returns a stub file.
@Before
public void setUp() throws Exception {
  super.setUp();
  properties = new GoogleDrivePutProperties("test");
  properties.connection.setupProperties();
  properties.connection.setupLayout();
  properties.schemaMain.setupProperties();
  properties.schemaMain.setupLayout();
  properties.setupProperties();
  properties.setupLayout();
  properties = (GoogleDrivePutProperties) setupConnectionWithAccessToken(properties);
  properties.uploadMode.setValue(UploadMode.UPLOAD_LOCAL_FILE);
  properties.fileName.setValue("GoogleDrive Put test BR");
  properties.localFilePath.setValue("c:/Users/undx/brasil.jpg");
  properties.overwrite.setValue(true);
  properties.destinationFolder.setValue("root");
  // Build the writer under test from the sink's write operation.
  sink.initialize(container, properties);
  wop = (GoogleDriveWriteOperation) sink.createWriteOperation();
  writer = new GoogleDrivePutWriter(wop, properties, container);
  when(drive.files().list().setQ(anyString()).execute()).thenReturn(emptyFileList);
  //
  // Stub the create(...) chain to return a file with known id and parent.
  File putFile = new File();
  putFile.setId(PUT_FILE_ID);
  putFile.setParents(Collections.singletonList(PUT_FILE_PARENT_ID));
  when(drive.files().create(any(File.class), any(AbstractInputStreamContent.class)).setFields(anyString()).execute())
      .thenReturn(putFile);
}
// Configures the Put properties with a classpath-resource file as the upload source and a
// mocked Drive service (no existing match; create(...) returns a stub file).
@Before
public void setUp() throws Exception {
  super.setUp();
  properties = new GoogleDrivePutProperties("test");
  properties.connection.setupProperties();
  properties.connection.setupLayout();
  properties.schemaMain.setupProperties();
  properties.schemaMain.setupLayout();
  properties.setupProperties();
  properties.setupLayout();
  properties = (GoogleDrivePutProperties) setupConnectionWithAccessToken(properties);
  properties.uploadMode.setValue(UploadMode.UPLOAD_LOCAL_FILE);
  properties.fileName.setValue(FILE_PUT_NAME);
  // Use a test resource from the classpath as the local file to upload.
  properties.localFilePath
      .setValue(Paths.get(getClass().getClassLoader().getResource("service_account.json").toURI()).toString());
  properties.overwrite.setValue(true);
  properties.destinationFolder.setValue("root");
  when(drive.files().list().setQ(anyString()).execute()).thenReturn(emptyFileList);
  //
  // Stub the create(...) chain to return a file with known id and parent.
  File putFile = new File();
  putFile.setId(PUT_FILE_ID);
  putFile.setParents(Collections.singletonList(PUT_FILE_PARENT_ID));
  when(drive.files().create(any(File.class), any(AbstractInputStreamContent.class)).setFields(anyString()).execute())
      .thenReturn(putFile);
}
/**
 * Test handling of various types of exceptions thrown during JSON API call for
 * GoogleCloudStorage.create(2).
 */
@Test
public void testCreateObjectApiRuntimeException() throws IOException {
  // Prepare the mock return values before invoking the method being tested.
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  // Set up the mock Insert to throw an exception when execute() is called.
  RuntimeException fakeException = new RuntimeException("Fake exception");
  setupNonConflictedWrite(fakeException);
  WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));
  assertThat(writeChannel.isOpen()).isTrue();
  // The failure surfaces on close(), wrapped in an IOException carrying the original cause.
  IOException thrown = assertThrows(IOException.class, writeChannel::close);
  assertThat(thrown).hasCauseThat().isEqualTo(fakeException);
  // Verify the exact sequence of mocked interactions the create/close path performs.
  verify(mockStorageObjectsInsert).execute();
  verify(mockStorage, times(2)).objects();
  verify(mockStorageObjects)
      .insert(eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjects).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
  verify(mockErrorExtractor, atLeastOnce()).itemNotFound(any(IOException.class));
  verify(mockStorageObjectsGet).execute();
  verify(mockStorageObjectsInsert).setName(eq(OBJECT_NAME));
  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockStorageObjects).get(anyString(), anyString());
  verify(mockClientRequestHelper).setChunkSize(any(Storage.Objects.Insert.class), anyInt());
  verify(mockStorageObjectsInsert).setIfGenerationMatch(anyLong());
}
/**
 * Test handling of various types of Errors thrown during JSON API call for
 * GoogleCloudStorage.create(2).
 */
@Test
public void testCreateObjectApiError() throws IOException {
  // Prepare the mock return values before invoking the method being tested.
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  // Set up the mock Insert to throw an exception when execute() is called.
  Error fakeError = new Error("Fake error");
  setupNonConflictedWrite(fakeError);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));
  assertThat(writeChannel.isOpen()).isTrue();
  // Errors (unlike exceptions) propagate from close() unwrapped.
  Error thrown = assertThrows(Error.class, writeChannel::close);
  assertThat(thrown).isEqualTo(fakeError);
  // Verify the exact sequence of mocked interactions the create/close path performs.
  verify(mockStorage, times(2)).objects();
  verify(mockStorageObjects)
      .insert(eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjects).get(BUCKET_NAME, OBJECT_NAME);
  verify(mockStorageObjectsGet).execute();
  verify(mockStorageObjectsInsert).setName(eq(OBJECT_NAME));
  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockStorageObjectsInsert).setIfGenerationMatch(eq(0L));
  verify(mockErrorExtractor).itemNotFound(any(IOException.class));
  verify(mockClientRequestHelper).setChunkSize(any(Storage.Objects.Insert.class), anyInt());
  verify(mockStorageObjectsInsert).execute();
}
/**
 * Initializes the media HTTP uploader based on the media content.
 *
 * @param mediaContent media content
 */
protected final void initializeMediaUpload(AbstractInputStreamContent mediaContent) {
  HttpRequestFactory factory = abstractGoogleClient.getRequestFactory();
  // Create the uploader from the client's transport and request initializer.
  this.uploader =
      new MediaHttpUploader(mediaContent, factory.getTransport(), factory.getInitializer());
  this.uploader.setInitiationRequestMethod(requestMethod);
  // Attach the metadata portion of the request when one has been set.
  if (httpContent != null) {
    this.uploader.setMetadata(httpContent);
  }
}
// Builds the ItemContent for an item: small content is embedded inline, larger content is
// uploaded through the content upload service and referenced by an UploadItemRef.
ItemContent getItemContent(String itemResourceName, MediaContent mediaContent)
    throws IOException, InterruptedException {
  ItemContent itemContent = new ItemContent();
  if (mediaContent.contentFormat == null) {
    mediaContent.contentFormat = "RAW";  // default format when none supplied
  }
  itemContent.setContentFormat(mediaContent.contentFormat);
  AbstractInputStreamContent content;
  if (mediaContent.contentString != null) {
    // Literal content supplied directly in the configuration.
    content = ByteArrayContent.fromString(mediaContent.contentType, mediaContent.contentString);
  } else {
    // Otherwise fetch the content from a URL resolved against the base URI.
    content =
        new UrlInputStreamContent(
            mediaContent.contentType, baseUri.resolve(mediaContent.url).toURL());
  }
  long length = content.getLength();
  // NOTE(review): this inlines only strictly-positive lengths, while
  // IndexingServiceImpl.indexItemAndContent inlines length >= 0 — confirm whether zero-length
  // content is intentionally forced through the upload path here.
  boolean inline =
      (length <= IndexingServiceImpl.DEFAULT_CONTENT_UPLOAD_THRESHOLD_BYTES) && (length > 0);
  if (inline) {
    itemContent.encodeInlineContent(convertStreamToByteArray(content));
  } else {
    // upload content first
    Items.Upload uploadRequest =
        cloudSearchService
            .indexing()
            .datasources()
            .items()
            .upload(itemResourceName,
                new StartUploadItemRequest()
                    .setDebugOptions(new DebugOptions().setEnableDebugging(enableDebugging))
                    .setConnectorName(connectorName));
    UploadItemRef uploadItemRef = execute(uploadRequest);
    try {
      // Block until the upload finishes so the returned ItemContent is immediately usable.
      contentUploadService.uploadContent(uploadItemRef.getName(), content).get();
      itemContent.setContentDataRef(uploadItemRef);
    } catch (ExecutionException e) {
      throw new IOException(e);
    }
  }
  return itemContent;
}
/** Fully reads {@code content}'s stream into memory, closing the stream afterwards. */
private byte[] convertStreamToByteArray(AbstractInputStreamContent content) throws IOException {
  try (InputStream stream = content.getInputStream()) {
    return ByteStreams.toByteArray(stream);
  }
}
/**
 * Updates an {@link Item}.
 *
 * <p>The {@code content} parameter should use a concrete implementation of {@code
 * AbstractInputStreamContent} based on the natural source object:
 *
 * <ul>
 *   <li>For {@code InputStream}, use {@code InputStreamContent}. For best results, if the length
 *       of the content (in bytes) is known without reading the stream, call {@code setLength} on
 *       the {@code InputStreamContent}.
 *   <li>For {@code String} or {@code byte[]}, use {@code ByteArrayContent}.
 *   <li>For existing files, use {@code FileContent}.
 * </ul>
 *
 * @param item the item to update
 * @param content the item's content
 * @param contentHash the hash of the item's content
 * @param contentFormat format of the content
 * @param requestMode the {@link IndexingService.RequestMode} for the request
 * @return {@link ListenableFuture}. Caller can use {@link ListenableFuture#get()} to obtain the
 *     result of an update operation
 * @throws IOException when the service throws an exception
 */
@Override
public ListenableFuture<Operation> indexItemAndContent(
    Item item,
    AbstractInputStreamContent content,
    @Nullable String contentHash,
    ContentFormat contentFormat,
    RequestMode requestMode)
    throws IOException {
  validateRunning();
  checkArgument(item != null, "Item cannot be null.");
  checkArgument(!Strings.isNullOrEmpty(item.getName()), "Item ID cannot be null.");
  checkNotNull(content, "Item content cannot be null.");
  // A negative length means the length is unknown; such content never inlines.
  long length = content.getLength();
  boolean useInline = (length <= contentUploadThreshold) && (length >= 0);
  if (useInline) {
    logger.log(
        Level.FINEST,
        "Inlining content for {0}, length {1} bytes.",
        new Object[] {item.getName(), length});
    // Small content is embedded directly in the item before indexing.
    item.setContent(
        new ItemContent()
            .encodeInlineContent(convertStreamToByteArray(content))
            .setHash(contentHash)
            .setContentFormat(contentFormat.name()));
    return indexItem(item, requestMode);
  } else {
    // Large (or unknown-length) content: start an upload session, transfer the bytes, then
    // index the item with a reference to the uploaded data.
    UploadItemRef uploadRef = startUpload(item.getName());
    logger.log(
        Level.FINEST,
        "Uploading content for {0}, length {1} bytes, upload ref {2}",
        new Object[] {item.getName(), length, uploadRef.getName()});
    ListenableFuture<Item> itemUploaded =
        Futures.transform(
            contentUploadService.uploadContent(uploadRef.getName(), content),
            voidVal ->
                item.setContent(
                    new ItemContent()
                        .setContentDataRef(uploadRef)
                        .setHash(contentHash)
                        .setContentFormat(contentFormat.name())),
            MoreExecutors.directExecutor());
    // Chain the index call onto completion of the upload.
    return Futures.transformAsync(
        itemUploaded, i -> indexItem(i, requestMode), MoreExecutors.directExecutor());
  }
}
/** Fully reads {@code content}'s stream into memory, closing the stream afterwards. */
private byte[] convertStreamToByteArray(AbstractInputStreamContent content) throws IOException {
  try (InputStream stream = content.getInputStream()) {
    return ByteStreams.toByteArray(stream);
  }
}
/**
 * Build the ApiOperation to index a repository.
 *
 * @param repo Repository
 * @param previousItem Previous item state in the index
 * @return ApiOperation (RepositoryDoc if indexing, PushItem if not modified)
 * @throws IOException if unable to create operation
 */
private ApiOperation indexItem(GHRepository repo, Item previousItem)
    throws IOException {
  String metadataHash = repo.getUpdatedAt().toString();
  // If previously indexed and unchanged, just requeue as unmodified
  if (canSkipIndexing(previousItem, metadataHash)) {
    return notModified(previousItem.getName());
  }
  String resourceName = repo.getHtmlUrl().getPath();
  FieldOrValue<String> title = FieldOrValue.withValue(repo.getFullName());
  FieldOrValue<String> url = FieldOrValue.withValue(repo.getHtmlUrl().toExternalForm());
  FieldOrValue<DateTime> createTime = FieldOrValue.withValue(
      new DateTime(repo.getCreatedAt().getTime()));
  FieldOrValue<DateTime> updateTime = FieldOrValue.withValue(
      new DateTime(repo.getUpdatedAt().getTime()));
  // Structured data based on the schema
  Multimap<String, Object> structuredData = ArrayListMultimap.create();
  structuredData.put("organization", repo.getOwnerName());
  structuredData.put("repository", repo.getName());
  structuredData.put("stars", repo.getStargazersCount());
  structuredData.put("forks", repo.getForks());
  structuredData.put("openIssues", repo.getOpenIssueCount());
  structuredData.put("watchers", repo.getWatchers());
  structuredData.put("createdAt", repo.getCreatedAt());
  structuredData.put("updatedAt", repo.getUpdatedAt());
  // Create the item to index
  Item item = IndexingItemBuilder.fromConfiguration(resourceName)
      .setTitle(title)
      .setSourceRepositoryUrl(url)
      .setItemType(IndexingItemBuilder.ItemType.CONTAINER_ITEM)
      .setObjectType("repository")
      .setValues(structuredData)
      .setVersion(Longs.toByteArray(repo.getUpdatedAt().getTime()))
      .setCreateTime(createTime)
      .setUpdateTime(updateTime)
      .setHash(metadataHash)
      .build();
  // TODO - Render markdown to HTML?
  // A repository may have no description; index empty content instead of throwing a
  // NullPointerException.
  String description = repo.getDescription() != null ? repo.getDescription() : "";
  AbstractInputStreamContent content = new ByteArrayContent(
      "text/plain",
      description.getBytes(StandardCharsets.UTF_8));
  // Note: setRequestMode was previously called twice in this chain; one call suffices.
  return new RepositoryDoc.Builder()
      .setItem(item)
      .setContent(content, IndexingService.ContentFormat.TEXT)
      .setRequestMode(IndexingService.RequestMode.SYNCHRONOUS)
      .build();
}
/**
 * Build the ApiOperation to index a pull request.
 *
 * @param pullRequest Pull request to index
 * @param previousItem Previous item state in the index
 * @return ApiOperation (RepositoryDoc if indexing, PushItem if not modified)
 * @throws IOException if unable to create operation
 */
private ApiOperation indexItem(GHPullRequest pullRequest, Item previousItem)
    throws IOException {
  String metadataHash = pullRequest.getUpdatedAt().toString();
  // If previously indexed and unchanged, just requeue as unmodified
  if (canSkipIndexing(previousItem, metadataHash)) {
    return notModified(previousItem.getName());
  }
  String resourceName = pullRequest.getHtmlUrl().getPath();
  FieldOrValue<String> title = FieldOrValue.withValue(pullRequest.getTitle());
  FieldOrValue<String> url = FieldOrValue.withValue(
      pullRequest.getHtmlUrl().toExternalForm());
  FieldOrValue<DateTime> createTime = FieldOrValue.withValue(
      new DateTime(pullRequest.getCreatedAt().getTime()));
  FieldOrValue<DateTime> updateTime = FieldOrValue.withValue(
      new DateTime(pullRequest.getUpdatedAt().getTime()));
  String containerName = pullRequest.getRepository().getHtmlUrl().getPath();
  // Structured data based on the schema
  Multimap<String, Object> structuredData = ArrayListMultimap.create();
  structuredData.put("organization", pullRequest.getRepository().getOwnerName());
  structuredData.put("repository", pullRequest.getRepository().getName());
  structuredData.put("status", pullRequest.getState().name().toLowerCase());
  structuredData.put("openedBy", pullRequest.getUser() != null ?
      pullRequest.getUser().getLogin() : null);
  structuredData.put("assignee", pullRequest.getAssignee() != null ?
      pullRequest.getAssignee().getLogin() : null);
  for (GHLabel label : pullRequest.getLabels()) {
    structuredData.put("labels", label.getName());
  }
  // Index comments as sub objects in the metadata. This makes the comments
  // searchable but still tied to the issue itself.
  for (GHIssueComment comment : pullRequest.getComments()) {
    Multimap<String, Object> commentData = ArrayListMultimap.create();
    commentData.put("comment", comment.getBody());
    commentData.put("user", comment.getUser() != null ?
        comment.getUser().getLogin() : null);
    structuredData.put("comments", commentData);
  }
  structuredData.put("createdAt", pullRequest.getCreatedAt());
  structuredData.put("updatedAt", pullRequest.getUpdatedAt());
  Item item = IndexingItemBuilder.fromConfiguration(resourceName)
      .setTitle(title)
      .setContainerName(containerName)
      .setSourceRepositoryUrl(url)
      .setItemType(IndexingItemBuilder.ItemType.CONTAINER_ITEM)
      .setObjectType("pullRequest")
      .setValues(structuredData)
      .setVersion(Longs.toByteArray(pullRequest.getUpdatedAt().getTime()))
      .setCreateTime(createTime)
      .setUpdateTime(updateTime)
      .setHash(metadataHash)
      .build();
  // TODO - Index the actual patch/diff?
  // TODO - Render markdown to HTML
  // A pull request may have an empty (null) body; index empty content instead of throwing a
  // NullPointerException.
  String body = pullRequest.getBody() != null ? pullRequest.getBody() : "";
  AbstractInputStreamContent content = new ByteArrayContent(
      "text/plain",
      body.getBytes(StandardCharsets.UTF_8));
  return new RepositoryDoc.Builder()
      .setItem(item)
      .setContent(content, IndexingService.ContentFormat.TEXT)
      .setRequestMode(IndexingService.RequestMode.SYNCHRONOUS)
      .build();
}
/**
 * Build the ApiOperation to index an issue.
 *
 * @param issue Pull request to index
 * @param previousItem Previous item state in the index
 * @return ApiOperation (RepositoryDoc if indexing, PushItem if not modified)
 * @throws IOException if unable to create operation
 */
private ApiOperation indexItem(GHIssue issue, Item previousItem)
    throws IOException {
  String metadataHash = issue.getUpdatedAt().toString();
  // If previously indexed and unchanged, just requeue as unmodified
  if (canSkipIndexing(previousItem, metadataHash)) {
    return notModified(previousItem.getName());
  }
  String resourceName = issue.getHtmlUrl().getPath();
  FieldOrValue<String> title = FieldOrValue.withValue(issue.getTitle());
  FieldOrValue<String> url = FieldOrValue.withValue(
      issue.getHtmlUrl().toExternalForm());
  FieldOrValue<DateTime> createTime = FieldOrValue.withValue(
      new DateTime(issue.getCreatedAt().getTime()));
  FieldOrValue<DateTime> updateTime = FieldOrValue.withValue(
      new DateTime(issue.getUpdatedAt().getTime()));
  String containerName = issue.getRepository().getHtmlUrl().getPath();
  // Structured data based on the schema
  Multimap<String, Object> structuredData = ArrayListMultimap.create();
  structuredData.put("organization", issue.getRepository().getOwnerName());
  structuredData.put("repository", issue.getRepository().getName());
  structuredData.put("status", issue.getState().name().toLowerCase());
  structuredData.put("reportedBy", issue.getUser() != null ?
      issue.getUser().getLogin() : null);
  structuredData.put("assignee", issue.getAssignee() != null ?
      issue.getAssignee().getLogin() : null);
  for (GHLabel label : issue.getLabels()) {
    structuredData.put("labels", label.getName());
  }
  // Index comments as sub objects in the metadata. This makes the comments
  // searchable but still tied to the issue itself.
  for (GHIssueComment comment : issue.getComments()) {
    Multimap<String, Object> commentData = ArrayListMultimap.create();
    commentData.put("comment", comment.getBody());
    commentData.put("user", comment.getUser() != null ?
        comment.getUser().getLogin() : null);
    structuredData.put("comments", commentData);
  }
  structuredData.put("createdAt", issue.getCreatedAt());
  structuredData.put("updatedAt", issue.getUpdatedAt());
  Item item = IndexingItemBuilder.fromConfiguration(resourceName)
      .setTitle(title)
      .setContainerName(containerName)
      .setSourceRepositoryUrl(url)
      .setItemType(IndexingItemBuilder.ItemType.CONTAINER_ITEM)
      .setObjectType("issue")
      .setValues(structuredData)
      .setVersion(Longs.toByteArray(issue.getUpdatedAt().getTime()))
      .setCreateTime(createTime)
      .setUpdateTime(updateTime)
      .setHash(metadataHash)
      .build();
  // TODO - Render markdown to HTML
  // An issue may have an empty (null) body; index empty content instead of throwing a
  // NullPointerException.
  String body = issue.getBody() != null ? issue.getBody() : "";
  AbstractInputStreamContent content = new ByteArrayContent(
      "text/plain",
      body.getBytes(StandardCharsets.UTF_8));
  return new RepositoryDoc.Builder()
      .setItem(item)
      .setContent(content, IndexingService.ContentFormat.TEXT)
      .setRequestMode(IndexingService.RequestMode.SYNCHRONOUS)
      .build();
}
/**
 * Build the ApiOperation to index a content item (file).
 *
 * @param content Content item to index
 * @param previousItem Previous item state in the index
 * @return ApiOperation (RepositoryDoc if indexing, PushItem if not modified)
 * @throws IOException if unable to create operation
 */
private ApiOperation indexItem(GHContent content, Item previousItem)
    throws IOException {
  String metadataHash = content.getSha();
  // If previously indexed and unchanged, just requeue as unmodified
  if (canSkipIndexing(previousItem, metadataHash)) {
    return notModified(previousItem.getName());
  }
  String resourceName = new URL(content.getHtmlUrl()).getPath();
  FieldOrValue<String> title = FieldOrValue.withValue(content.getName());
  FieldOrValue<String> url = FieldOrValue.withValue(content.getHtmlUrl());
  String containerName = content.getOwner().getHtmlUrl().getPath();
  String programmingLanguage = FileExtensions.getLanguageForFile(content.getName());
  // Structured data based on the schema
  Multimap<String, Object> structuredData = ArrayListMultimap.create();
  structuredData.put("organization", content.getOwner().getOwnerName());
  structuredData.put("repository", content.getOwner().getName());
  structuredData.put("path", content.getPath());
  structuredData.put("language", programmingLanguage);
  Item item = IndexingItemBuilder.fromConfiguration(resourceName)
      .setTitle(title)
      .setContainerName(containerName)
      .setSourceRepositoryUrl(url)
      .setItemType(IndexingItemBuilder.ItemType.CONTAINER_ITEM)
      .setObjectType("file")
      .setValues(structuredData)
      .setVersion(Longs.toByteArray(System.currentTimeMillis()))
      // Reuse the SHA captured above, consistent with the other indexItem overloads.
      .setHash(metadataHash)
      .build();
  // Index the file content too
  String mimeType = FileTypeMap.getDefaultFileTypeMap()
      .getContentType(content.getName());
  AbstractInputStreamContent fileContent = new InputStreamContent(
      mimeType, content.read())
      .setLength(content.getSize())
      .setCloseInputStream(true);
  return new RepositoryDoc.Builder()
      .setItem(item)
      .setContent(fileContent, IndexingService.ContentFormat.RAW)
      .setRequestMode(IndexingService.RequestMode.SYNCHRONOUS)
      .build();
}
/**
 * Uploads the apk to the Play Console and creates a release on the configured track.
 *
 * <p>Steps: optionally configure an HTTPS proxy, load service-account credentials from the
 * JSON key file, read apk metadata, load release notes, open an edit, upload the apk, create a
 * "completed" release on {@code trackName}, and commit the edit. If anything fails after the
 * edit was created, the edit is deleted so no partial state is left on the Play Console.
 *
 * @throws Exception if credential loading, apk parsing, or any publisher API call fails; the
 *     original failure is preserved as the cause of the thrown {@link IOException}
 */
private void upload() throws Exception {
  // Configure proxy. NOTE: these are JVM-wide system properties and affect every HTTPS
  // connection in this process, not just the publisher client.
  if (this.proxyHost != null && !this.proxyHost.isEmpty()) {
    System.setProperty("https.proxyHost", this.proxyHost);
  }
  if (this.proxyPort != null && !this.proxyPort.isEmpty()) {
    System.setProperty("https.proxyPort", this.proxyPort);
  }
  // Load key file credentials; try-with-resources closes the stream (the original leaked the
  // file handle, since GoogleCredential.fromStream does not close its argument).
  System.out.println("Loading account credentials...");
  Path jsonKey = FileSystems.getDefault().getPath(this.jsonKeyPath).normalize();
  GoogleCredential cred;
  try (FileInputStream keyStream = new FileInputStream(jsonKey.toFile())) {
    cred = GoogleCredential.fromStream(keyStream);
  }
  cred = cred.createScoped(Collections.singleton(AndroidPublisherScopes.ANDROIDPUBLISHER));
  // Load apk file info; try-with-resources guarantees the ApkFile is closed even when
  // metadata parsing throws (the original only closed it on the happy path).
  System.out.println("Loading apk file information...");
  Path apkFile = FileSystems.getDefault().getPath(this.apkPath).normalize();
  final String applicationName;
  final String packageName;
  try (ApkFile apkInfo = new ApkFile(apkFile.toFile())) {
    ApkMeta apkMeta = apkInfo.getApkMeta();
    // Explicit appName wins over the name embedded in the apk manifest.
    applicationName = this.appName == null ? apkMeta.getName() : this.appName;
    packageName = apkMeta.getPackageName();
    System.out.println(String.format("App Name: %s", apkMeta.getName()));
    System.out.println(String.format("App Id: %s", apkMeta.getPackageName()));
    System.out.println(String.format("App Version Code: %d", apkMeta.getVersionCode()));
    System.out.println(String.format("App Version Name: %s", apkMeta.getVersionName()));
  }
  // Load release notes: a notes file takes precedence over inline notes; both are optional.
  // NOTE(review): the file is read in the platform default charset — confirm UTF-8 is not
  // required before changing it, since that would alter behavior on non-UTF-8 platforms.
  System.out.println("Loading release notes...");
  List<LocalizedText> releaseNotes = new ArrayList<>();
  if (this.notesPath != null) {
    Path notesFile = FileSystems.getDefault().getPath(this.notesPath).normalize();
    String notesContent = new String(Files.readAllBytes(notesFile));
    releaseNotes.add(new LocalizedText().setLanguage(Locale.US.toString()).setText(notesContent));
  } else if (this.notes != null) {
    releaseNotes.add(new LocalizedText().setLanguage(Locale.US.toString()).setText(this.notes));
  }
  // Init publisher client with the scoped credential.
  System.out.println("Initialising publisher service...");
  AndroidPublisher.Builder ab = new AndroidPublisher.Builder(cred.getTransport(), cred.getJsonFactory(), cred);
  AndroidPublisher publisher = ab.setApplicationName(applicationName).build();
  // Create an edit: all changes below are staged against this edit until commit.
  System.out.println("Initialising new edit...");
  AppEdit edit = publisher.edits().insert(packageName, null).execute();
  final String editId = edit.getId();
  System.out.println(String.format("Edit created. Id: %s", editId));
  try {
    // Upload the apk into the edit.
    System.out.println("Uploading apk file...");
    AbstractInputStreamContent apkContent = new FileContent(MIME_TYPE_APK, apkFile.toFile());
    Apk apk = publisher.edits().apks().upload(packageName, editId, apkContent).execute();
    System.out.println(String.format("Apk uploaded. Version Code: %s", apk.getVersionCode()));
    // Create a completed (fully rolled-out) release on the requested track.
    System.out.println(String.format("On track:%s. Creating a release...", this.trackName));
    TrackRelease release = new TrackRelease().setName("Automated upload").setStatus("completed")
        .setVersionCodes(Collections.singletonList((long) apk.getVersionCode()))
        .setReleaseNotes(releaseNotes);
    Track track = new Track().setReleases(Collections.singletonList(release));
    track = publisher.edits().tracks().update(packageName, editId, this.trackName, track).execute();
    System.out.println(String.format("Release created on track: %s", this.trackName));
    // Commit the edit — this is the point where the release actually goes live.
    System.out.println("Committing edit...");
    publisher.edits().commit(packageName, editId).execute();
    System.out.println(String.format("Success. Committed Edit id: %s", editId));
    // Success
  } catch (Exception e) {
    // Build an error message, then try to abort the edit so nothing is left half-staged.
    String msg = "Operation Failed: " + e.getMessage();
    System.err.println("Operation failed due to an error! Deleting edit...");
    try {
      publisher.edits().delete(packageName, editId).execute();
    } catch (Exception e2) {
      // Record the cleanup failure too; the original failure still takes precedence.
      msg += "\nFailed to delete edit: " + e2.getMessage();
    }
    // Re-throw with the original exception preserved as the cause.
    throw new IOException(msg, e);
  }
}
/**
 * Launches a job, waits for it to complete, and checks it for errors.
 *
 * <p>NOTE(review): the previous Javadoc claimed errors were <i>not</i> checked, but the body
 * has always passed the completed job through {@code checkJob}. The documentation is corrected
 * here rather than the code, so existing callers keep their error-checking behavior.
 *
 * @param job the job to launch
 * @param data optional content to upload alongside the job; may be {@code null}
 * @return the completed (and checked) job
 */
public Job runJob(Job job, @Nullable AbstractInputStreamContent data) {
  // launch -> block until terminal state -> validate the terminal state.
  return checkJob(waitForJob(launchJob(job, data)));
}
/**
 * Test handling when the parent thread waiting for the write to finish via the close call is
 * interrupted, that the actual write is cancelled and interrupted as well.
 */
@Test
public void testCreateObjectApiInterruptedException() throws Exception {
  // Prepare the mock return values before invoking the method being tested.
  when(mockStorage.objects()).thenReturn(mockStorageObjects);
  when(mockStorageObjects.insert(
          eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class)))
      .thenReturn(mockStorageObjectsInsert);
  // Set up the mock Insert to wait forever.
  CountDownLatch waitForEverLatch = new CountDownLatch(1);
  // Counted down twice — once by the mocked write (below) and once by the closing thread —
  // so awaiting it proves both sides have reached their blocking points.
  CountDownLatch writeStartedLatch = new CountDownLatch(2);
  CountDownLatch threadsDoneLatch = new CountDownLatch(2);
  setupNonConflictedWrite(
      unused -> {
        try {
          writeStartedLatch.countDown();
          // Blocks until interrupted by write.cancel(true); this latch is never released,
          // so reaching the line after await() would mean the interrupt was lost.
          waitForEverLatch.await();
          fail("Unexpected to get here.");
          return null;
        } finally {
          threadsDoneLatch.countDown();
        }
      });
  WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));
  assertThat(writeChannel.isOpen()).isTrue();
  // Second thread: blocks in close() waiting on the stuck write; the interrupt must surface
  // from close() as a ClosedByInterruptException (a subtype of IOException).
  Future<?> write =
      executorService.submit(
          () -> {
            writeStartedLatch.countDown();
            try {
              IOException ioe = assertThrows(IOException.class, writeChannel::close);
              assertThat(ioe).isInstanceOf(ClosedByInterruptException.class);
            } finally {
              threadsDoneLatch.countDown();
            }
          });
  // Wait for the insert object to be executed, then cancel the writing thread, and finally wait
  // for the two threads to finish.
  assertWithMessage("Neither thread started.")
      .that(writeStartedLatch.await(5000, TimeUnit.MILLISECONDS))
      .isTrue();
  write.cancel(/* interrupt= */ true);
  assertWithMessage("Failed to wait for tasks to get interrupted.")
      .that(threadsDoneLatch.await(5000, TimeUnit.MILLISECONDS))
      .isTrue();
  // Verify the exact mock interaction sequence the channel is expected to perform.
  verify(mockStorage, times(2)).objects();
  verify(mockStorageObjects)
      .insert(eq(BUCKET_NAME), any(StorageObject.class), any(AbstractInputStreamContent.class));
  verify(mockStorageObjectsInsert).setName(eq(OBJECT_NAME));
  verify(mockStorageObjectsInsert).setDisableGZipContent(eq(true));
  verify(mockClientRequestHelper).setChunkSize(any(Storage.Objects.Insert.class), anyInt());
  verify(mockStorageObjectsInsert).setIfGenerationMatch(eq(0L));
  verify(mockStorageObjects).get(eq(BUCKET_NAME), eq(OBJECT_NAME));
  verify(mockStorageObjectsGet).execute();
  verify(mockErrorExtractor).itemNotFound(any(IOException.class));
  verify(mockStorageObjectsInsert).execute();
}