com.google.api.client.googleapis.media.MediaHttpUploader Java Examples

The following examples show how to use com.google.api.client.googleapis.media.MediaHttpUploader. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example. You may check out the related API usage on the sidebar.
Example #1
Source File: MediaUploadProgressListener.java    From jbpm-work-items with Apache License 2.0 6 votes vote down vote up
/**
 * Logs each upload lifecycle transition reported by the {@link MediaHttpUploader}.
 *
 * @param uploader the uploader whose state changed
 * @throws IOException declared by the listener contract
 */
@Override
public void progressChanged(MediaHttpUploader uploader) throws IOException {
    switch (uploader.getUploadState()) {
        case INITIATION_STARTED:
            logger.info("Upload initialization has started");
            break;
        case INITIATION_COMPLETE:
            // Fixed typo: message previously read "Uplaod".
            logger.info("Upload initialization has completed");
            break;
        case MEDIA_IN_PROGRESS:
            logger.info("Upload Progress: " + uploader.getProgress());
            break;
        case MEDIA_COMPLETE:
            logger.info("Upload is complete");
            break;
        default:
            // NOT_STARTED and any future states are intentionally not logged.
            break;
    }
}
 
Example #2
Source File: AndroidGoogleDrive.java    From QtAndroidTools with MIT License 6 votes vote down vote up
/**
 * Forwards the uploader's state transitions to uploadProgressChanged(),
 * mapping each MediaHttpUploader state to the matching STATE_* code and a
 * progress fraction (0.0 at initiation, live progress in flight, 1.0 on completion).
 */
public void progressChanged(MediaHttpUploader uploader) throws IOException
{
    final MediaHttpUploader.UploadState currentState = uploader.getUploadState();

    if(currentState == MediaHttpUploader.UploadState.INITIATION_STARTED)
    {
        uploadProgressChanged(STATE_INITIATION_STARTED, 0.0);
    }
    else if(currentState == MediaHttpUploader.UploadState.INITIATION_COMPLETE)
    {
        uploadProgressChanged(STATE_INITIATION_COMPLETE, 0.0);
    }
    else if(currentState == MediaHttpUploader.UploadState.MEDIA_IN_PROGRESS)
    {
        uploadProgressChanged(STATE_MEDIA_IN_PROGRESS, uploader.getProgress());
    }
    else if(currentState == MediaHttpUploader.UploadState.MEDIA_COMPLETE)
    {
        uploadProgressChanged(STATE_MEDIA_COMPLETE, 1.0);
    }
}
 
Example #3
Source File: RemoteGoogleDriveConnector.java    From cloudsync with GNU General Public License v2.0 6 votes vote down vote up
/**
 * Configures the uploader for the given payload length: a chunked resumable
 * upload with a progress listener when progress display is enabled and the
 * payload spans more than one chunk, otherwise a single direct upload.
 */
private void prepareUploader(MediaHttpUploader uploader, long length)
{
	final int chunkSize = MediaHttpUploader.MINIMUM_CHUNK_SIZE * CHUNK_COUNT;
	final int chunkCount = (int) Math.ceil(length / (double) chunkSize);

	// Small payloads, or runs without progress display, go up in one request.
	if (!showProgress || chunkCount <= 1)
	{
		uploader.setDirectUploadEnabled(true);
		return;
	}

	uploader.setDirectUploadEnabled(false);
	uploader.setChunkSize(chunkSize);
	uploader.setProgressListener(new RemoteGoogleDriveProgress(this, length));
}
 
Example #4
Source File: AbstractGoogleClientTest.java    From google-api-java-client with Apache License 2.0 5 votes vote down vote up
/**
 * Exercises a resumable media upload with no content length specified and the
 * default gzip configuration; the mocked transport must yield the expected result.
 */
public void testMediaUpload_defaultGZip() throws Exception {
  MediaTransport mediaTransport = new MediaTransport();
  mediaTransport.contentLengthNotSpecified = true;
  AbstractGoogleClient googleClient = new MockGoogleClient.Builder(
      mediaTransport, TEST_RESUMABLE_REQUEST_URL, "", JSON_OBJECT_PARSER,
      new GZipCheckerInitializer(false)).setApplicationName("Test Application").build();
  InputStream payloadStream =
      new ByteArrayInputStream(new byte[MediaHttpUploader.DEFAULT_CHUNK_SIZE]);
  InputStreamContent uploadContent = new InputStreamContent(TEST_CONTENT_TYPE, payloadStream);
  MockGoogleClientRequest<A> request =
      new MockGoogleClientRequest<A>(googleClient, "POST", "", null, A.class);
  request.initializeMediaUpload(uploadContent);
  assertEquals("somevalue", request.execute().foo);
}
 
Example #5
Source File: AbstractGoogleClientTest.java    From google-api-java-client with Apache License 2.0 5 votes vote down vote up
/**
 * Exercises a resumable media upload with no content length specified and
 * gzip content explicitly left enabled via setDisableGZipContent(false).
 */
public void testMediaUpload_enableGZip() throws Exception {
  MediaTransport mediaTransport = new MediaTransport();
  mediaTransport.contentLengthNotSpecified = true;
  AbstractGoogleClient googleClient = new MockGoogleClient.Builder(
      mediaTransport, TEST_RESUMABLE_REQUEST_URL, "", JSON_OBJECT_PARSER,
      new GZipCheckerInitializer(false)).setApplicationName("Test Application").build();
  InputStream payloadStream =
      new ByteArrayInputStream(new byte[MediaHttpUploader.DEFAULT_CHUNK_SIZE]);
  InputStreamContent uploadContent = new InputStreamContent(TEST_CONTENT_TYPE, payloadStream);
  MockGoogleClientRequest<A> request =
      new MockGoogleClientRequest<A>(googleClient, "POST", "", null, A.class);
  request.initializeMediaUpload(uploadContent);
  request.setDisableGZipContent(false);
  assertEquals("somevalue", request.execute().foo);
}
 
Example #6
Source File: AbstractGoogleClientTest.java    From google-api-java-client with Apache License 2.0 5 votes vote down vote up
/**
 * Exercises a resumable media upload with no content length specified and
 * gzip content explicitly disabled via setDisableGZipContent(true).
 */
public void testMediaUpload_disableGZip() throws Exception {
  MediaTransport mediaTransport = new MediaTransport();
  mediaTransport.contentLengthNotSpecified = true;
  AbstractGoogleClient googleClient = new MockGoogleClient.Builder(
      mediaTransport, TEST_RESUMABLE_REQUEST_URL, "", JSON_OBJECT_PARSER,
      new GZipCheckerInitializer(true)).setApplicationName("Test Application").build();
  InputStream payloadStream =
      new ByteArrayInputStream(new byte[MediaHttpUploader.DEFAULT_CHUNK_SIZE]);
  InputStreamContent uploadContent = new InputStreamContent(TEST_CONTENT_TYPE, payloadStream);
  MockGoogleClientRequest<A> request =
      new MockGoogleClientRequest<A>(googleClient, "POST", "", null, A.class);
  request.initializeMediaUpload(uploadContent);
  request.setDisableGZipContent(true);
  assertEquals("somevalue", request.execute().foo);
}
 
Example #7
Source File: AbstractGoogleClientTest.java    From google-api-java-client with Apache License 2.0 5 votes vote down vote up
/**
 * Exercises a plain resumable media upload with an explicit content length
 * of exactly one default chunk.
 */
public void testMediaUpload() throws Exception {
  MediaTransport mediaTransport = new MediaTransport();
  AbstractGoogleClient googleClient = new MockGoogleClient.Builder(
      mediaTransport, TEST_RESUMABLE_REQUEST_URL, "", JSON_OBJECT_PARSER, null).setApplicationName(
      "Test Application").build();
  InputStream payloadStream =
      new ByteArrayInputStream(new byte[MediaHttpUploader.DEFAULT_CHUNK_SIZE]);
  InputStreamContent uploadContent = new InputStreamContent(TEST_CONTENT_TYPE, payloadStream);
  uploadContent.setLength(MediaHttpUploader.DEFAULT_CHUNK_SIZE);
  MockGoogleClientRequest<A> request =
      new MockGoogleClientRequest<A>(googleClient, "POST", "", null, A.class);
  request.initializeMediaUpload(uploadContent);
  assertEquals("somevalue", request.execute().foo);
}
 
Example #8
Source File: AbstractGoogleClientRequest.java    From google-api-java-client with Apache License 2.0 5 votes vote down vote up
/**
 * Initializes the media HTTP uploader based on the media content.
 *
 * <p>The uploader is created from the client's request factory, uses this
 * request's HTTP method to initiate the upload, and carries the request's
 * HTTP content as upload metadata when present.</p>
 *
 * @param mediaContent media content
 */
protected final void initializeMediaUpload(AbstractInputStreamContent mediaContent) {
  HttpRequestFactory factory = abstractGoogleClient.getRequestFactory();
  this.uploader =
      new MediaHttpUploader(mediaContent, factory.getTransport(), factory.getInitializer());
  this.uploader.setInitiationRequestMethod(requestMethod);
  if (httpContent != null) {
    this.uploader.setMetadata(httpContent);
  }
}
 
Example #9
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0 5 votes vote down vote up
// Simulates a resumable upload that fails with "410 Gone" when the write
// channel's re-upload cache (sized at half the payload) is too small to
// replay the data, so closing the channel must surface the failure.
@Test
public void reupload_failure_cacheTooSmall_singleWrite_singleChunk() throws Exception {
  byte[] testData = new byte[MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  int uploadChunkSize = testData.length;
  // Cache holds only half of what is written, so replay after failure is impossible.
  int uploadCacheSize = testData.length / 2;

  // Mocked HTTP exchange: 404 probe, resumable-upload initiation, then a
  // "410 Gone" JSON error while uploading the chunk.
  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonErrorResponse(ErrorResponses.GONE));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));
  writeChannel.write(ByteBuffer.wrap(testData));

  // The failure is reported when the channel is closed, not on write().
  IOException writeException = assertThrows(IOException.class, writeChannel::close);

  assertThat(writeException).hasCauseThat().isInstanceOf(GoogleJsonResponseException.class);
  assertThat(writeException).hasCauseThat().hasMessageThat().startsWith("410");
}
 
Example #10
Source File: AsyncWriteChannelOptions.java    From hadoop-connectors with Apache License 2.0 5 votes vote down vote up
/**
 * Validates a configured upload chunk size.
 *
 * @param chunkSize upload chunk size in bytes
 * @throws IllegalArgumentException if {@code chunkSize} is not positive or is
 *     not a multiple of {@link MediaHttpUploader#MINIMUM_CHUNK_SIZE}
 */
private static void checkUploadChunkSize(int chunkSize) {
  // Fixed message: was "must be great than 0, but was $%s" (typo plus a stray
  // "$" that would have been printed literally before the %s placeholder).
  checkArgument(
      chunkSize > 0, "Upload chunk size must be greater than 0, but was %s", chunkSize);
  checkArgument(
      chunkSize % MediaHttpUploader.MINIMUM_CHUNK_SIZE == 0,
      "Upload chunk size must be a multiple of %s",
      MediaHttpUploader.MINIMUM_CHUNK_SIZE);
  // Non-multiples of the granularity above the threshold still work but
  // degrade performance, so only warn instead of failing.
  if (chunkSize > UPLOAD_CHUNK_SIZE_GRANULARITY
      && chunkSize % UPLOAD_CHUNK_SIZE_GRANULARITY != 0) {
    logger.atWarning().log(
        "Upload chunk size should be a multiple of %s for the best performance, got %s",
        UPLOAD_CHUNK_SIZE_GRANULARITY, chunkSize);
  }
}
 
Example #11
Source File: RemoteGoogleDriveConnector.java    From cloudsync with GNU General Public License v2.0 5 votes vote down vote up
/**
 * Progress callback for a running upload: logs the completed percentage,
 * uploaded kilobytes and - while media is in flight - the transfer speed.
 */
@Override
public void progressChanged(MediaHttpUploader mediaHttpUploader) throws IOException
{
	if (mediaHttpUploader == null) return;

	switch ( mediaHttpUploader.getUploadState() )
	{
		case INITIATION_COMPLETE:
			break;
		// INITIATION_STARTED intentionally falls through into the progress branch.
		case INITIATION_STARTED:
		case MEDIA_IN_PROGRESS:

			// Keep the OAuth credential fresh during long-running uploads.
			this.connector.refreshCredential();

			double percent = mediaHttpUploader.getProgress() * 100;

			long currentTime = System.currentTimeMillis();

			String msg = "\r  " + df.format(Math.ceil(percent)) + "% (" + convertToKB(mediaHttpUploader.getNumBytesUploaded()) + " of "
					+ convertToKB(length) + " kb)";

			if (mediaHttpUploader.getUploadState().equals(UploadState.MEDIA_IN_PROGRESS))
			{
				// NOTE(review): if two callbacks arrive within the same millisecond,
				// (currentTime - lastTime) is 0 and the double division yields an
				// infinite speed value — confirm this is acceptable for display.
				long speed = convertToKB((mediaHttpUploader.getNumBytesUploaded() - lastBytes) / ((currentTime - lastTime) / 1000.0));
				msg += " - " + speed + " kb/s";
			}

			LOGGER.log(Level.FINEST, msg, true);

			// Remember the timestamp/byte count for the next speed computation.
			lastTime = currentTime;
			lastBytes = mediaHttpUploader.getNumBytesUploaded();
			break;
		case MEDIA_COMPLETE:
			// System.out.println("Upload is complete!");
		default:
			break;
	}
}
 
Example #12
Source File: GoogleDriveWorkitemHandlerTest.java    From jbpm-work-items with Apache License 2.0 5 votes vote down vote up
/**
 * Wires PowerMockito mocks so that Drive insert/get calls made by the
 * handlers under test return canned uploader/downloader instances and a
 * fixed document input stream, without contacting the real Drive API.
 */
@Before
public void setUp() {
    try {
        // Content returned when the handler downloads a document.
        InputStream testInputStream =
                IOUtils.toInputStream("test doc content",
                                      "UTF-8");

        MediaHttpUploader mediaHttpUploader = PowerMockito.mock(MediaHttpUploader.class);
        MediaHttpDownloader mediaHttpDownloader = PowerMockito.mock(MediaHttpDownloader.class);

        Drive.Files.Insert gdriveFilesInsert = PowerMockito.mock(Drive.Files.Insert.class);
        Drive.Files.Get gdriveFilesGet = PowerMockito.mock(Drive.Files.Get.class);

        // Any (appName, clientSecret) pair resolves to the mocked Drive service.
        when(auth.getDriveService(anyString(),
                                  anyString())).thenReturn(gdriveService);
        when(gdriveService.files()).thenReturn(gdriveFiles);
        when(gdriveFiles.insert(any(File.class),
                                any(FileContent.class))).thenReturn(gdriveFilesInsert);
        when(gdriveFiles.get(anyString())).thenReturn(gdriveFilesGet);

        when(gdriveFilesInsert.getMediaHttpUploader()).thenReturn(mediaHttpUploader);
        when(gdriveFilesInsert.execute()).thenReturn(new File());
        when(gdriveFilesGet.getMediaHttpDownloader()).thenReturn(mediaHttpDownloader);
        when(gdriveFilesGet.executeMediaAsInputStream()).thenReturn(testInputStream);
    } catch (Exception e) {
        // NOTE(review): fail(e.getMessage()) discards the stack trace; consider
        // declaring "throws Exception" on setUp instead.
        fail(e.getMessage());
    }
}
 
Example #13
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0 4 votes vote down vote up
// Verifies that a single-chunk upload interrupted by "410 Gone" is retried
// from the write channel's cache (sized to hold the whole payload) and that
// the retried request carries exactly the original bytes.
@Test
public void reupload_success_singleWrite_singleUploadChunk() throws Exception {
  byte[] testData = new byte[MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  // Cache and chunk are both large enough that the full payload can be replayed.
  int uploadChunkSize = testData.length * 2;
  int uploadCacheSize = testData.length * 2;

  // Mocked exchange: 404 probe, initiation, "410 Gone" on the first attempt,
  // then a second initiation and a successful data response on retry.
  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  // The request log must show the initial attempt followed by the full retry.
  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2))
      .inOrder();

  // Request index 4 is the retried chunk upload; it must replay the exact bytes.
  HttpRequest writeRequest = trackingHttpRequestInitializer.getAllRequests().get(4);
  assertThat(writeRequest.getContent().getLength()).isEqualTo(testData.length);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequest.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(testData);
  }
}
 
Example #14
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0 4 votes vote down vote up
// Verifies that an upload split across two chunks, interrupted by "410 Gone"
// after the first chunk succeeded, is fully replayed from the cache and that
// the two retried chunk requests together carry the original bytes.
@Test
public void reupload_success_singleWrite_multipleUploadChunks() throws Exception {
  byte[] testData = new byte[2 * MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  // Two chunks per payload; cache large enough to replay everything.
  int uploadChunkSize = testData.length / 2;
  int uploadCacheSize = testData.length * 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 1st chunk
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 1st chunk
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  // Expected log: probe, initiation, two chunk uploads (second one failing),
  // then a second initiation followed by both chunks again.
  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 3),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 4))
      .inOrder();

  // Requests 5 and 6 are the retried chunk uploads; together they must
  // reproduce the original payload in order.
  HttpRequest writeRequestChunk1 = trackingHttpRequestInitializer.getAllRequests().get(5);
  assertThat(writeRequestChunk1.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk2 = trackingHttpRequestInitializer.getAllRequests().get(6);
  assertThat(writeRequestChunk2.getContent().getLength()).isEqualTo(testData.length / 2);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequestChunk1.getContent().writeTo(writtenData);
    writeRequestChunk2.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(testData);
  }
}
 
Example #15
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0 4 votes vote down vote up
// Verifies that two write() calls buffered into a single chunk are replayed
// from the cache after a "410 Gone" failure and that the retried request
// carries both copies of the data concatenated.
@Test
public void reupload_success_multipleWrites_singleUploadChunk() throws Exception {
  byte[] testData = new byte[MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  // Chunk and cache sized for the combined payload of both writes.
  int uploadChunkSize = testData.length * 2;
  int uploadCacheSize = testData.length * 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2))
      .inOrder();

  // Request index 4 is the retried chunk; it must contain both writes in order.
  // (testData.length is just the stream's initial capacity; it grows as needed.)
  HttpRequest writeRequest = trackingHttpRequestInitializer.getAllRequests().get(4);
  assertThat(writeRequest.getContent().getLength()).isEqualTo(2 * testData.length);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequest.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(Bytes.concat(testData, testData));
  }
}
 
Example #16
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0 4 votes vote down vote up
// Verifies that two write() calls spanning four upload chunks are fully
// replayed from the cache after a "410 Gone" failure, and that the four
// retried chunk requests together reproduce both payload copies in order.
@Test
public void reupload_success_multipleWrites_multipleUploadChunks() throws Exception {
  byte[] testData = new byte[2 * MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  // Each write spans two chunks; the cache covers both writes.
  int uploadChunkSize = testData.length / 2;
  int uploadCacheSize = testData.length * 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 1st chunk
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 3 chunks
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          emptyResponse(308).addHeader("Range", "bytes=0-" + (2 * uploadChunkSize - 1)),
          emptyResponse(308).addHeader("Range", "bytes=0-" + (3 * uploadChunkSize - 1)),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(2 * testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  // Expected log: probe, initiation, two chunks (second failing), then a
  // second initiation followed by all four chunks of the combined payload.
  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 3),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 4),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 5),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 6))
      .inOrder();

  // Requests 5-8 are the retried chunks; concatenated they must equal
  // both writes back to back.
  HttpRequest writeRequestChunk1 = trackingHttpRequestInitializer.getAllRequests().get(5);
  assertThat(writeRequestChunk1.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk2 = trackingHttpRequestInitializer.getAllRequests().get(6);
  assertThat(writeRequestChunk2.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk3 = trackingHttpRequestInitializer.getAllRequests().get(7);
  assertThat(writeRequestChunk3.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk4 = trackingHttpRequestInitializer.getAllRequests().get(8);
  assertThat(writeRequestChunk4.getContent().getLength()).isEqualTo(testData.length / 2);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequestChunk1.getContent().writeTo(writtenData);
    writeRequestChunk2.getContent().writeTo(writtenData);
    writeRequestChunk3.getContent().writeTo(writtenData);
    writeRequestChunk4.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(Bytes.concat(testData, testData));
  }
}
 
Example #17
Source File: LoggingMediaHttpUploaderProgressListener.java    From hadoop-connectors with Apache License 2.0 4 votes vote down vote up
/**
 * {@inheritDoc}
 *
 * <p>Delegates to the (state, bytesUploaded, time) overload, stamping the
 * event with the current wall-clock time.</p>
 */
@Override
public void progressChanged(MediaHttpUploader uploader) throws IOException {
  progressChanged(
      uploader.getUploadState(), uploader.getNumBytesUploaded(), System.currentTimeMillis());
}
 
Example #18
Source File: AbstractGoogleClientRequest.java    From google-api-java-client with Apache License 2.0 4 votes vote down vote up
/**
 * Returns the media HTTP Uploader or {@code null} for none.
 *
 * <p>The uploader is only non-null after {@code initializeMediaUpload} has
 * been called for this request.</p>
 */
public final MediaHttpUploader getMediaHttpUploader() {
  return uploader;
}
 
Example #19
Source File: RemoteGoogleDriveConnector.java    From cloudsync with GNU General Public License v2.0 4 votes vote down vote up
/**
 * Updates a remote item on Google Drive, optionally uploading new file data.
 *
 * <p>For files, a copy of the current revision is first placed into the
 * history folder (when one exists) before the item itself is updated. The
 * whole operation is retried on IOException via validateException().</p>
 */
@Override
public void update(final Handler handler, final Item item, final boolean with_filedata) throws CloudsyncException, FileIOException
{
	initService(handler);

	int retryCount = 0;
	do
	{
		try
		{
			// Keep the OAuth token valid across long-running operations.
			refreshCredential();

			if (item.isType(ItemType.FILE))
			{
				// Snapshot the current remote revision into the history folder
				// before overwriting it.
				final File _parentDriveItem = _getHistoryFolder(item);
				if (_parentDriveItem != null)
				{
					final File copyOfdriveItem = new File();
					final ParentReference _parentReference = new ParentReference();
					_parentReference.setId(_parentDriveItem.getId());
					copyOfdriveItem.setParents(Collections.singletonList(_parentReference));
					// copyOfdriveItem.setTitle(driveItem.getTitle());
					// copyOfdriveItem.setMimeType(driveItem.getMimeType());
					// copyOfdriveItem.setProperties(driveItem.getProperties());
					final File _copyOfDriveItem = service.files().copy(item.getRemoteIdentifier(), copyOfdriveItem).execute();
					if (_copyOfDriveItem == null)
					{
						throw new CloudsyncException("Couldn't make a history snapshot of item '" + item.getPath() + "'");
					}
				}
			}
			File driveItem = new File();
			final LocalStreamData data = _prepareDriveItem(driveItem, item, handler, with_filedata);
			if (data == null)
			{
				// Metadata-only update: no file content to upload.
				driveItem = service.files().update(item.getRemoteIdentifier(), driveItem).execute();
			}
			else
			{
				// Metadata plus content: stream the data through a configured uploader.
				final InputStreamContent params = new InputStreamContent(FILE, data.getStream());
				params.setLength(data.getLength());
				Update updater = service.files().update(item.getRemoteIdentifier(), driveItem, params);
				MediaHttpUploader uploader = updater.getMediaHttpUploader();
				prepareUploader(uploader, data.getLength());
				driveItem = updater.execute();
			}
			if (driveItem == null)
			{
				throw new CloudsyncException("Couldn't update item '" + item.getPath() + "'");
			}
			else if (driveItem.getLabels().getTrashed())
			{
				throw new CloudsyncException("Remote item '" + item.getPath() + "' [" + driveItem.getId() + "] is trashed\ntry to run with --nocache");
			}
			_addToCache(driveItem, null);
			return;
		}
		catch (final IOException e)
		{
			// validateException either rethrows (giving up) or returns the next retry count.
			retryCount = validateException("remote update", item, e, retryCount);
			if(retryCount < 0) // TODO workaround - fix this later
				retryCount = 0;
		}
	}
	while (true);
}
 
Example #20
Source File: MediaUploadWorkitemHandler.java    From jbpm-work-items with Apache License 2.0 4 votes vote down vote up
/**
 * Uploads the work item's document to Google Drive using a direct
 * (non-resumable) media upload and completes the work item on success.
 *
 * <p>Parameters read from the work item: DocToUpload (Document),
 * DocMimeType, UploadPath. NOTE(review): "UploadPath" is read but never
 * used below — confirm whether the upload should honor it.</p>
 */
public void executeWorkItem(WorkItem workItem,
                            WorkItemManager workItemManager) {

    Document docToUpload = (Document) workItem.getParameter("DocToUpload");
    String docMimeType = (String) workItem.getParameter("DocMimeType");
    String uploadPath = (String) workItem.getParameter("UploadPath");

    try {

        RequiredParameterValidator.validate(this.getClass(),
                                            workItem);

        Drive drive = auth.getDriveService(appName,
                                           clientSecret);

        // Drive metadata mirrors the document's name, link and optional mtime.
        File fileMetadata = new File();
        fileMetadata.setTitle(docToUpload.getName());
        fileMetadata.setAlternateLink(docToUpload.getLink());
        if (docToUpload.getLastModified() != null) {
            fileMetadata.setModifiedDate(new DateTime(docToUpload.getLastModified()));
        }

        // Spool the content to a temp file because FileContent needs a java.io.File.
        java.io.File tempDocFile = java.io.File.createTempFile(FilenameUtils.getBaseName(docToUpload.getName()),
                                                               "." + FilenameUtils.getExtension(docToUpload.getName()));
        // Backstop cleanup in case the upload fails before the explicit delete below.
        tempDocFile.deleteOnExit();
        // try-with-resources closes the stream even if write() throws
        // (previously the stream leaked on failure).
        try (FileOutputStream fos = new FileOutputStream(tempDocFile)) {
            fos.write(docToUpload.getContent());
        }

        FileContent mediaContent = new FileContent(docMimeType,
                                                   tempDocFile);

        Drive.Files.Insert insert = drive.files().insert(fileMetadata,
                                                         mediaContent);
        MediaHttpUploader uploader = insert.getMediaHttpUploader();
        uploader.setDirectUploadEnabled(true);
        uploader.setProgressListener(new MediaUploadProgressListener());
        insert.execute();

        // Upload finished; remove the spool file eagerly instead of
        // waiting for JVM exit.
        tempDocFile.delete();

        workItemManager.completeWorkItem(workItem.getId(),
                                         null);
    } catch (Exception e) {
        handleException(e);
    }
}