Java Code Examples for com.google.api.client.googleapis.media.MediaHttpUploader#MINIMUM_CHUNK_SIZE

The following examples show how to use com.google.api.client.googleapis.media.MediaHttpUploader#MINIMUM_CHUNK_SIZE. They are taken from open-source projects; each example notes its original source file, project, and license.
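Before the examples, a minimal sketch of where the constant typically appears: MINIMUM_CHUNK_SIZE (256 KiB) is the smallest legal upload chunk size, and MediaHttpUploader#setChunkSize expects a positive multiple of it. The snippet below assumes a Drive v3 client (`drive`) and a local file; those names and the MIME type are illustrative and not taken from the examples that follow.

import com.google.api.client.googleapis.media.MediaHttpUploader;
import com.google.api.client.http.FileContent;
import com.google.api.services.drive.Drive;
import com.google.api.services.drive.model.File;

void uploadWithChunks(Drive drive, java.io.File localFile) throws java.io.IOException {
  File metadata = new File().setName(localFile.getName());
  FileContent content = new FileContent("application/octet-stream", localFile);

  Drive.Files.Create create = drive.files().create(metadata, content);
  MediaHttpUploader uploader = create.getMediaHttpUploader();

  // Resumable, chunked upload in 1 MiB chunks;
  // setChunkSize requires a positive multiple of MINIMUM_CHUNK_SIZE.
  uploader.setDirectUploadEnabled(false);
  uploader.setChunkSize(4 * MediaHttpUploader.MINIMUM_CHUNK_SIZE);

  create.execute();
}

For small payloads the chunked machinery can be skipped with setDirectUploadEnabled(true), which is the switch Example 1 below makes based on the computed chunk count.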
Example 1
Source File: RemoteGoogleDriveConnector.java    From cloudsync with GNU General Public License v2.0
private void prepareUploader(MediaHttpUploader uploader, long length)
{
	// Chunk size must be a positive multiple of MediaHttpUploader.MINIMUM_CHUNK_SIZE.
	int chunkSize = MediaHttpUploader.MINIMUM_CHUNK_SIZE * CHUNK_COUNT;
	int chunkCount = (int) Math.ceil(length / (double) chunkSize);

	if (showProgress && chunkCount > 1)
	{
		// Use a resumable, chunked upload so progress can be reported per chunk.
		uploader.setDirectUploadEnabled(false);
		uploader.setChunkSize(chunkSize);
		uploader.setProgressListener(new RemoteGoogleDriveProgress(this, length));
	}
	else
	{
		// Small uploads (or no progress reporting) go out in a single direct request.
		uploader.setDirectUploadEnabled(true);
	}
}
 
Example 2
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0
@Test
public void reupload_failure_cacheTooSmall_singleWrite_singleChunk() throws Exception {
  byte[] testData = new byte[MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  int uploadChunkSize = testData.length;
  int uploadCacheSize = testData.length / 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonErrorResponse(ErrorResponses.GONE));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  WritableByteChannel writeChannel = gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME));
  writeChannel.write(ByteBuffer.wrap(testData));

  IOException writeException = assertThrows(IOException.class, writeChannel::close);

  assertThat(writeException).hasCauseThat().isInstanceOf(GoogleJsonResponseException.class);
  assertThat(writeException).hasCauseThat().hasMessageThat().startsWith("410");
}
 
Example 3
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0
@Test
public void reupload_success_singleWrite_singleUploadChunk() throws Exception {
  byte[] testData = new byte[MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  int uploadChunkSize = testData.length * 2;
  int uploadCacheSize = testData.length * 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2))
      .inOrder();

  HttpRequest writeRequest = trackingHttpRequestInitializer.getAllRequests().get(4);
  assertThat(writeRequest.getContent().getLength()).isEqualTo(testData.length);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequest.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(testData);
  }
}
 
Example 4
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0
@Test
public void reupload_success_singleWrite_multipleUploadChunks() throws Exception {
  byte[] testData = new byte[2 * MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  int uploadChunkSize = testData.length / 2;
  int uploadCacheSize = testData.length * 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 1st chunk
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 1st chunk
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 3),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 4))
      .inOrder();

  HttpRequest writeRequestChunk1 = trackingHttpRequestInitializer.getAllRequests().get(5);
  assertThat(writeRequestChunk1.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk2 = trackingHttpRequestInitializer.getAllRequests().get(6);
  assertThat(writeRequestChunk2.getContent().getLength()).isEqualTo(testData.length / 2);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequestChunk1.getContent().writeTo(writtenData);
    writeRequestChunk2.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(testData);
  }
}
 
Example 5
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0
@Test
public void reupload_success_multipleWrites_singleUploadChunk() throws Exception {
  byte[] testData = new byte[MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  int uploadChunkSize = testData.length * 2;
  int uploadCacheSize = testData.length * 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2))
      .inOrder();

  HttpRequest writeRequest = trackingHttpRequestInitializer.getAllRequests().get(4);
  assertThat(writeRequest.getContent().getLength()).isEqualTo(2 * testData.length);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequest.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(Bytes.concat(testData, testData));
  }
}
 
Example 6
Source File: GoogleCloudStorageTest.java    From hadoop-connectors with Apache License 2.0
@Test
public void reupload_success_multipleWrites_multipleUploadChunks() throws Exception {
  byte[] testData = new byte[2 * MediaHttpUploader.MINIMUM_CHUNK_SIZE];
  new Random().nextBytes(testData);
  int uploadChunkSize = testData.length / 2;
  int uploadCacheSize = testData.length * 2;

  MockHttpTransport transport =
      mockTransport(
          emptyResponse(HttpStatusCodes.STATUS_CODE_NOT_FOUND),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 1st chunk
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          jsonErrorResponse(ErrorResponses.GONE),
          resumableUploadResponse(BUCKET_NAME, OBJECT_NAME),
          // "308 Resume Incomplete" - successfully uploaded 3 chunks
          emptyResponse(308).addHeader("Range", "bytes=0-" + (uploadChunkSize - 1)),
          emptyResponse(308).addHeader("Range", "bytes=0-" + (2 * uploadChunkSize - 1)),
          emptyResponse(308).addHeader("Range", "bytes=0-" + (3 * uploadChunkSize - 1)),
          jsonDataResponse(
              newStorageObject(BUCKET_NAME, OBJECT_NAME)
                  .setSize(BigInteger.valueOf(2 * testData.length))));

  AsyncWriteChannelOptions writeOptions =
      AsyncWriteChannelOptions.builder()
          .setUploadChunkSize(uploadChunkSize)
          .setUploadCacheSize(uploadCacheSize)
          .build();

  GoogleCloudStorage gcs =
      mockedGcs(GCS_OPTIONS.toBuilder().setWriteChannelOptions(writeOptions).build(), transport);

  try (WritableByteChannel writeChannel =
      gcs.create(new StorageResourceId(BUCKET_NAME, OBJECT_NAME))) {
    writeChannel.write(ByteBuffer.wrap(testData));
    writeChannel.write(ByteBuffer.wrap(testData));
  }

  assertThat(trackingHttpRequestInitializer.getAllRequestStrings())
      .containsExactly(
          getRequestString(BUCKET_NAME, OBJECT_NAME),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 1),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 2),
          resumableUploadRequestString(
              BUCKET_NAME, OBJECT_NAME, /* generationId= */ 0, /* replaceGenerationId= */ false),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 3),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 4),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 5),
          resumableUploadChunkRequestString(BUCKET_NAME, OBJECT_NAME, /* uploadId= */ 6))
      .inOrder();

  HttpRequest writeRequestChunk1 = trackingHttpRequestInitializer.getAllRequests().get(5);
  assertThat(writeRequestChunk1.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk2 = trackingHttpRequestInitializer.getAllRequests().get(6);
  assertThat(writeRequestChunk2.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk3 = trackingHttpRequestInitializer.getAllRequests().get(7);
  assertThat(writeRequestChunk3.getContent().getLength()).isEqualTo(testData.length / 2);
  HttpRequest writeRequestChunk4 = trackingHttpRequestInitializer.getAllRequests().get(8);
  assertThat(writeRequestChunk4.getContent().getLength()).isEqualTo(testData.length / 2);
  try (ByteArrayOutputStream writtenData = new ByteArrayOutputStream(testData.length)) {
    writeRequestChunk1.getContent().writeTo(writtenData);
    writeRequestChunk2.getContent().writeTo(writtenData);
    writeRequestChunk3.getContent().writeTo(writtenData);
    writeRequestChunk4.getContent().writeTo(writtenData);
    assertThat(writtenData.toByteArray()).isEqualTo(Bytes.concat(testData, testData));
  }
}