com.amazonaws.services.dynamodbv2.model.BatchWriteItemRequest Java Examples

The following examples show how to use com.amazonaws.services.dynamodbv2.model.BatchWriteItemRequest. The examples are extracted from open source projects; you can go to the original project or source file by following the links above each example, and you may check out the related API usage on the sidebar.
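All of the examples below follow the same core pattern: build a map from table name to a list of WriteRequests (at most 25 per call), submit it with batchWriteItem, and resubmit whatever comes back in UnprocessedItems. Here is a minimal sketch of that pattern, assuming an AmazonDynamoDB client named client and the usual com.amazonaws.services.dynamodbv2.model imports; the table name and item contents are placeholders, not taken from any example below:

Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();
Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
item.put("Id", new AttributeValue().withS("example-id")); // placeholder item
List<WriteRequest> writes = new ArrayList<WriteRequest>();
writes.add(new WriteRequest().withPutRequest(new PutRequest().withItem(item)));
requestItems.put("ExampleTable", writes); // placeholder table name

BatchWriteItemResult result = client.batchWriteItem(
        new BatchWriteItemRequest().withRequestItems(requestItems));
// BatchWriteItem is not atomic: throttled writes come back in UnprocessedItems
// and must be resubmitted, ideally with backoff (see Examples #2 and #7).
while (!result.getUnprocessedItems().isEmpty()) {
    result = client.batchWriteItem(
            new BatchWriteItemRequest(result.getUnprocessedItems()));
}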
Example #1
Source File: DynamoDBConsumer.java    From dynamodb-import-export-tool with Apache License 2.0
/**
 * Calls splitResultIntoBatches to turn the SegmentedScanResult into several
 * BatchWriteItemRequests, then submits them as individual jobs to the
 * ExecutorService.
 */
@Override
public Future<Void> writeResult(SegmentedScanResult result) {
    Future<Void> jobSubmission = null;
    List<BatchWriteItemRequest> batches = splitResultIntoBatches(
            result.getScanResult(), tableName);
    Iterator<BatchWriteItemRequest> batchesIterator = batches.iterator();
    while (batchesIterator.hasNext()) {
        try {
            jobSubmission = exec
                    .submit(new DynamoDBConsumerWorker(batchesIterator
                            .next(), client, rateLimiter, tableName));
        } catch (NullPointerException npe) {
            throw new NullPointerException(
                    "Thread pool not initialized for LogStashExecutor");
        }
    }
    return jobSubmission;
}
 
Example #2
Source File: DynamoDBService2.java    From Doradus with Apache License 2.0
private void commitPartial(List<WriteRequest> list) {
    Timer t = new Timer();
    Map<String, List<WriteRequest>> map = new HashMap<>();
    map.put(getTenant().getName(), list);
    BatchWriteItemResult result = m_client.batchWriteItem(new BatchWriteItemRequest(map));
    int retry = 0;
    while (result.getUnprocessedItems().size() > 0) {
        if (retry == RETRY_SLEEPS.length) throw new RuntimeException("All retries failed");
        m_logger.debug("Committing {} unprocessed items, retry: {}", result.getUnprocessedItems().size(), retry + 1);
        try {
            Thread.sleep(RETRY_SLEEPS[retry++]);
        } catch (InterruptedException e) {
            // Restore the interrupt flag instead of swallowing the exception.
            Thread.currentThread().interrupt();
        }
        result = m_client.batchWriteItem(new BatchWriteItemRequest(result.getUnprocessedItems()));
    }
    m_logger.debug("Committed {} writes in {}", list.size(), t);
    list.clear();
}
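RETRY_SLEEPS here is assumed to be an array of sleep durations in milliseconds that doubles as the retry budget: once every entry has been used, the commit fails with a RuntimeException. A fixed schedule like this is a simpler alternative to the capped exponential backoff used in Example #7.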
 
Example #3
Source File: DynamoDBIOTestHelper.java    From beam with Apache License 2.0
static List<Map<String, AttributeValue>> generateTestData(String tableName, int numOfItems) {
  BatchWriteItemRequest batchWriteItemRequest =
      generateBatchWriteItemRequest(tableName, numOfItems);

  dynamoDBClient.batchWriteItem(batchWriteItemRequest);
  ScanResult scanResult = dynamoDBClient.scan(new ScanRequest().withTableName(tableName));

  List<Map<String, AttributeValue>> items = scanResult.getItems();
  Assert.assertEquals(numOfItems, items.size());
  return items;
}
 
Example #4
Source File: DynamoDBManager.java    From dynamodb-geo with Apache License 2.0
public BatchWritePointResult batchWritePoints(List<PutPointRequest> putPointRequests) {
	BatchWriteItemRequest batchItemRequest = new BatchWriteItemRequest();
	List<WriteRequest> writeRequests = new ArrayList<WriteRequest>();
	for (PutPointRequest putPointRequest : putPointRequests) {
		long geohash = S2Manager.generateGeohash(putPointRequest.getGeoPoint());
		long hashKey = S2Manager.generateHashKey(geohash, config.getHashKeyLength());
		String geoJson = GeoJsonMapper.stringFromGeoObject(putPointRequest.getGeoPoint());

		PutRequest putRequest = putPointRequest.getPutRequest();
		AttributeValue hashKeyValue = new AttributeValue().withN(String.valueOf(hashKey));
		putRequest.getItem().put(config.getHashKeyAttributeName(), hashKeyValue);
		putRequest.getItem().put(config.getRangeKeyAttributeName(), putPointRequest.getRangeKeyValue());
		AttributeValue geohashValue = new AttributeValue().withN(Long.toString(geohash));
		putRequest.getItem().put(config.getGeohashAttributeName(), geohashValue);
		AttributeValue geoJsonValue = new AttributeValue().withS(geoJson);
		putRequest.getItem().put(config.getGeoJsonAttributeName(), geoJsonValue);
		WriteRequest writeRequest = new WriteRequest(putRequest);
		writeRequests.add(writeRequest);
	}
	Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();
	requestItems.put(config.getTableName(), writeRequests);
	batchItemRequest.setRequestItems(requestItems);
	BatchWriteItemResult batchWriteItemResult = config.getDynamoDBClient().batchWriteItem(batchItemRequest);
	BatchWritePointResult batchWritePointResult = new BatchWritePointResult(batchWriteItemResult);
	return batchWritePointResult;
}
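Note that batchWritePoints sends every assembled WriteRequest in a single BatchWriteItemRequest: it neither splits the input into chunks of 25 nor retries unprocessed items, so callers are responsible for keeping putPointRequests within DynamoDB's 25-request BatchWriteItem limit and for inspecting the returned BatchWriteItemResult for unprocessed writes.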
 
Example #5
Source File: DynamoDBIOTest.java    From beam with Apache License 2.0
@Test
public void testRetries() throws Throwable {
  thrown.expectMessage("Error writing to DynamoDB");

  final List<WriteRequest> writeRequests = DynamoDBIOTestHelper.generateWriteRequests(numOfItems);

  AmazonDynamoDB amazonDynamoDBMock = Mockito.mock(AmazonDynamoDB.class);
  Mockito.when(amazonDynamoDBMock.batchWriteItem(Mockito.any(BatchWriteItemRequest.class)))
      .thenThrow(new AmazonDynamoDBException("Service unavailable"));

  pipeline
      .apply(Create.of(writeRequests))
      .apply(
          DynamoDBIO.<WriteRequest>write()
              .withWriteRequestMapperFn(
                  (SerializableFunction<WriteRequest, KV<String, WriteRequest>>)
                      writeRequest -> KV.of(tableName, writeRequest))
              .withRetryConfiguration(
                  DynamoDBIO.RetryConfiguration.create(4, Duration.standardSeconds(10)))
              .withAwsClientsProvider(AwsClientsProviderMock.of(amazonDynamoDBMock)));

  try {
    pipeline.run().waitUntilFinish();
  } catch (final Pipeline.PipelineExecutionException e) {
    // check 3 retries were initiated by inspecting the log before passing on the exception
    expectedLogs.verifyWarn(String.format(DynamoDBIO.Write.WriteFn.RETRY_ATTEMPT_LOG, 1));
    expectedLogs.verifyWarn(String.format(DynamoDBIO.Write.WriteFn.RETRY_ATTEMPT_LOG, 2));
    expectedLogs.verifyWarn(String.format(DynamoDBIO.Write.WriteFn.RETRY_ATTEMPT_LOG, 3));
    throw e.getCause();
  }
  fail("Pipeline is expected to fail because we were unable to write to DynamoDB.");
}
 
Example #6
Source File: DynamoDBConsumerWorker.java    From dynamodb-import-export-tool with Apache License 2.0
/**
 * Callable class that, when called, will try to write a batch to a DynamoDB
 * table. If the write returns unprocessed items, it backs off exponentially
 * until the batch succeeds.
 */
public DynamoDBConsumerWorker(BatchWriteItemRequest batchWriteItemRequest,
        AmazonDynamoDBClient client, RateLimiter rateLimiter,
        String tableName) {
    this.batch = batchWriteItemRequest;
    this.client = client;
    this.rateLimiter = rateLimiter;
    this.tableName = tableName;
    this.exponentialBackoffTime = BootstrapConstants.INITIAL_RETRY_TIME_MILLISECONDS;
}
 
Example #7
Source File: DynamoDBConsumerWorker.java    From dynamodb-import-export-tool with Apache License 2.0
/**
 * Writes to a DynamoDB table using exponential backoff. If batchWriteItem
 * returns unprocessed items, it backs off exponentially and retries those
 * items.
 */
public List<ConsumedCapacity> runWithBackoff(BatchWriteItemRequest req) {
    BatchWriteItemResult writeItemResult = null;
    List<ConsumedCapacity> consumedCapacities = new LinkedList<ConsumedCapacity>();
    Map<String, List<WriteRequest>> unprocessedItems = null;
    boolean interrupted = false;
    try {
        do {
            writeItemResult = client.batchWriteItem(req);
            unprocessedItems = writeItemResult.getUnprocessedItems();
            consumedCapacities
                    .addAll(writeItemResult.getConsumedCapacity());

            if (unprocessedItems != null) {
                req.setRequestItems(unprocessedItems);
                try {
                    Thread.sleep(exponentialBackoffTime);
                } catch (InterruptedException ie) {
                    interrupted = true;
                } finally {
                    exponentialBackoffTime *= 2;
                    if (exponentialBackoffTime > BootstrapConstants.MAX_EXPONENTIAL_BACKOFF_TIME) {
                        exponentialBackoffTime = BootstrapConstants.MAX_EXPONENTIAL_BACKOFF_TIME;
                    }
                }
            }
        } while (unprocessedItems != null && unprocessedItems.get(tableName) != null);
        return consumedCapacities;
    } finally {
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
    }
}
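A subtlety worth noting: in the v1 SDK a successful BatchWriteItemResult typically carries an empty (not null) UnprocessedItems map, so the loop usually exits through the unprocessedItems.get(tableName) != null check rather than the null check, and an empty map still incurs one final backoff sleep before the loop condition is evaluated.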
 
Example #8
Source File: DynamoDBConsumer.java    From dynamodb-import-export-tool with Apache License 2.0
/**
 * Splits up a ScanResult into a list of BatchWriteItemRequests of at most 25
 * items each (the BatchWriteItem service limit).
 */
public static List<BatchWriteItemRequest> splitResultIntoBatches(
        ScanResult result, String tableName) {
    List<BatchWriteItemRequest> batches = new LinkedList<BatchWriteItemRequest>();
    Iterator<Map<String, AttributeValue>> it = result.getItems().iterator();

    BatchWriteItemRequest req = new BatchWriteItemRequest()
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);
    List<WriteRequest> writeRequests = new LinkedList<WriteRequest>();
    int i = 0;
    while (it.hasNext()) {
        PutRequest put = new PutRequest(it.next());
        writeRequests.add(new WriteRequest(put));

        i++;
        if (i == BootstrapConstants.MAX_BATCH_SIZE_WRITE_ITEM) {
            req.addRequestItemsEntry(tableName, writeRequests);
            batches.add(req);
            req = new BatchWriteItemRequest()
                    .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);
            writeRequests = new LinkedList<WriteRequest>();
            i = 0;
        }
    }
    if (i > 0) {
        req.addRequestItemsEntry(tableName, writeRequests);
        batches.add(req);
    }
    return batches;
}
 
Example #9
Source File: DynamoDBConsumerTest.java    From dynamodb-import-export-tool with Apache License 2.0
/**
 * Test that a ScanResult splits into the correct number of batches.
 */
@Test
public void splitResultIntoBatchesTest() {
    final double numItems = 111.0;

    String tableName = "test tableName";

    ScanResult scanResult = new ScanResult();
    List<Map<String, AttributeValue>> items = new LinkedList<Map<String, AttributeValue>>();
    for (int i = 0; i < numItems; i++) {
        Map<String, AttributeValue> sampleScanResult = new HashMap<String, AttributeValue>();
        sampleScanResult.put("key", new AttributeValue("attribute value "
                + i));
        items.add(sampleScanResult);
    }
    scanResult.setItems(items);

    SegmentedScanResult result = new SegmentedScanResult(scanResult, 0);

    replayAll();
    List<BatchWriteItemRequest> batches = DynamoDBConsumer
            .splitResultIntoBatches(result.getScanResult(), tableName);
    assertEquals(Math.ceil(numItems / BootstrapConstants.MAX_BATCH_SIZE_WRITE_ITEM),
            batches.size(), 0.0);

    verifyAll();
}
 
Example #10
Source File: TransactionDynamoDBFacade.java    From dynamodb-transactions with Apache License 2.0
@Override
public BatchWriteItemResult batchWriteItem(BatchWriteItemRequest arg0)
        throws AmazonServiceException, AmazonClientException {
    throw new UnsupportedOperationException("Use the underlying client instance instead");
}
 
Example #11
Source File: PostgresDynamoDB.java    From podyn with Apache License 2.0
@Override
public BatchWriteItemResult batchWriteItem(BatchWriteItemRequest batchWriteItemRequest) {
	throw new UnsupportedOperationException();
}
 
Example #12
Source File: ThreadLocalDynamoDBFacade.java    From dynamodb-transactions with Apache License 2.0
@Override
public BatchWriteItemResult batchWriteItem(BatchWriteItemRequest request) throws AmazonServiceException, AmazonClientException {
    return getBackend().batchWriteItem(request);
}
 
Example #13
Source File: TransactionManagerDynamoDBFacade.java    From dynamodb-transactions with Apache License 2.0
@Override
public BatchWriteItemResult batchWriteItem(BatchWriteItemRequest arg0)
        throws AmazonServiceException, AmazonClientException {
    throw new UnsupportedOperationException("Use the underlying client instance instead");
}
 
Example #14
Source File: LowLevelBatchWrite.java    From aws-dynamodb-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put(table1Name, forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));
        threadItem.put("Message", new AttributeValue().withS("ElastiCache Thread 1 message"));
        threadItem.put("KeywordTags", new AttributeValue().withSS(Arrays.asList("cache", "in-memory")));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Amazon S3"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("S3 Thread 100"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put(table2Name, threadList);

        BatchWriteItemResult result;
        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest()
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);

        do {
            System.out.println("Making the request.");

            batchWriteItemRequest.withRequestItems(requestItems);
            result = client.batchWriteItem(batchWriteItemRequest);

            // Print consumed capacity units
            for (ConsumedCapacity consumedCapacity : result.getConsumedCapacity()) {
                String tableName = consumedCapacity.getTableName();
                Double consumedCapacityUnits = consumedCapacity.getCapacityUnits();
                System.out.println("Consumed capacity units for table " + tableName + ": " + consumedCapacityUnits);
            }

            // Check for unprocessed keys, which can happen if you exceed provisioned throughput
            System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems());
            requestItems = result.getUnprocessedItems();
        } while (result.getUnprocessedItems().size() > 0);

    } catch (AmazonServiceException ase) {
        System.err.println("Failed to write items: ");
        ase.printStackTrace(System.err);
    }

}
 
Example #15
Source File: LowLevelBatchWriteSyntax.java    From aws-dynamodb-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Begin syntax extract

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put("Forum", forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Some hash attribute value"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("Some range attribute value"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put("Thread", threadList);

        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();

        System.out.println("Making the request.");

        batchWriteItemRequest.withRequestItems(requestItems);
        client.batchWriteItem(batchWriteItemRequest);

        // End syntax extract

    } catch (AmazonServiceException ase) {
        System.err.println("Failed to write items: ");
        ase.printStackTrace(System.err);
    }

}
 
Example #16
Source File: DynamoDBWriter.java    From geowave with Apache License 2.0
private void writeBatch(final boolean async) {
  final List<WriteRequest> batch;

  // subList's toIndex is exclusive, so use NUM_ITEMS (not NUM_ITEMS + 1) to
  // take exactly the first NUM_ITEMS pending write requests.
  if (batchedItems.size() <= NUM_ITEMS) {
    batch = batchedItems;
  } else {
    batch = batchedItems.subList(0, NUM_ITEMS);
  }
  final Map<String, List<WriteRequest>> writes = new HashMap<>();
  writes.put(tableName, new ArrayList<>(batch));
  if (async) {

    /**
     * To support asynchronous batch writes, an async handler is created with
     * callbacks for success and error. Because there might be unprocessed
     * items on failure, those are retried asynchronously. Futures are tracked
     * so that they can be waited on during "flush".
     */
    final BatchWriteItemRequest batchRequest = new BatchWriteItemRequest(writes);
    final Future<BatchWriteItemResult> future =
        client.batchWriteItemAsync(
            batchRequest,
            new AsyncHandler<BatchWriteItemRequest, BatchWriteItemResult>() {

              @Override
              public void onError(final Exception exception) {
                LOGGER.warn(
                    "Unable to get response from Dynamo-Async Write " + exception.toString());
                futureMap.remove(batchRequest);
                return;
              }

              @Override
              public void onSuccess(
                  final BatchWriteItemRequest request,
                  final BatchWriteItemResult result) {
                retryAsync(result.getUnprocessedItems());
                if (futureMap.remove(request) == null) {
                  LOGGER.warn(" Unable to delete BatchWriteRequest from futuresMap ");
                }
              }
            });

    futureMap.put(batchRequest, future);
  } else {
    final BatchWriteItemResult response =
        client.batchWriteItem(new BatchWriteItemRequest(writes));
    retry(response.getUnprocessedItems());
  }

  batch.clear();
}
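When batchedItems holds more than NUM_ITEMS requests, batch is a subList view backed by batchedItems, so the final batch.clear() removes the just-submitted requests from the pending list while leaving the remainder queued for a later call.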
 
Example #17
Source File: DynamoDbDelegate.java    From dynamodb-janusgraph-storage-backend with Apache License 2.0
public BatchWriteItemResult batchWriteItem(final BatchWriteItemRequest batchRequest) throws BackendException {
    int count = 0;
    for (Entry<String, List<WriteRequest>> entry : batchRequest.getRequestItems().entrySet()) {
        final String tableName = entry.getKey();
        final List<WriteRequest> requests = entry.getValue();
        count += requests.size();
        if (count > BATCH_WRITE_MAX_NUMBER_OF_ITEMS) {
            throw new IllegalArgumentException("cant have more than 25 requests in a batchwrite");
        }
        for (final WriteRequest request : requests) {
            if ((request.getPutRequest() != null) == (request.getDeleteRequest() != null)) {
                throw new IllegalArgumentException("Exactly one of PutRequest or DeleteRequest must be set in each WriteRequest in a batch write operation");
            }
            final int wcu;
            final String apiName;
            if (request.getPutRequest() != null) {
                apiName = PUT_ITEM;
                final int bytes = calculateItemSizeInBytes(request.getPutRequest().getItem());
                wcu = computeWcu(bytes);
            } else { //deleterequest
                apiName = DELETE_ITEM;
                wcu = estimateCapacityUnits(apiName, tableName);
            }
            timedWriteThrottle(apiName, tableName, wcu);
        }
    }

    BatchWriteItemResult result;
    setUserAgent(batchRequest);
    final Timer.Context apiTimerContext = getTimerContext(BATCH_WRITE_ITEM, null /*tableName*/);
    try {
        result = client.batchWriteItem(batchRequest);
    } catch (Exception e) {
        throw processDynamoDbApiException(e, BATCH_WRITE_ITEM, null /*tableName*/);
    } finally {
        apiTimerContext.stop();
    }
    if (result.getConsumedCapacity() != null) {
        for (ConsumedCapacity ccu : result.getConsumedCapacity()) {
            meterConsumedCapacity(BATCH_WRITE_ITEM, ccu);
        }
    }
    return result;
}
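BATCH_WRITE_MAX_NUMBER_OF_ITEMS reflects DynamoDB's hard limit of 25 put/delete requests per BatchWriteItem call, and the per-request validation mirrors the service rule that each WriteRequest must contain exactly one of PutRequest or DeleteRequest. The timedWriteThrottle call before each request is this delegate's own client-side rate limiting, driven by an estimate of write capacity units (WCU).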
 
Example #18
Source File: DynamoDBIOTestHelper.java    From beam with Apache License 2.0
static BatchWriteItemRequest generateBatchWriteItemRequest(String tableName, int numOfItems) {
  BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();
  batchWriteItemRequest.addRequestItemsEntry(tableName, generateWriteRequests(numOfItems));
  return batchWriteItemRequest;
}
 
Example #19
Source File: DynamoDBIO.java    From beam with Apache License 2.0
private void flushBatch() throws IOException, InterruptedException {
  if (batch.isEmpty()) {
    return;
  }

  try {
    // Since each element is a KV<tableName, writeRequest> in the batch, we need to group them
    // by tableName
    Map<String, List<WriteRequest>> mapTableRequest =
        batch.stream()
            .collect(
                Collectors.groupingBy(
                    KV::getKey, Collectors.mapping(KV::getValue, Collectors.toList())));

    BatchWriteItemRequest batchRequest = new BatchWriteItemRequest();
    mapTableRequest
        .entrySet()
        .forEach(
            entry -> batchRequest.addRequestItemsEntry(entry.getKey(), entry.getValue()));

    Sleeper sleeper = Sleeper.DEFAULT;
    BackOff backoff = retryBackoff.backoff();
    int attempt = 0;
    while (true) {
      attempt++;
      try {
        client.batchWriteItem(batchRequest);
        break;
      } catch (Exception ex) {
        // Fail right away if there is no retry configuration
        if (spec.getRetryConfiguration() == null
            || !spec.getRetryConfiguration().getRetryPredicate().test(ex)) {
          DYNAMO_DB_WRITE_FAILURES.inc();
          LOG.info(
              "Unable to write batch items {} due to {} ",
              batchRequest.getRequestItems().entrySet(),
              ex);
          throw new IOException("Error writing to DynamoDB (no attempt made to retry)", ex);
        }

        if (!BackOffUtils.next(sleeper, backoff)) {
          throw new IOException(
              String.format(
                  "Error writing to DynamoDB after %d attempt(s). No more attempts allowed",
                  attempt),
              ex);
        } else {
          // Note: this is used in test cases to verify retry behavior
          LOG.warn(String.format(RETRY_ATTEMPT_LOG, attempt), ex);
        }
      }
    }
  } finally {
    batch.clear();
  }
}
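This WriteFn is the code exercised by the test in Example #5: RETRY_ATTEMPT_LOG is the warning message the test verifies, and retries stop when BackOffUtils.next reports that the backoff from the configured RetryConfiguration (attempt count and retry duration) is exhausted.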
 
Example #20
Source File: LowLevelBatchWrite.java    From aws-doc-sdk-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put(table1Name, forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));
        threadItem.put("Message", new AttributeValue().withS("ElastiCache Thread 1 message"));
        threadItem.put("KeywordTags", new AttributeValue().withSS(Arrays.asList("cache", "in-memory")));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Amazon S3"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("S3 Thread 100"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put(table2Name, threadList);

        BatchWriteItemResult result;
        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest()
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);

        do {
            System.out.println("Making the request.");

            batchWriteItemRequest.withRequestItems(requestItems);
            result = client.batchWriteItem(batchWriteItemRequest);

            // Print consumed capacity units
            for (ConsumedCapacity consumedCapacity : result.getConsumedCapacity()) {
                String tableName = consumedCapacity.getTableName();
                Double consumedCapacityUnits = consumedCapacity.getCapacityUnits();
                System.out.println("Consumed capacity units for table " + tableName + ": " + consumedCapacityUnits);
            }

            // Check for unprocessed keys which could happen if you exceed
            // provisioned throughput
            System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems());
            requestItems = result.getUnprocessedItems();
        } while (result.getUnprocessedItems().size() > 0);

    }
    catch (AmazonServiceException ase) {
        System.err.println("Failed to retrieve items: ");
        ase.printStackTrace(System.err);
    }

}
 
Example #21
Source File: LowLevelBatchWriteSyntax.java    From aws-doc-sdk-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Begin syntax extract

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put("Forum", forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Some partition key value"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("Some sort key value"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put("Thread", threadList);

        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();

        System.out.println("Making the request.");

        batchWriteItemRequest.withRequestItems(requestItems);
        client.batchWriteItem(batchWriteItemRequest);

        // End syntax extract

    }
    catch (AmazonServiceException ase) {
        System.err.println("Failed to retrieve items: ");
        ase.printStackTrace(System.err);
    }

}