com.amazonaws.services.dynamodbv2.model.DeleteRequest Java Examples

The following examples show how to use com.amazonaws.services.dynamodbv2.model.DeleteRequest. Each example lists the project and source file it was taken from, along with its license, so you can trace it back to the original code.
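Before the project-specific examples, here is a minimal, self-contained sketch of the pattern they all share: a DeleteRequest holds only the primary key of the item to remove, it is wrapped in a WriteRequest, and the requests are grouped by table name and sent with BatchWriteItem. The table name MyTable, the key attribute Id, and the value item-123 below are placeholders for illustration, not names taken from the examples.

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.model.AttributeValue;
import com.amazonaws.services.dynamodbv2.model.BatchWriteItemRequest;
import com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult;
import com.amazonaws.services.dynamodbv2.model.DeleteRequest;
import com.amazonaws.services.dynamodbv2.model.WriteRequest;

public class DeleteRequestSketch {

    public static void main(String[] args) {
        AmazonDynamoDB client = AmazonDynamoDBClientBuilder.defaultClient();

        // Primary key of the item to delete (placeholder attribute name and value)
        Map<String, AttributeValue> key = new HashMap<>();
        key.put("Id", new AttributeValue().withS("item-123"));

        // A DeleteRequest carries only the key; it is wrapped in a WriteRequest
        List<WriteRequest> writes = new ArrayList<>();
        writes.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(key)));

        // Write requests are grouped by table name for BatchWriteItem
        Map<String, List<WriteRequest>> requestItems = new HashMap<>();
        requestItems.put("MyTable", writes);

        BatchWriteItemResult result =
                client.batchWriteItem(new BatchWriteItemRequest().withRequestItems(requestItems));

        // Deletes that were not applied (for example, due to throttling) come back
        // here and should be resubmitted by the caller
        System.out.println("Unprocessed: " + result.getUnprocessedItems());
    }
}

Examples #6 and #8 below show the full retry loop for such unprocessed items.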
Example #1
Source File: DynamoDBOperations.java    From geowave with Apache License 2.0
public void deleteRowsFromDataIndex(
    final byte[][] dataIds,
    final short adapterId,
    final String typeName) {
  final String tableName =
      typeName + "_" + getQualifiedTableName(DataIndexUtils.DATA_ID_INDEX.getName());
  final Iterator<byte[]> dataIdIterator = Arrays.stream(dataIds).iterator();
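  // Issue the deletes in chunks so each BatchWriteItem call stays within MAX_ROWS_FOR_BATCHWRITER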
  while (dataIdIterator.hasNext()) {
    final List<WriteRequest> deleteRequests = new ArrayList<>();
    int i = 0;
    while (dataIdIterator.hasNext() && (i < MAX_ROWS_FOR_BATCHWRITER)) {
      deleteRequests.add(
          new WriteRequest(
              new DeleteRequest(
                  Collections.singletonMap(
                      DynamoDBRow.GW_PARTITION_ID_KEY,
                      new AttributeValue().withB(ByteBuffer.wrap(dataIdIterator.next()))))));
      i++;
    }

    client.batchWriteItem(Collections.singletonMap(tableName, deleteRequests));
  }
}
 
Example #2
Source File: DeleteDynamoDBTest.java    From localization_nifi with Apache License 2.0
@Test
public void testStringHashStringRangeDeleteSuccessfulWithMockOneUnprocessed() {
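    // Stub the batch write result with one unprocessed delete so the processor
    // routes the flow file to REL_UNPROCESSED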
    Map<String, List<WriteRequest>> unprocessed =
            new HashMap<String, List<WriteRequest>>();
    DeleteRequest delete = new DeleteRequest();
    delete.addKeyEntry("hashS", new AttributeValue("h1"));
    delete.addKeyEntry("rangeS", new AttributeValue("r1"));
    WriteRequest write = new WriteRequest(delete);
    List<WriteRequest> writes = new ArrayList<>();
    writes.add(write);
    unprocessed.put(stringHashStringRangeTableName, writes);
    result.setUnprocessedItems(unprocessed);
    final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

    deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    deleteRunner.enqueue(new byte[] {});

    deleteRunner.run(1);

    deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_UNPROCESSED, 1);

}
 
Example #3
Source File: DeleteDynamoDBTest.java    From nifi with Apache License 2.0
@Test
public void testStringHashStringRangeDeleteSuccessfulWithMockOneUnprocessed() {
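    // Stub the batch write result with one unprocessed delete so the processor
    // routes the flow file to REL_UNPROCESSED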
    Map<String, List<WriteRequest>> unprocessed =
            new HashMap<String, List<WriteRequest>>();
    DeleteRequest delete = new DeleteRequest();
    delete.addKeyEntry("hashS", new AttributeValue("h1"));
    delete.addKeyEntry("rangeS", new AttributeValue("r1"));
    WriteRequest write = new WriteRequest(delete);
    List<WriteRequest> writes = new ArrayList<>();
    writes.add(write);
    unprocessed.put(stringHashStringRangeTableName, writes);
    result.setUnprocessedItems(unprocessed);
    final TestRunner deleteRunner = TestRunners.newTestRunner(deleteDynamoDB);

    deleteRunner.setProperty(AbstractDynamoDBProcessor.ACCESS_KEY, "abcd");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.SECRET_KEY, "cdef");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.REGION, REGION);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.TABLE, stringHashStringRangeTableName);
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_NAME, "hashS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.HASH_KEY_VALUE, "h1");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_NAME, "rangeS");
    deleteRunner.setProperty(AbstractDynamoDBProcessor.RANGE_KEY_VALUE, "r1");
    deleteRunner.enqueue(new byte[] {});

    deleteRunner.run(1);

    deleteRunner.assertAllFlowFilesTransferred(AbstractDynamoDBProcessor.REL_UNPROCESSED, 1);

}
 
Example #4
Source File: DynamoDBClient.java    From emr-dynamodb-connector with Apache License 2.0
public BatchWriteItemResult putBatch(String tableName, Map<String, AttributeValue> item,
    long maxItemsPerBatch, Reporter reporter, boolean deletionMode)
    throws UnsupportedEncodingException {

  int itemSizeBytes = DynamoDBUtil.getItemSizeBytes(item);
  if (itemSizeBytes > maxItemByteSize) {
    throw new RuntimeException("Cannot pass items with size greater than " + maxItemByteSize
        + ". Item with size of " + itemSizeBytes + " was given.");
  }
  maxItemsPerBatch = DynamoDBUtil.getBoundedBatchLimit(config, maxItemsPerBatch);
  BatchWriteItemResult result = null;
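  // If writes are already buffered for this table, flush them first when the per-table
  // item limit or the total buffered byte size would be exceeded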
  if (writeBatchMap.containsKey(tableName)) {

    boolean writeRequestsForTableAtLimit =
        writeBatchMap.get(tableName).size() >= maxItemsPerBatch;

    boolean totalSizeOfWriteBatchesOverLimit =
        writeBatchMapSizeBytes + itemSizeBytes > maxBatchSize;

    if (writeRequestsForTableAtLimit || totalSizeOfWriteBatchesOverLimit) {
      result = writeBatch(reporter, itemSizeBytes);
    }
  }
  // writeBatchMap could be cleared from writeBatch()
  List<WriteRequest> writeBatchList;
  if (!writeBatchMap.containsKey(tableName)) {
    writeBatchList = new ArrayList<>((int) maxItemsPerBatch);
    writeBatchMap.put(tableName, writeBatchList);
  } else {
    writeBatchList = writeBatchMap.get(tableName);
  }

  log.debug("BatchWriteItem deletionMode " + deletionMode);

  if (deletionMode) {
    writeBatchList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(item)));
  } else {
    writeBatchList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(item)));
  }

  writeBatchMapSizeBytes += itemSizeBytes;

  return result;
}
 
Example #5
Source File: LowLevelBatchWriteSyntax.java    From aws-doc-sdk-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Begin syntax extract

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put("Forum", forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Some partition key value"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("Some sort key value"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put("Thread", threadList);

        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();

        System.out.println("Making the request.");

        batchWriteItemRequest.withRequestItems(requestItems);
        client.batchWriteItem(batchWriteItemRequest);

        // End syntax extract

    }
    catch (AmazonServiceException ase) {
        System.err.println("Failed to retrieve items: ");
        ase.printStackTrace(System.err);
    }

}
 
Example #6
Source File: LowLevelBatchWrite.java    From aws-doc-sdk-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put(table1Name, forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));
        threadItem.put("Message", new AttributeValue().withS("ElastiCache Thread 1 message"));
        threadItem.put("KeywordTags", new AttributeValue().withSS(Arrays.asList("cache", "in-memory")));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Amazon S3"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("S3 Thread 100"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put(table2Name, threadList);

        BatchWriteItemResult result;
        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest()
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);

        do {
            System.out.println("Making the request.");

            batchWriteItemRequest.withRequestItems(requestItems);
            result = client.batchWriteItem(batchWriteItemRequest);

            // Print consumed capacity units
            for (ConsumedCapacity consumedCapacity : result.getConsumedCapacity()) {
                String tableName = consumedCapacity.getTableName();
                Double consumedCapacityUnits = consumedCapacity.getCapacityUnits();
                System.out.println("Consumed capacity units for table " + tableName + ": " + consumedCapacityUnits);
            }

            // Check for unprocessed keys which could happen if you exceed
            // provisioned throughput
            System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems());
            requestItems = result.getUnprocessedItems();
        } while (result.getUnprocessedItems().size() > 0);

    }
    catch (AmazonServiceException ase) {
        System.err.println("Failed to retrieve items: ");
        ase.printStackTrace(System.err);
    }

}
 
Example #7
Source File: LowLevelBatchWriteSyntax.java    From aws-dynamodb-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Begin syntax extract

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put("Forum", forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Some hash attribute value"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("Some range attribute value"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put("Thread", threadList);

        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();

        System.out.println("Making the request.");

        batchWriteItemRequest.withRequestItems(requestItems);
        client.batchWriteItem(batchWriteItemRequest);

        // End syntax extract

    }
    catch (AmazonServiceException ase) {
        System.err.println("Failed to retrieve items: ");
        ase.printStackTrace(System.err);
    }

}
 
Example #8
Source File: LowLevelBatchWrite.java    From aws-dynamodb-examples with Apache License 2.0
private static void writeMultipleItemsBatchWrite() {
    try {

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Create a PutRequest for a new Forum item
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put("Name", new AttributeValue().withS("Amazon RDS"));
        forumItem.put("Threads", new AttributeValue().withN("0"));

        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));
        requestItems.put(table1Name, forumList);

        // Create a PutRequest for a new Thread item
        Map<String, AttributeValue> threadItem = new HashMap<String, AttributeValue>();
        threadItem.put("ForumName", new AttributeValue().withS("Amazon RDS"));
        threadItem.put("Subject", new AttributeValue().withS("Amazon RDS Thread 1"));
        threadItem.put("Message", new AttributeValue().withS("ElastiCache Thread 1 message"));
        threadItem.put("KeywordTags", new AttributeValue().withSS(Arrays.asList("cache", "in-memory")));

        List<WriteRequest> threadList = new ArrayList<WriteRequest>();
        threadList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(threadItem)));

        // Create a DeleteRequest for a Thread item
        Map<String, AttributeValue> threadDeleteKey = new HashMap<String, AttributeValue>();
        threadDeleteKey.put("ForumName", new AttributeValue().withS("Amazon S3"));
        threadDeleteKey.put("Subject", new AttributeValue().withS("S3 Thread 100"));

        threadList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(threadDeleteKey)));
        requestItems.put(table2Name, threadList);

        BatchWriteItemResult result;
        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest()
            .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL);

        do {
            System.out.println("Making the request.");

            batchWriteItemRequest.withRequestItems(requestItems);
            result = client.batchWriteItem(batchWriteItemRequest);

            // Print consumed capacity units
            for (ConsumedCapacity consumedCapacity : result.getConsumedCapacity()) {
                String tableName = consumedCapacity.getTableName();
                Double consumedCapacityUnits = consumedCapacity.getCapacityUnits();
                System.out.println("Consumed capacity units for table " + tableName + ": " + consumedCapacityUnits);
            }

            // Check for unprocessed keys which could happen if you exceed provisioned throughput
            System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems());
            requestItems = result.getUnprocessedItems();
        } while (result.getUnprocessedItems().size() > 0);

    }
    catch (AmazonServiceException ase) {
        System.err.println("Failed to retrieve items: ");
        ase.printStackTrace(System.err);
    }

}