com.amazonaws.services.dynamodbv2.model.TableDescription Java Examples

The following examples show how to use com.amazonaws.services.dynamodbv2.model.TableDescription. Each example is drawn from an open-source project; the source file, project, and license are noted above each snippet.
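
Before the project examples, here is a minimal, self-contained sketch of how a TableDescription is typically obtained and inspected with the AWS SDK for Java v1. The client configuration and the table name "Movies" are placeholders chosen for illustration, not taken from any example below.

import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.model.DescribeTableRequest;
import com.amazonaws.services.dynamodbv2.model.TableDescription;

public class DescribeTableSketch {
    public static void main(String[] args) {
        // Placeholder client; real code would configure region/credentials as needed.
        AmazonDynamoDB client = AmazonDynamoDBClientBuilder.defaultClient();

        // DescribeTable returns the table metadata as a TableDescription.
        DescribeTableRequest request = new DescribeTableRequest().withTableName("Movies");
        TableDescription description = client.describeTable(request).getTable();

        System.out.println("Status: " + description.getTableStatus());
        System.out.println("Key schema: " + description.getKeySchema());
        System.out.println("Read capacity: "
                + description.getProvisionedThroughput().getReadCapacityUnits());

        client.shutdown();
    }
}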
Example #1
Source File: DynamoDBIOTestHelper.java    From beam with Apache License 2.0
static void createTestTable(String tableName) {
  CreateTableResult res = createDynamoTable(tableName);

  TableDescription tableDesc = res.getTableDescription();

  Assert.assertEquals(tableName, tableDesc.getTableName());
  Assert.assertTrue(tableDesc.getKeySchema().toString().contains(ATTR_NAME_1));
  Assert.assertTrue(tableDesc.getKeySchema().toString().contains(ATTR_NAME_2));

  Assert.assertEquals(
      tableDesc.getProvisionedThroughput().getReadCapacityUnits(), Long.valueOf(1000));
  Assert.assertEquals(
      tableDesc.getProvisionedThroughput().getWriteCapacityUnits(), Long.valueOf(1000));
  Assert.assertEquals("ACTIVE", tableDesc.getTableStatus());
  Assert.assertEquals(
      "arn:aws:dynamodb:us-east-1:000000000000:table/" + tableName, tableDesc.getTableArn());

  ListTablesResult tables = dynamoDBClient.listTables();
  Assert.assertEquals(1, tables.getTableNames().size());
}
 
Example #2
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaMappingMissingColumnMapping() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
   "col2:dynamo_col2#,hashKey:hashKey,hashMap:hashMap");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("hashMap", "map<string,string>", ""));
  sd.setCols(cols);
  table.setSd(sd);

  exceptionRule.expect(MetaException.class);
  exceptionRule.expectMessage("Could not find column(s) for column mapping(s): ");
  exceptionRule.expectMessage("col2:dynamo_col2#");
  exceptionRule.expectMessage("hashkey:hashKey");
  storageHandler.checkTableSchemaMapping(description, table);
}
 
Example #3
Source File: DynamoDBEmbeddedTest.java    From aws-dynamodb-examples with Apache License 2.0
@Test
public void createTableTest() {
    AmazonDynamoDB ddb = DynamoDBEmbedded.create().amazonDynamoDB();
    try {
        String tableName = "Movies";
        String hashKeyName = "film_id";
        CreateTableResult res = createTable(ddb, tableName, hashKeyName);

        TableDescription tableDesc = res.getTableDescription();
        assertEquals(tableName, tableDesc.getTableName());
        assertEquals("[{AttributeName: " + hashKeyName + ",KeyType: HASH}]", tableDesc.getKeySchema().toString());
        assertEquals("[{AttributeName: " + hashKeyName + ",AttributeType: S}]",
            tableDesc.getAttributeDefinitions().toString());
        assertEquals(Long.valueOf(1000L), tableDesc.getProvisionedThroughput().getReadCapacityUnits());
        assertEquals(Long.valueOf(1000L), tableDesc.getProvisionedThroughput().getWriteCapacityUnits());
        assertEquals("ACTIVE", tableDesc.getTableStatus());
        assertEquals("arn:aws:dynamodb:ddblocal:000000000000:table/Movies", tableDesc.getTableArn());

        ListTablesResult tables = ddb.listTables();
        assertEquals(1, tables.getTableNames().size());
    } finally {
        ddb.shutdown();
    }
}
 
Example #4
Source File: GenericDynamoDBTest.java    From strongbox with Apache License 2.0
@Test
public void testCreateTableWithWait() throws Exception {
    // Create fake responses from AWS. First response is still creating the table, second response the table
    // has become active.
    TableDescription creatingDescription = constructTableDescription(TableStatus.CREATING);
    TableDescription createdDescription = constructTableDescription(TableStatus.ACTIVE);
    CreateTableResult mockCreateResult = new CreateTableResult().withTableDescription(creatingDescription);
    DescribeTableResult mockDescribeResultCreating = new DescribeTableResult().withTable(creatingDescription);
    DescribeTableResult mockDescribeResultCreated = new DescribeTableResult().withTable(createdDescription);

    // Create the table.
    CreateTableRequest expectedRequest = dynamoDB.constructCreateTableRequest();
    when(mockDynamoDBClient.createTable(expectedRequest)).thenReturn(mockCreateResult);
    when(mockDynamoDBClient.describeTable(tableName)).thenReturn(mockDescribeResultCreating, mockDescribeResultCreated);
    assertEquals(dynamoDB.create(), TEST_ARN);

    verify(mockDynamoDBClient, times(1)).createTable(expectedRequest);
    verify(mockDynamoDBClient, times(2)).describeTable(tableName);
}
 
Example #5
Source File: LowLevelTableExample.java    From aws-dynamodb-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest()
                .withTableName(tableName);
        TableDescription tableDescription = client.describeTable(
                request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println("  - current state: " + tableStatus);
        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try { Thread.sleep(1000 * 20); } catch (Exception e) { }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #6
Source File: LowLevelLocalSecondaryIndexExample.java    From aws-dynamodb-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest()
                .withTableName(tableName);
        TableDescription tableDescription = client.describeTable(
                request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println("  - current state: " + tableStatus);
        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try { Thread.sleep(1000 * 20); } catch (Exception e) { }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #7
Source File: LowLevelGlobalSecondaryIndexExample.java    From aws-dynamodb-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);

    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = 
            new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = 
            client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();

        System.out.println("  - current state: " + tableStatus);

        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try {
            Thread.sleep(1000 * 20);
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #8
Source File: LowLevelParallelScan.java    From aws-dynamodb-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest()
                .withTableName(tableName);
        TableDescription tableDescription = client.describeTable(
                request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println("  - current state: " + tableStatus);
        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try { Thread.sleep(1000 * 20); } catch (Exception e) { }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #9
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaMappingMissingColumn() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$,hashMap:hashMap");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "tinyint", ""));
  cols.add(new FieldSchema("col3", "string", ""));
  cols.add(new FieldSchema("hashMap", "map<string,string>", ""));
  sd.setCols(cols);
  table.setSd(sd);

  exceptionRule.expect(MetaException.class);
  exceptionRule.expectMessage("Could not find column mapping for column: col2");
  storageHandler.checkTableSchemaMapping(description, table);
}
 
Example #10
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaMappingValid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
      "col2:dynamo_col2#,hashKey:hashKey");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "bigint", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);
  storageHandler.checkTableSchemaMapping(description, table);
}
 
Example #11
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaTypeInvalidType() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
      "col2:dynamo_col2#,hashKey:hashKey");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "tinyint", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);

  exceptionRule.expect(MetaException.class);
  exceptionRule.expectMessage("The hive type tinyint is not supported in DynamoDB");
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #12
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaTypeValid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
      "col2:dynamo_col2#,hashKey:hashKey");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "bigint", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);
  // This check is expected to pass for the given input
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #13
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckListTableSchemaTypeValid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
          "col2:dynamo_col2#,col3:dynamo_col3#,col4:dynamo_col4#,col5:dynamo_col5#," +
          "col6:dynamo_col6#,col7:dynamo_col7#,hashKey:hashKey");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "map<string,bigint>", ""));
  cols.add(new FieldSchema("col2", "array<map<string,bigint>>", ""));
  cols.add(new FieldSchema("col3", "array<map<string,double>>", ""));
  cols.add(new FieldSchema("col4", "array<map<string,string>>", ""));
  cols.add(new FieldSchema("col5", "array<bigint>", ""));
  cols.add(new FieldSchema("col6", "array<double>", ""));
  cols.add(new FieldSchema("col7", "array<string>", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);
  // This check is expected to pass for the given input
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #14
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckStructTableSchemaTypeInvalid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
      "col2:dynamo_col2#,hashKey:hashKey");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "struct<bignum:bigint,smallnum:tinyint>", ""));
  cols.add(new FieldSchema("col2", "array<map<string,bigint>>", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);

  exceptionRule.expect(MetaException.class);
  exceptionRule.expectMessage("The hive type struct<bignum:bigint,smallnum:tinyint> is not " +
      "supported in DynamoDB");
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #15
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckStructTableSchemaTypeValid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
      "col2:dynamo_col2#,hashKey:hashKey");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "struct<numarray:array<bigint>,num:double>", ""));
  cols.add(new FieldSchema("col2", "array<struct<numarray:array<bigint>,num:double>>", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);
  // This check is expected to pass for the given input
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #16
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaTypeMappingInvalid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
          "col2:dynamo_col2#,hashKey:hashKey");
  parameters.put(DynamoDBConstants.DYNAMODB_TYPE_MAPPING, "col2:NS");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "bigint", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);

  exceptionRule.expect(MetaException.class);
  exceptionRule.expectMessage("The DynamoDB type NS does not support Hive type bigint");
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #17
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaTypeMappingValid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
          "col2:dynamo_col2#,hashKey:hashKey");
  parameters.put(DynamoDBConstants.DYNAMODB_TYPE_MAPPING, "col2:NS");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "array<bigint>", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);

  // This check is expected to pass for the given input
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #18
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaNullSerializationValid() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
      "col2:dynamo_col2#,hashKey:hashKey");
  parameters.put(DynamoDBConstants.DYNAMODB_NULL_SERIALIZATION, "true");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "array<bigint>", ""));
  cols.add(new FieldSchema("hashKey", "string", ""));
  sd.setCols(cols);
  table.setSd(sd);

  // This check is expected to pass for the given input
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #19
Source File: DynamoDBClient.java    From emr-dynamodb-connector with Apache License 2.0
public TableDescription describeTable(String tableName) {
  final DescribeTableRequest describeTablesRequest = new DescribeTableRequest()
      .withTableName(tableName);
  try {
    RetryResult<DescribeTableResult> describeResult = getRetryDriver().runWithRetry(
        new Callable<DescribeTableResult>() {
          @Override
          public DescribeTableResult call() {
            DescribeTableResult result = dynamoDB.describeTable(describeTablesRequest);
            log.info("Describe table output: " + result);
            return result;
          }
        }, null, null);
    return describeResult.result.getTable();
  } catch (Exception e) {
    throw new RuntimeException("Could not lookup table " + tableName + " in DynamoDB.", e);
  }
}
 
Example #20
Source File: WriteIopsCalculatorTest.java    From emr-dynamodb-connector with Apache License 2.0
@Before
public void setup() {
  when(dynamoDBClient.describeTable(TABLE_NAME)).thenReturn(new TableDescription()
      .withBillingModeSummary(
          new BillingModeSummary().withBillingMode(DynamoDBConstants.BILLING_MODE_PROVISIONED))
      .withProvisionedThroughput(
          new ProvisionedThroughputDescription().withWriteCapacityUnits(WRITE_CAPACITY_UNITS)));

  JobConf jobConf = new JobConf();
  jobConf.setNumMapTasks(TOTAL_MAP_TASKS);
  jobConf.set("mapreduce.task.attempt.id", "attempt_m_1");
  jobConf.set(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT, String.valueOf
      (THROUGHPUT_WRITE_PERCENT));
  when(jobClient.getConf()).thenReturn(jobConf);

  writeIopsCalculator = new WriteIopsCalculator(jobClient, dynamoDBClient, TABLE_NAME) {
    @Override
    int calculateMaxMapTasks(int totalMapTasks) {
      return MAX_CONCURRENT_MAP_TASKS;
    }
  };
}
 
Example #21
Source File: DynamoDBRecordReaderTest.java    From emr-dynamodb-connector with Apache License 2.0
private TableDescription getTableDescription(String hashType, String rangeType) {
  List<KeySchemaElement> keySchema = new ArrayList<>();
  List<AttributeDefinition> definitions = new ArrayList<>();

  keySchema.add(new KeySchemaElement().withAttributeName("hashKey").withKeyType(KeyType.HASH));
  definitions.add(new AttributeDefinition().withAttributeName("hashKey").withAttributeType
      (hashType));

  if (rangeType != null) {
    keySchema.add(new KeySchemaElement().withAttributeName("rangeKey").withKeyType(KeyType
        .RANGE));
    definitions.add(new AttributeDefinition().withAttributeName("rangeKey").withAttributeType
        (rangeType));
  }

  TableDescription description = new TableDescription().withKeySchema(keySchema)
      .withAttributeDefinitions(definitions).withProvisionedThroughput(new
          ProvisionedThroughputDescription().withReadCapacityUnits(1000L)
          .withWriteCapacityUnits(1000L));
  return description;
}
 
Example #22
Source File: DynamoDBBootstrapWorker.java    From dynamodb-import-export-tool with Apache License 2.0
/**
 * Returns the approximate number of segments a table should be broken into
 * when parallel scanning. The estimate uses whichever yields more segments:
 * the provisioned read and write capacity, which allow a faster scan, or
 * the table size, which requires more segments so the parallel scan stays
 * balanced and no worker finishes long before the others.
 *
 * @throws NullReadCapacityException
 *             if the table returns null read capacity units.
 */
public static int getNumberOfSegments(TableDescription description)
        throws NullReadCapacityException {
    ProvisionedThroughputDescription provisionedThroughput = description
            .getProvisionedThroughput();
    double tableSizeInGigabytes = Math.ceil(description.getTableSizeBytes()
            / BootstrapConstants.GIGABYTE);
    Long readCapacity = provisionedThroughput.getReadCapacityUnits();
    Long writeCapacity = provisionedThroughput.getWriteCapacityUnits();
    if (writeCapacity == null) {
        writeCapacity = 1L;
    }
    if (readCapacity == null) {
        throw new NullReadCapacityException(
                "Cannot scan with a null readCapacity provisioned throughput");
    }
    double throughput = (readCapacity + 3 * writeCapacity) / 3000.0;
    return (int) (10 * Math.max(Math.ceil(throughput),
            Math.ceil(tableSizeInGigabytes) / 10));
}
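
As a usage note, the segment count above is typically derived from a DescribeTable call. The sketch below shows one way the two could be wired together; the client setup, the table name "Movies", and the DynamoDBBootstrapWorker import path are assumptions for illustration, not taken from the project above.

import com.amazonaws.dynamodb.bootstrap.DynamoDBBootstrapWorker; // assumed package path from dynamodb-import-export-tool
import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
import com.amazonaws.services.dynamodbv2.AmazonDynamoDBClientBuilder;
import com.amazonaws.services.dynamodbv2.model.TableDescription;

public class SegmentCountSketch {
    public static void main(String[] args) throws Exception {
        AmazonDynamoDB ddb = AmazonDynamoDBClientBuilder.defaultClient();

        // Describe the table, then size the parallel scan from its
        // provisioned throughput and table size.
        TableDescription description = ddb.describeTable("Movies").getTable();
        int segments = DynamoDBBootstrapWorker.getNumberOfSegments(description);
        System.out.println("Scanning with " + segments + " segments");

        ddb.shutdown();
    }
}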
 
Example #23
Source File: DynamoDSETranslatorJSONBlob.java    From dynamo-cassandra-proxy with Apache License 2.0
@Override
public DynamoDBResponse deleteTable(DeleteTableRequest deleteTableRequest) {
    logger.info("deleting JSON table");

    String keyspace = keyspaceName;
    String table = deleteTableRequest.getTableName();
    String statement = String.format("DROP TABLE %s.\"%s\";\n", keyspace, table);
    ResultSet result = session().execute(statement);
    if (result.wasApplied()) {

        logger.info("deleted table " + table);
        cassandraManager.refreshSchema();

        TableDescription newTableDesc = this.getTableDescription(table, null, null);
        DeleteTableResult createResult = (new DeleteTableResult()).withTableDescription(newTableDesc);
        return new DynamoDBResponse(createResult, 200);
    }
    return null;
}
 
Example #24
Source File: LowLevelTableExample.java    From aws-doc-sdk-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println("  - current state: " + tableStatus);
        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try {
            Thread.sleep(1000 * 20);
        }
        catch (Exception e) {
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #25
Source File: LowLevelLocalSecondaryIndexExample.java    From aws-doc-sdk-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println("  - current state: " + tableStatus);
        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try {
            Thread.sleep(1000 * 20);
        }
        catch (Exception e) {
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #26
Source File: LowLevelGlobalSecondaryIndexExample.java    From aws-doc-sdk-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);

    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();

        System.out.println("  - current state: " + tableStatus);

        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try {
            Thread.sleep(1000 * 20);
        }
        catch (Exception e) {
            e.printStackTrace();
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #27
Source File: LowLevelParallelScan.java    From aws-doc-sdk-examples with Apache License 2.0
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");

    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println("  - current state: " + tableStatus);
        if (tableStatus.equals(TableStatus.ACTIVE.toString()))
            return;
        try {
            Thread.sleep(1000 * 20);
        }
        catch (Exception e) {
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
 
Example #28
Source File: DynamoDbDelegate.java    From dynamodb-janusgraph-storage-backend with Apache License 2.0
void createTableAndWaitForActive(final CreateTableRequest request) throws BackendException {
    final String tableName = request.getTableName();
    Preconditions.checkArgument(!Strings.isNullOrEmpty(tableName), "Table name was null or empty");
    final TableDescription desc;
    try {
        desc = this.describeTable(tableName);
        if (null != desc && isTableAcceptingWrites(desc.getTableStatus())) {
            return; //store existed
        }
    } catch (BackendNotFoundException e) {
        log.debug(tableName + " did not exist yet, creating it", e);
    }

    createTable(request);
    waitForTableCreation(tableName, false /*verifyIndexesList*/, null /*expectedLsiList*/, null /*expectedGsiList*/);
}
 
Example #29
Source File: DynamoDBStorageHandlerTest.java    From emr-dynamodb-connector with Apache License 2.0
@Test
public void testCheckTableSchemaTypeInvalidHashKeyType() throws MetaException {
  TableDescription description = getHashRangeTable();

  Table table = new Table();
  Map<String, String> parameters = Maps.newHashMap();
  parameters.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$," +
      "col2:dynamo_col2#,hashKey:hashKey");
  table.setParameters(parameters);
  StorageDescriptor sd = new StorageDescriptor();
  List<FieldSchema> cols = Lists.newArrayList();
  cols.add(new FieldSchema("col1", "string", ""));
  cols.add(new FieldSchema("col2", "bigint", ""));
  cols.add(new FieldSchema("hashKey", "map<string,string>", ""));
  sd.setCols(cols);
  table.setSd(sd);

  exceptionRule.expect(MetaException.class);
  exceptionRule.expectMessage("The key element hashKey does not match type. DynamoDB Type: S " +
      "Hive type: " + "map<string,string>");
  storageHandler.checkTableSchemaType(description, table);
}
 
Example #30
Source File: DocumentAPITableExample.java    From aws-dynamodb-examples with Apache License 2.0
static void getTableInformation() {

    System.out.println("Describing " + tableName);

    TableDescription tableDescription = dynamoDB.getTable(tableName).describe();
    System.out.format("Name: %s:\n" + "Status: %s \n"
            + "Provisioned Throughput (read capacity units/sec): %d \n"
            + "Provisioned Throughput (write capacity units/sec): %d \n",
        tableDescription.getTableName(),
        tableDescription.getTableStatus(),
        tableDescription.getProvisionedThroughput().getReadCapacityUnits(),
        tableDescription.getProvisionedThroughput().getWriteCapacityUnits());
}