下面列出了怎么用com.amazonaws.services.dynamodbv2.model.TableDescription的API类实例代码及写法,或者点击链接到github查看源代码。
/**
 * Drops the backing Cassandra table for the given DynamoDB table name.
 *
 * @param deleteTableRequest request carrying the name of the table to drop
 * @return a 200 {@code DynamoDBResponse} wrapping a {@link DeleteTableResult},
 *         or {@code null} when the DROP statement was not applied
 */
@Override
public DynamoDBResponse deleteTable(DeleteTableRequest deleteTableRequest) {
    logger.info("deleting JSON table");
    String keyspace = keyspaceName;
    String table = deleteTableRequest.getTableName();
    // The table name is double-quoted to preserve its case in Cassandra.
    String statement = String.format("DROP TABLE %s.\"%s\";\n", keyspace, table);
    ResultSet result = session().execute(statement);
    if (result.wasApplied()) {
        logger.info("deleted table " + table);
        cassandraManager.refreshSchema();
        TableDescription newTableDesc = this.getTableDescription(table, null, null);
        // Renamed from "createResult": this is the delete path, not create.
        DeleteTableResult deleteResult = (new DeleteTableResult()).withTableDescription(newTableDesc);
        return new DynamoDBResponse(deleteResult, 200);
    }
    // Surface the failure in the logs instead of silently returning null.
    logger.warn("DROP TABLE was not applied for " + table);
    return null;
}
/**
 * Creates a Cassandra table mirroring the requested DynamoDB table: one column
 * per attribute definition plus a {@code json_blob text} column for the item body.
 *
 * @param createTableRequest request carrying table name, attribute definitions and key schema
 * @return a 200 {@code DynamoDBResponse} wrapping a {@link CreateTableResult},
 *         or {@code null} when the CREATE statement was not applied
 * @throws IOException propagated from key-schema translation
 */
@Override
public DynamoDBResponse createTable(CreateTableRequest createTableRequest) throws IOException {
    logger.info("creating JSON table");
    String columnPairs = createTableRequest.getAttributeDefinitions().stream()
        .map(this::attributeToPairs)
        .collect(Collectors.joining(", "));
    // Guard against a leading comma (invalid CQL) when there are no attribute definitions.
    columnPairs = columnPairs.isEmpty() ? "json_blob text" : columnPairs + ",json_blob text";
    String keyspace = keyspaceName;
    String table = createTableRequest.getTableName();
    String primaryKey = getPrimaryKey(createTableRequest.getKeySchema());
    // The table name is double-quoted to preserve its case in Cassandra.
    String statement = String.format("CREATE TABLE IF NOT EXISTS %s.\"%s\" ( %s, PRIMARY KEY %s);\n", keyspace, table, columnPairs, primaryKey);
    ResultSet result = session().execute(statement);
    if (result.wasApplied()) {
        logger.info("created {} as {}", table, statement);
        cassandraManager.refreshSchema();
        TableDescription newTableDesc = this.getTableDescription(table, createTableRequest.getAttributeDefinitions(), createTableRequest.getKeySchema());
        CreateTableResult createResult = (new CreateTableResult()).withTableDescription(newTableDesc);
        return new DynamoDBResponse(createResult, 200);
    }
    return null;
}
/**
 * Verifies that create() issues one CreateTable call, then polls DescribeTable
 * until the table transitions from CREATING to ACTIVE, returning the table ARN.
 */
@Test
public void testCreateTableWithWait() throws Exception {
    // Create fake responses from AWS. First response is still creating the table, second response the table
    // has become active.
    TableDescription creatingDescription = constructTableDescription(TableStatus.CREATING);
    TableDescription createdDescription = constructTableDescription(TableStatus.ACTIVE);
    CreateTableResult mockCreateResult = new CreateTableResult().withTableDescription(creatingDescription);
    DescribeTableResult mockDescribeResultCreating = new DescribeTableResult().withTable(creatingDescription);
    DescribeTableResult mockDescribeResultCreated = new DescribeTableResult().withTable(createdDescription);
    // Create the table.
    CreateTableRequest expectedRequest = dynamoDB.constructCreateTableRequest();
    when(mockDynamoDBClient.createTable(expectedRequest)).thenReturn(mockCreateResult);
    when(mockDynamoDBClient.describeTable(tableName)).thenReturn(mockDescribeResultCreating, mockDescribeResultCreated);
    // JUnit's contract is assertEquals(expected, actual); the original had them swapped,
    // which produces misleading failure messages.
    assertEquals(TEST_ARN, dynamoDB.create());
    verify(mockDynamoDBClient, times(1)).createTable(expectedRequest);
    verify(mockDynamoDBClient, times(2)).describeTable(tableName);
}
/**
 * Smoke test against embedded DynamoDB: creates the "Movies" table and checks
 * every facet of the returned TableDescription, then confirms it is listed.
 */
@Test
public void createTableTest() {
    AmazonDynamoDB ddb = DynamoDBEmbedded.create().amazonDynamoDB();
    try {
        String tableName = "Movies";
        String hashKeyName = "film_id";

        CreateTableResult res = createTable(ddb, tableName, hashKeyName);
        TableDescription desc = res.getTableDescription();

        // Name, key schema and attribute definitions round-trip unchanged.
        assertEquals(tableName, desc.getTableName());
        assertEquals("[{AttributeName: " + hashKeyName + ",KeyType: HASH}]", desc.getKeySchema().toString());
        assertEquals("[{AttributeName: " + hashKeyName + ",AttributeType: S}]",
            desc.getAttributeDefinitions().toString());

        // Provisioned throughput defaults used by the helper.
        assertEquals(Long.valueOf(1000L), desc.getProvisionedThroughput().getReadCapacityUnits());
        assertEquals(Long.valueOf(1000L), desc.getProvisionedThroughput().getWriteCapacityUnits());

        // Embedded DynamoDB reports the table ACTIVE immediately with a local ARN.
        assertEquals("ACTIVE", desc.getTableStatus());
        assertEquals("arn:aws:dynamodb:ddblocal:000000000000:table/Movies", desc.getTableArn());

        ListTablesResult tables = ddb.listTables();
        assertEquals(1, tables.getTableNames().size());
    } finally {
        // Always release the embedded instance, even on assertion failure.
        ddb.shutdown();
    }
}
/**
 * A Hive column (col2, col3) with no entry in the DynamoDB column mapping
 * must make checkTableSchemaMapping fail with a descriptive MetaException.
 */
@Test
public void testCheckTableSchemaMappingMissingColumn() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    // Mapping deliberately omits col2 and col3.
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING, "col1:dynamo_col1$,hashMap:hashMap");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "tinyint", ""),
        new FieldSchema("col3", "string", ""),
        new FieldSchema("hashMap", "map<string,string>", "")));
    table.setSd(sd);

    exceptionRule.expect(MetaException.class);
    exceptionRule.expectMessage("Could not find column mapping for column: col2");
    storageHandler.checkTableSchemaMapping(description, table);
}
/**
 * Mapping entries that reference Hive columns absent from the table schema
 * must make checkTableSchemaMapping fail, naming each dangling mapping.
 */
@Test
public void testCheckTableSchemaMappingMissingColumnMapping() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    // col2 and hashKey are mapped but not declared as Hive columns below.
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey,hashMap:hashMap");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("hashMap", "map<string,string>", "")));
    table.setSd(sd);

    exceptionRule.expect(MetaException.class);
    exceptionRule.expectMessage("Could not find column(s) for column mapping(s): ");
    exceptionRule.expectMessage("col2:dynamo_col2#");
    // NOTE(review): lower-cased "hashkey" presumably reflects Hive's case-folding
    // of column names inside the handler — confirm against its implementation.
    exceptionRule.expectMessage("hashkey:hashKey");
    storageHandler.checkTableSchemaMapping(description, table);
}
/**
 * With every Hive column mapped and every mapping backed by a column,
 * checkTableSchemaMapping completes without throwing.
 */
@Test
public void testCheckTableSchemaMappingValid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "bigint", ""),
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    // Expected to pass silently for a fully consistent mapping.
    storageHandler.checkTableSchemaMapping(description, table);
}
/**
 * A Hive type DynamoDB cannot represent (tinyint) must make
 * checkTableSchemaType fail with a descriptive MetaException.
 */
@Test
public void testCheckTableSchemaTypeInvalidType() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "tinyint", ""),  // unsupported in DynamoDB
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    exceptionRule.expect(MetaException.class);
    exceptionRule.expectMessage("The hive type tinyint is not supported in DynamoDB");
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * The Hive type mapped onto the DynamoDB hash key must match the key's
 * attribute type (S); a map type here must be rejected.
 */
@Test
public void testCheckTableSchemaTypeInvalidHashKeyType() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "bigint", ""),
        new FieldSchema("hashKey", "map<string,string>", "")));  // wrong type for a string key
    table.setSd(sd);

    exceptionRule.expect(MetaException.class);
    exceptionRule.expectMessage("The key element hashKey does not match type. DynamoDB Type: S " +
        "Hive type: " + "map<string,string>");
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * Primitive Hive types (string, bigint) mapped onto compatible DynamoDB
 * attributes pass checkTableSchemaType without throwing.
 */
@Test
public void testCheckTableSchemaTypeValid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "bigint", ""),
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    // This check is expected to pass for the given input
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * Map and list Hive types (maps of numbers, arrays of maps and of
 * primitives) are all accepted by checkTableSchemaType.
 */
@Test
public void testCheckListTableSchemaTypeValid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,col3:dynamo_col3#,col4:dynamo_col4#,col5:dynamo_col5#," +
        "col6:dynamo_col6#,col7:dynamo_col7#,hashKey:hashKey");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "map<string,bigint>", ""),
        new FieldSchema("col2", "array<map<string,bigint>>", ""),
        new FieldSchema("col3", "array<map<string,double>>", ""),
        new FieldSchema("col4", "array<map<string,string>>", ""),
        new FieldSchema("col5", "array<bigint>", ""),
        new FieldSchema("col6", "array<double>", ""),
        new FieldSchema("col7", "array<string>", ""),
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    // This check is expected to pass for the given input
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * A struct containing an unsupported member type (tinyint) must make
 * checkTableSchemaType reject the whole struct type.
 */
@Test
public void testCheckStructTableSchemaTypeInvalid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "struct<bignum:bigint,smallnum:tinyint>", ""),  // tinyint member is unsupported
        new FieldSchema("col2", "array<map<string,bigint>>", ""),
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    exceptionRule.expect(MetaException.class);
    exceptionRule.expectMessage("The hive type struct<bignum:bigint,smallnum:tinyint> is not " +
        "supported in DynamoDB");
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * Structs (and arrays of structs) built from supported member types
 * pass checkTableSchemaType without throwing.
 */
@Test
public void testCheckStructTableSchemaTypeValid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "struct<numarray:array<bigint>,num:double>", ""),
        new FieldSchema("col2", "array<struct<numarray:array<bigint>,num:double>>", ""),
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    // This check is expected to pass for the given input
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * An explicit DynamoDB type override (col2 -> NS) that conflicts with the
 * Hive column type (bigint) must make checkTableSchemaType fail.
 */
@Test
public void testCheckTableSchemaTypeMappingInvalid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    // NS (number set) cannot hold a scalar bigint.
    props.put(DynamoDBConstants.DYNAMODB_TYPE_MAPPING, "col2:NS");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "bigint", ""),
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    exceptionRule.expect(MetaException.class);
    exceptionRule.expectMessage("The DynamoDB type NS does not support Hive type bigint");
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * An explicit DynamoDB type override (col2 -> NS) paired with a compatible
 * Hive type (array&lt;bigint&gt;) passes checkTableSchemaType.
 */
@Test
public void testCheckTableSchemaTypeMappingValid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    props.put(DynamoDBConstants.DYNAMODB_TYPE_MAPPING, "col2:NS");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "array<bigint>", ""),  // NS maps naturally onto a number list
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    // This check is expected to pass for the given input
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * Enabling null serialization does not change schema-type validation:
 * a compatible schema still passes checkTableSchemaType.
 */
@Test
public void testCheckTableSchemaNullSerializationValid() throws MetaException {
    TableDescription description = getHashRangeTable();

    Table table = new Table();
    Map<String, String> props = Maps.newHashMap();
    props.put(DynamoDBConstants.DYNAMODB_COLUMN_MAPPING,
        "col1:dynamo_col1$,col2:dynamo_col2#,hashKey:hashKey");
    props.put(DynamoDBConstants.DYNAMODB_NULL_SERIALIZATION, "true");
    table.setParameters(props);

    StorageDescriptor sd = new StorageDescriptor();
    sd.setCols(Lists.newArrayList(
        new FieldSchema("col1", "string", ""),
        new FieldSchema("col2", "array<bigint>", ""),
        new FieldSchema("hashKey", "string", "")));
    table.setSd(sd);

    // This check is expected to pass for the given input
    storageHandler.checkTableSchemaType(description, table);
}
/**
 * Describes the given DynamoDB table, retrying transient failures via the
 * retry driver.
 *
 * @param tableName name of the table to look up
 * @return the table's {@link TableDescription}
 * @throws RuntimeException wrapping any failure, with the original cause attached
 */
public TableDescription describeTable(String tableName) {
    final DescribeTableRequest request = new DescribeTableRequest()
        .withTableName(tableName);
    try {
        // Callable is a functional interface, so a lambda replaces the
        // anonymous class with identical behavior.
        RetryResult<DescribeTableResult> retried = getRetryDriver().runWithRetry(() -> {
            DescribeTableResult result = dynamoDB.describeTable(request);
            log.info("Describe table output: " + result);
            return result;
        }, null, null);
        return retried.result.getTable();
    } catch (Exception e) {
        throw new RuntimeException("Could not lookup table " + tableName + " in DynamoDB.", e);
    }
}
// Wires the mocks the WriteIopsCalculator under test depends on:
// a provisioned-mode table with a known write capacity, and a JobConf
// describing the map-task topology. Runs before each test.
@Before
public void setup() {
// Table is PROVISIONED with WRITE_CAPACITY_UNITS write capacity; only the
// write side is stubbed because the calculator reads write throughput.
when(dynamoDBClient.describeTable(TABLE_NAME)).thenReturn(new TableDescription()
.withBillingModeSummary(
new BillingModeSummary().withBillingMode(DynamoDBConstants.BILLING_MODE_PROVISIONED))
.withProvisionedThroughput(
new ProvisionedThroughputDescription().withWriteCapacityUnits(WRITE_CAPACITY_UNITS)));
JobConf jobConf = new JobConf();
jobConf.setNumMapTasks(TOTAL_MAP_TASKS);
// A concrete attempt id is required for the calculator to identify this task.
jobConf.set("mapreduce.task.attempt.id", "attempt_m_1");
jobConf.set(DynamoDBConstants.THROUGHPUT_WRITE_PERCENT, String.valueOf
(THROUGHPUT_WRITE_PERCENT));
when(jobClient.getConf()).thenReturn(jobConf);
// Subclass overrides cluster probing so tests control max concurrent tasks.
writeIopsCalculator = new WriteIopsCalculator(jobClient, dynamoDBClient, TABLE_NAME) {
@Override
int calculateMaxMapTasks(int totalMapTasks) {
return MAX_CONCURRENT_MAP_TASKS;
}
};
}
/**
 * Builds a TableDescription fixture with a "hashKey" of the given type,
 * an optional "rangeKey", and 1000/1000 provisioned throughput.
 *
 * @param hashType  DynamoDB attribute type of the hash key (e.g. "S")
 * @param rangeType attribute type of the range key, or null for a hash-only table
 */
private TableDescription getTableDescription(String hashType, String rangeType) {
    List<KeySchemaElement> schema = new ArrayList<>();
    List<AttributeDefinition> attrs = new ArrayList<>();

    // Hash key is always present and listed first.
    schema.add(new KeySchemaElement().withAttributeName("hashKey").withKeyType(KeyType.HASH));
    attrs.add(new AttributeDefinition().withAttributeName("hashKey").withAttributeType(hashType));

    // Range key is optional.
    if (rangeType != null) {
        schema.add(new KeySchemaElement().withAttributeName("rangeKey").withKeyType(KeyType.RANGE));
        attrs.add(new AttributeDefinition().withAttributeName("rangeKey").withAttributeType(rangeType));
    }

    ProvisionedThroughputDescription throughput = new ProvisionedThroughputDescription()
        .withReadCapacityUnits(1000L)
        .withWriteCapacityUnits(1000L);
    return new TableDescription()
        .withKeySchema(schema)
        .withAttributeDefinitions(attrs)
        .withProvisionedThroughput(throughput);
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // Don't swallow the interrupt: restore the flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // Don't swallow the interrupt: restore the flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // printStackTrace() replaced: restore the interrupt flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription = client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // Don't swallow the interrupt: restore the flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
/**
 * Creates a DynamoDB test table and asserts every facet of the returned
 * description: name, key schema, throughput, status, ARN and listing.
 *
 * @param tableName name of the table to create and verify
 */
static void createTestTable(String tableName) {
    CreateTableResult res = createDynamoTable(tableName);
    TableDescription tableDesc = res.getTableDescription();
    Assert.assertEquals(tableName, tableDesc.getTableName());
    Assert.assertTrue(tableDesc.getKeySchema().toString().contains(ATTR_NAME_1));
    Assert.assertTrue(tableDesc.getKeySchema().toString().contains(ATTR_NAME_2));
    // JUnit's contract is assertEquals(expected, actual); the original had the
    // throughput assertions swapped, producing misleading failure messages.
    Assert.assertEquals(
        Long.valueOf(1000), tableDesc.getProvisionedThroughput().getReadCapacityUnits());
    Assert.assertEquals(
        Long.valueOf(1000), tableDesc.getProvisionedThroughput().getWriteCapacityUnits());
    Assert.assertEquals("ACTIVE", tableDesc.getTableStatus());
    Assert.assertEquals(
        "arn:aws:dynamodb:us-east-1:000000000000:table/" + tableName, tableDesc.getTableArn());
    ListTablesResult tables = dynamoDBClient.listTables();
    Assert.assertEquals(1, tables.getTableNames().size());
}
/**
 * Ensures the requested table exists and accepts writes: returns immediately
 * when an acceptable table is already present, otherwise creates it and waits
 * for creation to finish.
 *
 * @param request create-table request; its table name must be non-empty
 * @throws BackendException propagated from create/describe/wait operations
 */
void createTableAndWaitForActive(final CreateTableRequest request) throws BackendException {
    final String tableName = request.getTableName();
    Preconditions.checkArgument(!Strings.isNullOrEmpty(tableName), "Table name was null or empty");
    try {
        final TableDescription existing = this.describeTable(tableName);
        if (null != existing && isTableAcceptingWrites(existing.getTableStatus())) {
            return; //store existed
        }
    } catch (BackendNotFoundException e) {
        // Missing table is the expected path when creating for the first time.
        log.debug(tableName + " did not exist yet, creating it", e);
    }
    createTable(request);
    waitForTableCreation(tableName, false /*verifyIndexesList*/, null /*expectedLsiList*/, null /*expectedGsiList*/);
}
/**
 * Returns the approximate number of segments a table should be broken into
 * for a parallel scan. The count is driven by whichever is larger: the
 * table's provisioned throughput (higher throughput allows faster scanning)
 * or the table's size (larger tables need more segments so no single worker
 * finishes long before the others).
 *
 * @param description the table's DynamoDB description
 * @return the suggested segment count (at least 10 times the larger driver)
 * @throws NullReadCapacityException
 *             if the table returns a null readCapacity units.
 */
public static int getNumberOfSegments(TableDescription description)
        throws NullReadCapacityException {
    ProvisionedThroughputDescription provisionedThroughput = description
            .getProvisionedThroughput();
    // Cast to double BEFORE dividing: the original divided two integral values,
    // truncating the size to whole gigabytes before Math.ceil could round up.
    double tableSizeInGigabytes = Math.ceil((double) description.getTableSizeBytes()
            / BootstrapConstants.GIGABYTE);
    Long readCapacity = provisionedThroughput.getReadCapacityUnits();
    Long writeCapacity = provisionedThroughput.getWriteCapacityUnits();
    if (writeCapacity == null) {
        // On-demand / missing write capacity: assume the minimum.
        writeCapacity = 1L;
    }
    if (readCapacity == null) {
        throw new NullReadCapacityException(
                "Cannot scan with a null readCapacity provisioned throughput");
    }
    // Weighted throughput, normalized so 3000 RCU-equivalents => 1.0.
    double throughput = (readCapacity + 3 * writeCapacity) / 3000.0;
    return (int) (10 * Math.max(Math.ceil(throughput),
            Math.ceil(tableSizeInGigabytes) / 10));
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest()
            .withTableName(tableName);
        TableDescription tableDescription = client.describeTable(
            request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // Don't swallow the interrupt: restore the flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest()
            .withTableName(tableName);
        TableDescription tableDescription = client.describeTable(
            request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // Don't swallow the interrupt: restore the flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request =
            new DescribeTableRequest().withTableName(tableName);
        TableDescription tableDescription =
            client.describeTable(request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // printStackTrace() replaced: restore the interrupt flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}
/**
 * Polls DescribeTable every 20 seconds until the table is ACTIVE,
 * giving up after 10 minutes.
 *
 * @param tableName table to wait for
 * @throws RuntimeException if the table never becomes ACTIVE, or the wait is interrupted
 */
private static void waitForTableToBecomeAvailable(String tableName) {
    System.out.println("Waiting for " + tableName + " to become ACTIVE...");
    long startTime = System.currentTimeMillis();
    long endTime = startTime + (10 * 60 * 1000);
    while (System.currentTimeMillis() < endTime) {
        DescribeTableRequest request = new DescribeTableRequest()
            .withTableName(tableName);
        TableDescription tableDescription = client.describeTable(
            request).getTable();
        String tableStatus = tableDescription.getTableStatus();
        System.out.println(" - current state: " + tableStatus);
        // Constant-first equals avoids an NPE if the status is ever null.
        if (TableStatus.ACTIVE.toString().equals(tableStatus)) {
            return;
        }
        try {
            Thread.sleep(1000 * 20);
        } catch (InterruptedException e) {
            // Don't swallow the interrupt: restore the flag and stop waiting.
            Thread.currentThread().interrupt();
            throw new RuntimeException("Interrupted while waiting for table " + tableName, e);
        }
    }
    throw new RuntimeException("Table " + tableName + " never went active");
}