Examples of BatchWriteItemResult


Examples of com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult


    @Test
    public void vanillaBatchWriteItemTest() throws Exception{
        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();
        BatchWriteItemResult result;

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Test: write items to database
        Map<String, AttributeValue> forumItem = new HashMap<String, AttributeValue>();
        forumItem.put(hashKeyName1, new AttributeValue().withN("1"));
        forumItem.put("range", new AttributeValue().withS("a"));
        List<WriteRequest> forumList = new ArrayList<WriteRequest>();
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem)));

        Map<String, AttributeValue> forumItem1 = new HashMap<String, AttributeValue>();
        forumItem1.put(hashKeyName1, new AttributeValue().withN("2"));
        forumItem1.put("range", new AttributeValue().withS("b"));
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem1)));

        Map<String, AttributeValue> forumItem5 = new HashMap<String, AttributeValue>();
        forumItem5.put(hashKeyName1, new AttributeValue().withN("3"));
        forumItem5.put("range", new AttributeValue().withS("c"));
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem5)));

        Map<String, AttributeValue> forumItem2 = new HashMap<String, AttributeValue>();
        forumItem2.put(hashKeyName1, new AttributeValue().withN("4"));
        forumItem2.put("range", new AttributeValue().withS("d"));
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem2)));

        Map<String, AttributeValue> forumItem3 = new HashMap<String, AttributeValue>();
        forumItem3.put(hashKeyName1, new AttributeValue().withN("5"));
        forumItem3.put("range", new AttributeValue().withS("e"));
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem3)));

        Map<String, AttributeValue> forumItem4 = new HashMap<String, AttributeValue>();
        forumItem4.put(hashKeyName1, new AttributeValue().withN("6"));
        forumItem4.put("range", new AttributeValue().withS("f"));
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem4)));

        //Test case: same hash key as the previous item, but a distinct range key.
        Map<String, AttributeValue> forumItem6 = new HashMap<String, AttributeValue>();
        forumItem6.put(hashKeyName1, new AttributeValue().withN("6"));
        forumItem6.put("range", new AttributeValue().withS("ff"));
        forumList.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItem6)));

        //Test on Table 2
        Map<String, AttributeValue> forumItemT2 = new HashMap<String, AttributeValue>();
        forumItemT2.put(hashKeyName2, new AttributeValue().withN("1"));
        forumItemT2.put("range", new AttributeValue().withS("a"));
        List<WriteRequest> forumListT2 = new ArrayList<WriteRequest>();
        forumListT2.add(new WriteRequest().withPutRequest(new PutRequest().withItem(forumItemT2)));

        requestItems.put(tableName1, forumList);
        requestItems.put(tableName2, forumListT2);
        do {
            System.out.println("Making the request.");

            batchWriteItemRequest.withRequestItems(requestItems);
            result = getClient().batchWriteItem(batchWriteItemRequest);

            // Print consumed capacity units
            for(ConsumedCapacity entry : result.getConsumedCapacity()) {
                String tableName = entry.getTableName();
                Double consumedCapacityUnits = entry.getCapacityUnits();
                System.out.println("Consumed capacity units for table " + tableName + ": " + consumedCapacityUnits);
            }

            // Check for unprocessed keys which could happen if you exceed provisioned throughput
            System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems());
            requestItems = result.getUnprocessedItems();
        } while (result.getUnprocessedItems().size() > 0);
    }
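
The seven put requests above are assembled by hand, one map at a time. A small helper along the lines of the sketch below (the putRequest name and signature are illustrative, not part of the original test) would build each WriteRequest in one call, assuming the same numeric hash key and string "range" attribute:

    // Hypothetical helper: builds a put WriteRequest for an item with a numeric
    // hash key and a string "range" attribute, matching the layout used above.
    private static WriteRequest putRequest(String hashKeyName, String hashValue, String rangeValue) {
        Map<String, AttributeValue> item = new HashMap<String, AttributeValue>();
        item.put(hashKeyName, new AttributeValue().withN(hashValue));
        item.put("range", new AttributeValue().withS(rangeValue));
        return new WriteRequest().withPutRequest(new PutRequest().withItem(item));
    }

    // Usage, e.g.: forumList.add(putRequest(hashKeyName1, "1", "a"));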

Examples of com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult

    @Test
    public void batchWriteItemWithDeletionsTest() throws Exception{
        this.vanillaBatchWriteItemTest();

        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();
        BatchWriteItemResult result;

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Test: delete some items from database
        List<WriteRequest> forumList = new ArrayList<WriteRequest>();

        //Test case: Delete Requests
        Map<String, AttributeValue> forumKey3c =
            createItemKey(
                hashKeyName1, new AttributeValue().withN("3"),
                "range", new AttributeValue().withS("c"));

        Map<String, AttributeValue> forumKey5e =
            createItemKey(
                hashKeyName1, new AttributeValue().withN("5"),
                "range", new AttributeValue().withS("e"));

        Map<String, AttributeValue> forumKey6f =
            createItemKey(
                hashKeyName1, new AttributeValue().withN("6"),
                "range", new AttributeValue().withS("f"));

        forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey3c)));
        forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey5e)));
        forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKey6f)));

        //Test on Table 2
        List<WriteRequest> forumListT2 = new ArrayList<WriteRequest>();
        Map<String, AttributeValue> forumKeyT2 =
            createItemKey(
                hashKeyName2, new AttributeValue().withN("1"),
                "range", new AttributeValue().withS("a"));
        forumListT2.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(forumKeyT2)));

        requestItems.put(tableName1, forumList);
        requestItems.put(tableName2, forumListT2);
        do {
            System.out.println("Making the request.");

            batchWriteItemRequest.withRequestItems(requestItems);
            result = getClient().batchWriteItem(batchWriteItemRequest);

            // Print consumed capacity units
            for(ConsumedCapacity entry : result.getConsumedCapacity()) {
                String tableName = entry.getTableName();
                Double consumedCapacityUnits = entry.getCapacityUnits();
                System.out.println("Consumed capacity units for table " + tableName + ": " + consumedCapacityUnits);
            }

            // Check for unprocessed keys which could happen if you exceed provisioned throughput
            System.out.println("Unprocessed Put and Delete requests: \n" + result.getUnprocessedItems());
            requestItems = result.getUnprocessedItems();
        } while (result.getUnprocessedItems().size() > 0);
    }
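
The deletion test above calls a createItemKey helper that is not shown in the snippet. Judging only from the call sites, it packs a hash-key/range-key pair into the key map that DeleteRequest expects; a minimal sketch under that assumption:

    // Assumed implementation of the createItemKey helper called above: packs a
    // hash-key/range-key pair into the key map that DeleteRequest.withKey expects.
    private static Map<String, AttributeValue> createItemKey(
            String hashKeyName, AttributeValue hashKeyValue,
            String rangeKeyName, AttributeValue rangeKeyValue) {
        Map<String, AttributeValue> key = new HashMap<String, AttributeValue>();
        key.put(hashKeyName, hashKeyValue);
        key.put(rangeKeyName, rangeKeyValue);
        return key;
    }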

Examples of com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult

    @Test
    public void batchWriteItemWithDuplicateDeletionTest() throws Exception{
        this.vanillaBatchWriteItemTest();

        BatchWriteItemRequest batchWriteItemRequest = new BatchWriteItemRequest();
        BatchWriteItemResult result;

        // Create a map for the requests in the batch
        Map<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();

        // Test: delete some items from database
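
The snippet is cut off at this point. Going by the test name and the pattern of the earlier tests, the duplicate-deletion case presumably places two delete requests for the same key into one batch, which DynamoDB rejects: a single BatchWriteItem request may not contain more than one operation on the same item. A hedged sketch of that setup, reusing the naming conventions of the previous tests:

        // Presumed duplicate-deletion setup (names reuse the earlier tests): two
        // delete requests for the same key in one batch. DynamoDB rejects this with
        // a ValidationException, since a single BatchWriteItem request may not
        // contain more than one operation on the same item.
        Map<String, AttributeValue> duplicateKey = createItemKey(
                hashKeyName1, new AttributeValue().withN("1"),
                "range", new AttributeValue().withS("a"));
        forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(duplicateKey)));
        forumList.add(new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(duplicateKey)));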

Examples of com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult

    /**
     * Continue trying to process the batch until it finishes or an exception
     * occurs.
     */
    private FailedBatch callUntilCompletion(Map<String, List<WriteRequest>> batch) {
        BatchWriteItemResult result = null;
        int retries = 0;
        FailedBatch failedBatch = null;
        while (true) {
            try {
                result = db.batchWriteItem(new BatchWriteItemRequest().withRequestItems(batch));
            } catch (Exception e) {
                failedBatch = new FailedBatch();
                failedBatch.setUnprocessedItems(batch);
                failedBatch.setException(e);
                return failedBatch;
            }
            retries++;
            batch = result.getUnprocessedItems();
            if (batch.size() > 0) {
                pauseExponentially(retries);
            } else {
                break;
            }
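
callUntilCompletion hands the waiting off to a pauseExponentially helper that the snippet does not include. A minimal sketch of what such a capped exponential backoff might look like, assuming nothing about the original beyond its name and the retry count it receives:

    // Assumed shape of the pauseExponentially helper used above: sleep for an
    // exponentially growing delay, capped so a long retry streak does not stall forever.
    private void pauseExponentially(int retries) {
        long delay = Math.min((long) (Math.pow(2, retries) * 50L), 10000L); // cap at 10 seconds
        try {
            Thread.sleep(delay);
        } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
        }
    }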

Examples of com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult

  private void batchWrite(Map<String, List<WriteRequest>> items) {
    if (items == null || items.isEmpty()) {
      return;
    }
    try {
      BatchWriteItemResult result = client().batchWriteItem(new BatchWriteItemRequest().
          withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL).withRequestItems(items));
      if (result == null) {
        return;
      }
      logger.debug("batchWrite() CC: {}", result.getConsumedCapacity());

      Thread.sleep(1000);

      if (result.getUnprocessedItems() != null && !result.getUnprocessedItems().isEmpty()) {
        logger.warn("UNPROCESSED {0}", result.getUnprocessedItems().size());
        batchWrite(result.getUnprocessedItems());
      }
    } catch (Exception e) {
      logger.error(null, e);
    }
  }
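
The method above retries unprocessed items by calling itself recursively after a fixed one-second sleep, with no bound on the number of attempts. A hedged alternative sketch that does the same work iteratively with a retry cap and a growing delay (the batchWriteWithRetries name, the cap, and the backoff are illustrative choices, not from the source):

  // Illustrative variant of the retry logic above: retry unprocessed items in a
  // bounded loop instead of recursing, with a simple growing delay between attempts.
  private void batchWriteWithRetries(Map<String, List<WriteRequest>> items) throws InterruptedException {
    final int maxRetries = 5; // illustrative cap, not taken from the original code
    Map<String, List<WriteRequest>> remaining = items;
    for (int attempt = 0; remaining != null && !remaining.isEmpty() && attempt < maxRetries; attempt++) {
      BatchWriteItemResult result = client().batchWriteItem(new BatchWriteItemRequest()
          .withReturnConsumedCapacity(ReturnConsumedCapacity.TOTAL).withRequestItems(remaining));
      remaining = result.getUnprocessedItems();
      if (remaining != null && !remaining.isEmpty()) {
        Thread.sleep(1000L * (attempt + 1)); // linear backoff here; an assumption, not the original behavior
      }
    }
  }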

Examples of com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult

                if ( !writeRequestIter.hasNext() ) {
                    tableIter.remove();
                }
            }

            BatchWriteItemResult result = db.batchWriteItem(new BatchWriteItemRequest().withRequestItems(batch));

            // add any unprocessed items back into the list to process
            for ( Entry<String, List<WriteRequest>> unprocessedItem : result.getUnprocessedItems().entrySet() ) {
                if ( !requestItems.containsKey(unprocessedItem.getKey()) ) {
                    requestItems.put(unprocessedItem.getKey(), new LinkedList<WriteRequest>());
                }
                requestItems.get(unprocessedItem.getKey()).addAll(unprocessedItem.getValue());
            }
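
The fragment above removes exhausted tables from the current batch and folds any unprocessed items back into requestItems, which suggests an outer loop that feeds BatchWriteItem one slice of work at a time. Since BatchWriteItem accepts at most 25 write requests per call, such a caller typically slices its work roughly like the hypothetical helper below (the toBatches name and shape are assumptions, not from the snippet):

    // Hypothetical helper: splits one table's write requests into batches of at
    // most 25 entries, the per-call limit of BatchWriteItem.
    private static List<Map<String, List<WriteRequest>>> toBatches(String tableName, List<WriteRequest> writes) {
        List<Map<String, List<WriteRequest>>> batches = new LinkedList<Map<String, List<WriteRequest>>>();
        for (int i = 0; i < writes.size(); i += 25) {
            Map<String, List<WriteRequest>> batch = new HashMap<String, List<WriteRequest>>();
            batch.put(tableName, new LinkedList<WriteRequest>(writes.subList(i, Math.min(i + 25, writes.size()))));
            batches.add(batch);
        }
        return batches;
    }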
