/src/main/java/com/amazonaws/services/dynamodbv2/datamodeling/DynamoDBMapper.java

https://github.com/DWB-eHealth/aws-sdk-java

  1. /*
  2. * Copyright 2011-2014 Amazon Technologies, Inc.
  3. *
  4. * Licensed under the Apache License, Version 2.0 (the "License");
  5. * you may not use this file except in compliance with the License.
  6. * You may obtain a copy of the License at:
  7. *
  8. * http://aws.amazon.com/apache2.0
  9. *
  10. * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
  11. * OR CONDITIONS OF ANY KIND, either express or implied. See the
  12. * License for the specific language governing permissions and
  13. * limitations under the License.
  14. */
  15. package com.amazonaws.services.dynamodbv2.datamodeling;
  16. import java.lang.reflect.InvocationTargetException;
  17. import java.lang.reflect.Method;
  18. import java.text.ParseException;
  19. import java.util.ArrayList;
  20. import java.util.Arrays;
  21. import java.util.Collection;
  22. import java.util.Collections;
  23. import java.util.HashMap;
  24. import java.util.HashSet;
  25. import java.util.Iterator;
  26. import java.util.LinkedList;
  27. import java.util.List;
  28. import java.util.Map;
  29. import java.util.Map.Entry;
  30. import java.util.Random;
  31. import java.util.Set;
  32. import org.apache.commons.logging.Log;
  33. import org.apache.commons.logging.LogFactory;
  34. import com.amazonaws.AmazonClientException;
  35. import com.amazonaws.AmazonServiceException;
  36. import com.amazonaws.AmazonWebServiceRequest;
  37. import com.amazonaws.auth.AWSCredentialsProvider;
  38. import com.amazonaws.retry.RetryUtils;
  39. import com.amazonaws.services.dynamodbv2.AmazonDynamoDB;
  40. import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig.ConsistentReads;
  41. import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig.PaginationLoadingStrategy;
  42. import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperConfig.SaveBehavior;
  43. import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTableSchemaParser.TableIndexesInfo;
  44. import com.amazonaws.services.dynamodbv2.model.AttributeAction;
  45. import com.amazonaws.services.dynamodbv2.model.AttributeValue;
  46. import com.amazonaws.services.dynamodbv2.model.AttributeValueUpdate;
  47. import com.amazonaws.services.dynamodbv2.model.BatchGetItemRequest;
  48. import com.amazonaws.services.dynamodbv2.model.BatchGetItemResult;
  49. import com.amazonaws.services.dynamodbv2.model.BatchWriteItemRequest;
  50. import com.amazonaws.services.dynamodbv2.model.BatchWriteItemResult;
  51. import com.amazonaws.services.dynamodbv2.model.ComparisonOperator;
  52. import com.amazonaws.services.dynamodbv2.model.Condition;
  53. import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException;
  54. import com.amazonaws.services.dynamodbv2.model.CreateTableRequest;
  55. import com.amazonaws.services.dynamodbv2.model.ConditionalOperator;
  56. import com.amazonaws.services.dynamodbv2.model.DeleteItemRequest;
  57. import com.amazonaws.services.dynamodbv2.model.DeleteRequest;
  58. import com.amazonaws.services.dynamodbv2.model.ExpectedAttributeValue;
  59. import com.amazonaws.services.dynamodbv2.model.GetItemRequest;
  60. import com.amazonaws.services.dynamodbv2.model.GetItemResult;
  61. import com.amazonaws.services.dynamodbv2.model.KeysAndAttributes;
  62. import com.amazonaws.services.dynamodbv2.model.PutItemRequest;
  63. import com.amazonaws.services.dynamodbv2.model.PutItemResult;
  64. import com.amazonaws.services.dynamodbv2.model.PutRequest;
  65. import com.amazonaws.services.dynamodbv2.model.QueryRequest;
  66. import com.amazonaws.services.dynamodbv2.model.QueryResult;
  67. import com.amazonaws.services.dynamodbv2.model.ReturnValue;
  68. import com.amazonaws.services.dynamodbv2.model.ScanRequest;
  69. import com.amazonaws.services.dynamodbv2.model.ScanResult;
  70. import com.amazonaws.services.dynamodbv2.model.Select;
  71. import com.amazonaws.services.dynamodbv2.model.UpdateItemRequest;
  72. import com.amazonaws.services.dynamodbv2.model.UpdateItemResult;
  73. import com.amazonaws.services.dynamodbv2.model.WriteRequest;
  74. import com.amazonaws.services.s3.model.Region;
  75. import com.amazonaws.util.VersionInfoUtils;
  76. /**
  77. * Object mapper for domain-object interaction with DynamoDB.
  78. * <p>
  79. * To use, define a domain class that represents an item in a DynamoDB table and
  80. * annotate it with the annotations found in the
  81. * com.amazonaws.services.dynamodbv2.datamodeling package. In order to allow the
  82. * mapper to correctly persist the data, each modeled property in the domain
  83. * class should be accessible via getter and setter methods, and each property
  84. * annotation should be applied either to the getter method or to the class field.
  85. * A minimal example using getter annotations:
  86. *
  87. * <pre class="brush: java">
  88. * &#064;DynamoDBTable(tableName = &quot;TestTable&quot;)
  89. * public class TestClass {
  90. *
  91. * private Long key;
  92. * private double rangeKey;
  93. * private Long version;
  94. *
  95. * private Set&lt;Integer&gt; integerSetAttribute;
  96. *
  97. * &#064;DynamoDBHashKey
  98. * public Long getKey() {
  99. * return key;
  100. * }
  101. *
  102. * public void setKey(Long key) {
  103. * this.key = key;
  104. * }
  105. *
  106. * &#064;DynamoDBRangeKey
  107. * public double getRangeKey() {
  108. * return rangeKey;
  109. * }
  110. *
  111. * public void setRangeKey(double rangeKey) {
  112. * this.rangeKey = rangeKey;
  113. * }
  114. *
  115. * &#064;DynamoDBAttribute(attributeName = &quot;integerSetAttribute&quot;)
  116. * public Set&lt;Integer&gt; getIntegerAttribute() {
  117. * return integerSetAttribute;
  118. * }
  119. *
  120. * public void setIntegerAttribute(Set&lt;Integer&gt; integerAttribute) {
  121. * this.integerSetAttribute = integerAttribute;
  122. * }
  123. *
  124. * &#064;DynamoDBVersionAttribute
  125. * public Long getVersion() {
  126. * return version;
  127. * }
  128. *
  129. * public void setVersion(Long version) {
  130. * this.version = version;
  131. * }
  132. * }
  133. * </pre>
  134. * <p>
  135. * Save instances of annotated classes to DynamoDB, retrieve them, and delete
  136. * them using the {@link DynamoDBMapper} class, as in the following example.
  137. *
  138. * <pre class="brush: java">
  139. * DynamoDBMapper mapper = new DynamoDBMapper(dynamoDBClient);
  140. * Long hashKey = 105L;
  141. * double rangeKey = 1.0d;
  142. * TestClass obj = mapper.load(TestClass.class, hashKey, rangeKey);
  143. * obj.getIntegerAttribute().add(42);
  144. * mapper.save(obj);
  145. * mapper.delete(obj);
  146. * </pre>
  147. * <p>
  148. * When using the save, load, and delete methods, {@link DynamoDBMapper} will
  149. * throw {@link DynamoDBMappingException}s to indicate that domain classes are
  150. * incorrectly annotated or otherwise incompatible with this class. Service
  151. * exceptions will always be propagated as {@link AmazonClientException}, and
  152. * DynamoDB-specific subclasses such as {@link ConditionalCheckFailedException}
  153. * will be used when possible.
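 *
 * For example, when optimistic locking via a version attribute fails, the
 * save surfaces a {@link ConditionalCheckFailedException} (a minimal sketch
 * using the {@code obj} loaded above):
 *
 * <pre class="brush: java">
 * try {
 *     mapper.save(obj);
 * } catch (ConditionalCheckFailedException e) {
 *     // Another writer updated the item since it was loaded; reload and retry.
 * }
 * </pre>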
  154. * <p>
  155. * This class is thread-safe and can be shared between threads. It's also very
  156. * lightweight, so it doesn't need to be.
  157. *
  158. * @see DynamoDBTable
  159. * @see DynamoDBHashKey
  160. * @see DynamoDBRangeKey
  161. * @see DynamoDBAutoGeneratedKey
  162. * @see DynamoDBAttribute
  163. * @see DynamoDBVersionAttribute
  164. * @see DynamoDBIgnore
  165. * @see DynamoDBMarshalling
  166. * @see DynamoDBMapperConfig
  167. */
  168. public class DynamoDBMapper {
  169. private final S3ClientCache s3cc;
  170. private final AmazonDynamoDB db;
  171. private final DynamoDBMapperConfig config;
  172. private final DynamoDBReflector reflector = new DynamoDBReflector();
  173. private final DynamoDBTableSchemaParser schemaParser = new DynamoDBTableSchemaParser();
  174. private final AttributeTransformer transformer;
  175. /** The max back off time for batch write */
  176. static final long MAX_BACKOFF_IN_MILLISECONDS = 1000 * 3;
  177. /**
  178. * This retry count is applicable only when every batch get item request
  179. * results in no data retrieved from the server and the unprocessed keys
  180. * are the same as the request items.
  181. */
  182. static final int BATCH_GET_MAX_RETRY_COUNT_ALL_KEYS = 5;
  183. /**
  184. * User agent for requests made using the {@link DynamoDBMapper}.
  185. */
  186. private static final String USER_AGENT = DynamoDBMapper.class.getName() + "/" + VersionInfoUtils.getVersion();
  187. private static final String NO_RANGE_KEY = new String();
  188. private static final Log log = LogFactory.getLog(DynamoDBMapper.class);
  189. /**
  190. * Constructs a new mapper with the service object given, using the default
  191. * configuration.
  192. *
  193. * @param dynamoDB
  194. * The service object to use for all service calls.
  195. * @see DynamoDBMapperConfig#DEFAULT
  196. */
  197. public DynamoDBMapper(final AmazonDynamoDB dynamoDB) {
  198. this(dynamoDB, DynamoDBMapperConfig.DEFAULT, null, null);
  199. }
  200. /**
  201. * Constructs a new mapper with the service object and configuration given.
  202. *
  203. * @param dynamoDB
  204. * The service object to use for all service calls.
  205. * @param config
  206. * The default configuration to use for all service calls. It can
  207. * be overridden on a per-operation basis.
  208. */
  209. public DynamoDBMapper(
  210. final AmazonDynamoDB dynamoDB,
  211. final DynamoDBMapperConfig config) {
  212. this(dynamoDB, config, null, null);
  213. }
  214. /**
  215. * Constructs a new mapper with the service object and S3 client cache
  216. * given, using the default configuration.
  217. *
  218. * @param ddb
  219. * The service object to use for all service calls.
  220. * @param s3CredentialProvider
  221. * The credentials provider for accessing S3.
  222. * Relevant only if {@link S3Link} is involved.
  223. * @see DynamoDBMapperConfig#DEFAULT
  224. */
  225. public DynamoDBMapper(
  226. final AmazonDynamoDB ddb,
  227. final AWSCredentialsProvider s3CredentialProvider) {
  228. this(ddb, DynamoDBMapperConfig.DEFAULT, s3CredentialProvider);
  229. }
  230. /**
  231. * Constructs a new mapper with the given service object, configuration,
  232. * and transform hook.
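 * <p>
 * A minimal sketch of wiring in a transformer (assuming an existing
 * {@link AttributeTransformer} implementation named
 * {@code EncryptingTransformer}; the name is illustrative only):
 *
 * <pre class="brush: java">
 * DynamoDBMapper mapper = new DynamoDBMapper(
 *         dynamoDBClient,
 *         DynamoDBMapperConfig.DEFAULT,
 *         new EncryptingTransformer());
 * </pre>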
  233. *
  234. * @param dynamoDB
  235. * the service object to use for all service calls
  236. * @param config
  237. * the default configuration to use for all service calls. It
  238. * can be overridden on a per-operation basis
  239. * @param transformer
  240. * The custom attribute transformer to invoke when serializing or
  241. * deserializing an object.
  242. */
  243. public DynamoDBMapper(
  244. final AmazonDynamoDB dynamoDB,
  245. final DynamoDBMapperConfig config,
  246. final AttributeTransformer transformer) {
  247. this(dynamoDB, config, transformer, null);
  248. }
  249. /**
  250. * Constructs a new mapper with the service object, configuration, and S3
  251. * client cache given.
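 * <p>
 * A minimal sketch (assuming the default credentials chain can access the
 * S3 buckets referenced by any {@link S3Link} attributes):
 *
 * <pre class="brush: java">
 * DynamoDBMapper mapper = new DynamoDBMapper(
 *         dynamoDBClient,
 *         DynamoDBMapperConfig.DEFAULT,
 *         new DefaultAWSCredentialsProviderChain());
 * </pre>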
  252. *
  253. * @param dynamoDB
  254. * The service object to use for all service calls.
  255. * @param config
  256. * The default configuration to use for all service calls. It can
  257. * be overridden on a per-operation basis.
  258. * @param s3CredentialProvider
  259. * The credentials provider for accessing S3.
  260. * Relevant only if {@link S3Link} is involved.
  261. */
  262. public DynamoDBMapper(
  263. final AmazonDynamoDB dynamoDB,
  264. final DynamoDBMapperConfig config,
  265. final AWSCredentialsProvider s3CredentialProvider) {
  266. this(dynamoDB, config, null, validate(s3CredentialProvider));
  267. }
  268. /**
  269. * Throws an exception if the given credentials provider is {@code null}.
  270. */
  271. private static AWSCredentialsProvider validate(
  272. final AWSCredentialsProvider provider) {
  273. if (provider == null) {
  274. throw new IllegalArgumentException(
  275. "s3 credentials provider must not be null");
  276. }
  277. return provider;
  278. }
  279. /**
  280. * Constructor with all parameters.
  281. *
  282. * @param dynamoDB
  283. * The service object to use for all service calls.
  284. * @param config
  285. * The default configuration to use for all service calls. It can
  286. * be overridden on a per-operation basis.
  287. * @param transformer
  288. * The custom attribute transformer to invoke when serializing or
  289. * deserializing an object.
  290. * @param s3CredentialsProvider
  291. * The credentials provider for accessing S3.
  292. * Relevant only if {@link S3Link} is involved.
  293. */
  294. public DynamoDBMapper(
  295. final AmazonDynamoDB dynamoDB,
  296. final DynamoDBMapperConfig config,
  297. final AttributeTransformer transformer,
  298. final AWSCredentialsProvider s3CredentialsProvider) {
  299. this.db = dynamoDB;
  300. this.config = config;
  301. this.transformer = transformer;
  302. if (s3CredentialsProvider == null) {
  303. this.s3cc = null;
  304. } else {
  305. this.s3cc = new S3ClientCache(s3CredentialsProvider.getCredentials());
  306. }
  307. }
  308. /**
  309. * Loads an object with the hash key given and a configuration override.
  310. * This configuration overrides the default provided at object construction.
  311. *
  312. * @see DynamoDBMapper#load(Class, Object, Object, DynamoDBMapperConfig)
  313. */
  314. public <T extends Object> T load(Class<T> clazz, Object hashKey, DynamoDBMapperConfig config) {
  315. return load(clazz, hashKey, null, config);
  316. }
  317. /**
  318. * Loads an object with the hash key given, using the default configuration.
  319. *
  320. * @see DynamoDBMapper#load(Class, Object, Object, DynamoDBMapperConfig)
  321. */
  322. public <T extends Object> T load(Class<T> clazz, Object hashKey) {
  323. return load(clazz, hashKey, null, config);
  324. }
  325. /**
  326. * Loads an object with a hash and range key, using the default
  327. * configuration.
  328. *
  329. * @see DynamoDBMapper#load(Class, Object, Object, DynamoDBMapperConfig)
  330. */
  331. public <T extends Object> T load(Class<T> clazz, Object hashKey, Object rangeKey) {
  332. return load(clazz, hashKey, rangeKey, config);
  333. }
  334. /**
  335. * Returns an object whose keys match those of the prototype key object given,
  336. * or null if no such item exists.
  337. *
  338. * @param keyObject
  339. * An object of the class to load with the key values to match.
  340. *
  341. * @see DynamoDBMapper#load(Object, DynamoDBMapperConfig)
  342. */
  343. public <T extends Object> T load(T keyObject) {
  344. return load(keyObject, this.config);
  345. }
  346. /**
  347. * Returns an object whose keys match those of the prototype key object given,
  348. * or null if no such item exists.
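 *
 * A minimal sketch using the {@code TestClass} domain class shown in the
 * class-level documentation:
 *
 * <pre class="brush: java">
 * TestClass keyPrototype = new TestClass();
 * keyPrototype.setKey(105L);
 * keyPrototype.setRangeKey(1.0d);
 * TestClass item = mapper.load(keyPrototype,
 *         new DynamoDBMapperConfig(ConsistentReads.CONSISTENT));
 * </pre>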
  349. *
  350. * @param keyObject
  351. * An object of the class to load with the key values to match.
  352. * @param config
  353. * Configuration for the service call to retrieve the object from
  354. * DynamoDB. This configuration overrides the default given at
  355. * construction.
  356. */
  357. public <T extends Object> T load(T keyObject, DynamoDBMapperConfig config) {
  358. @SuppressWarnings("unchecked")
  359. Class<T> clazz = (Class<T>) keyObject.getClass();
  360. config = mergeConfig(config);
  361. String tableName = getTableName(clazz, config);
  362. GetItemRequest rq = new GetItemRequest()
  363. .withRequestMetricCollector(config.getRequestMetricCollector());
  364. Map<String, AttributeValue> key = getKey(keyObject, clazz);
  365. rq.setKey(key);
  366. rq.setTableName(tableName);
  367. rq.setConsistentRead(config.getConsistentReads() == ConsistentReads.CONSISTENT);
  368. GetItemResult item = db.getItem(applyUserAgent(rq));
  369. Map<String, AttributeValue> itemAttributes = item.getItem();
  370. if ( itemAttributes == null ) {
  371. return null;
  372. }
  373. T object = marshalIntoObject(toParameters(itemAttributes, clazz, config));
  374. return object;
  375. }
  376. /**
  377. * Returns a key map for the key object given.
  378. *
  379. * @param keyObject
  380. * The key object, corresponding to an item in a dynamo table.
  381. */
  382. @SuppressWarnings("unchecked")
  383. private <T> Map<String, AttributeValue> getKey(T keyObject) {
  384. return getKey(keyObject, (Class<T>)keyObject.getClass());
  385. }
  386. private <T> Map<String, AttributeValue> getKey(T keyObject, Class<T> clazz) {
  387. Map<String, AttributeValue> key = new HashMap<String, AttributeValue>();
  388. for (Method keyGetter : reflector.getPrimaryKeyGetters(clazz)) {
  389. Object getterResult = safeInvoke(keyGetter, keyObject);
  390. AttributeValue keyAttributeValue = getSimpleAttributeValue(keyGetter, getterResult);
  391. if (keyAttributeValue == null) {
  392. throw new DynamoDBMappingException("Null key found for " + keyGetter);
  393. }
  394. key.put(reflector.getAttributeName(keyGetter), keyAttributeValue);
  395. }
  396. if ( key.isEmpty() ) {
  397. throw new DynamoDBMappingException("Class must be annotated with " + DynamoDBHashKey.class + " and "
  398. + DynamoDBRangeKey.class);
  399. }
  400. return key;
  401. }
  402. /**
  403. * Returns an object with the given hash and range key, or null if no such object
  404. * exists.
  405. *
  406. * @param clazz
  407. * The class to load, corresponding to a DynamoDB table.
  408. * @param hashKey
  409. * The hash key of the object.
  410. * @param rangeKey
  411. * The range key of the object, or null for tables without a
  412. * range key.
  413. * @param config
  414. * Configuration for the service call to retrieve the object from
  415. * DynamoDB. This configuration overrides the default given at
  416. * construction.
  417. */
  418. public <T extends Object> T load(Class<T> clazz, Object hashKey, Object rangeKey, DynamoDBMapperConfig config) {
  419. config = mergeConfig(config);
  420. T keyObject = createKeyObject(clazz, hashKey, rangeKey);
  421. return load(keyObject, config);
  422. }
  423. /**
  424. * Creates a key prototype object for the class given with the single hash and range key given.
  425. */
  426. private <T> T createKeyObject(Class<T> clazz, Object hashKey, Object rangeKey) {
  427. T keyObject = null;
  428. try {
  429. keyObject = clazz.newInstance();
  430. } catch ( Exception e ) {
  431. throw new DynamoDBMappingException("Failed to instantiate class", e);
  432. }
  433. boolean seenHashKey = false;
  434. boolean seenRangeKey = false;
  435. for ( Method getter : reflector.getPrimaryKeyGetters(clazz) ) {
  436. if ( ReflectionUtils.getterOrFieldHasAnnotation(getter, DynamoDBHashKey.class) ) {
  437. if ( seenHashKey ) {
  438. throw new DynamoDBMappingException("Found more than one method annotated with "
  439. + DynamoDBHashKey.class + " for class " + clazz
  440. + ". Use load(Object) for tables with more than a single hash and range key.");
  441. }
  442. seenHashKey = true;
  443. safeInvoke(reflector.getSetter(getter), keyObject, hashKey);
  444. } else if ( ReflectionUtils.getterOrFieldHasAnnotation(getter, DynamoDBRangeKey.class) ) {
  445. if ( seenRangeKey ) {
  446. throw new DynamoDBMappingException("Found more than one method annotated with "
  447. + DynamoDBRangeKey.class + " for class " + clazz
  448. + ". Use load(Object) for tables with more than a single hash and range key.");
  449. }
  450. seenRangeKey = true;
  451. safeInvoke(reflector.getSetter(getter), keyObject, rangeKey);
  452. }
  453. }
  454. if ( !seenHashKey ) {
  455. throw new DynamoDBMappingException("No method annotated with " + DynamoDBHashKey.class + " for class "
  456. + clazz + ".");
  457. } else if ( rangeKey != null && !seenRangeKey ) {
  458. throw new DynamoDBMappingException("No method annotated with " + DynamoDBRangeKey.class + " for class "
  459. + clazz + ".");
  460. }
  461. return keyObject;
  462. }
  463. /**
  464. * Returns a map of attribute name to EQ condition for the key prototype
  465. * object given. This method considers attributes annotated with either
  466. * {@link DynamoDBHashKey} or {@link DynamoDBIndexHashKey}.
  467. *
  468. * @param obj
  469. * The prototype object that includes the hash key value.
  470. * @return A map of hash key attribute name to EQ condition for the key
  471. * prototype object, or an empty map if obj is null.
  472. */
  473. private Map<String, Condition> getHashKeyEqualsConditions(Object obj) {
  474. Map<String, Condition> conditions = new HashMap<String, Condition>();
  475. if (obj != null) {
  476. for ( Method getter : reflector.getRelevantGetters(obj.getClass()) ) {
  477. if ( ReflectionUtils.getterOrFieldHasAnnotation(getter, DynamoDBHashKey.class)
  478. || ReflectionUtils.getterOrFieldHasAnnotation(getter, DynamoDBIndexHashKey.class) ) {
  479. Object getterReturnResult = safeInvoke(getter, obj, (Object[])null);
  480. if (getterReturnResult != null) {
  481. conditions.put(
  482. reflector.getAttributeName(getter),
  483. new Condition().withComparisonOperator(ComparisonOperator.EQ).withAttributeValueList(
  484. getSimpleAttributeValue(getter, getterReturnResult)));
  485. }
  486. }
  487. }
  488. }
  489. return conditions;
  490. }
  491. /**
  492. * Returns the table name for the class given.
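 * <p>
 * The table name from the {@link DynamoDBTable} annotation is used unless
 * the configuration supplies a {@link DynamoDBMapperConfig.TableNameOverride},
 * in which case the override's full table name or prefix takes precedence.
 * A minimal sketch (assuming the {@code TestTable} annotation shown in the
 * class-level documentation):
 *
 * <pre class="brush: java">
 * // Resolves table names as &quot;staging-TestTable&quot;
 * DynamoDBMapperConfig prefixed = new DynamoDBMapperConfig(
 *         DynamoDBMapperConfig.TableNameOverride.withTableNamePrefix(&quot;staging-&quot;));
 * </pre>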
  493. */
  494. protected final String getTableName(final Class<?> clazz,
  495. final DynamoDBMapperConfig config) {
  496. return getTableName(clazz, config, reflector);
  497. }
  498. static String getTableName(final Class<?> clazz,
  499. final DynamoDBMapperConfig config,
  500. final DynamoDBReflector reflector) {
  501. DynamoDBTable table = reflector.getTable(clazz);
  502. String tableName = table.tableName();
  503. if ( config.getTableNameOverride() != null ) {
  504. if ( config.getTableNameOverride().getTableName() != null ) {
  505. tableName = config.getTableNameOverride().getTableName();
  506. } else {
  507. tableName = config.getTableNameOverride().getTableNamePrefix()
  508. + tableName;
  509. }
  510. }
  511. return tableName;
  512. }
  513. /**
  514. * A replacement for {@link #marshallIntoObject(Class, Map)} that takes
  515. * extra parameters to tunnel through to {@code privateMarshalIntoObject}.
  516. * <p>
  517. * Once {@code marshallIntoObject} is removed, this method will directly
  518. * call {@code privateMarshalIntoObject}.
  519. */
  520. private <T> T marshalIntoObject(
  521. final AttributeTransformer.Parameters<T> parameters
  522. ) {
  523. return marshallIntoObject(
  524. parameters.getModelClass(),
  525. MapAnd.wrap(parameters.getAttributeValues(), parameters));
  526. }
  527. /**
  528. * Creates and fills in the attributes on an instance of the class given
  529. * with the attributes given.
  530. * <p>
  531. * This is accomplished by looking for getter methods annotated with an
  532. * appropriate annotation, then looking for matching attribute names in the
  533. * item attribute map.
  534. * <p>
  535. * This method has been marked deprecated because it does not allow
  536. * load/query/scan to pass through their DynamoDBMapperConfig parameter,
  537. * which is needed by some implementations of {@code AttributeTransformer}.
  538. * In a future version of the SDK, load/query/scan will be changed to
  539. * directly call privateMarshalIntoObject, and will no longer call this
  540. * method.
  541. * <p>
  542. * If you are extending DynamoDBMapper and overriding this method to
  543. * customize how the mapper unmarshals POJOs from a raw DynamoDB item,
  544. * please switch to using an AttributeTransformer (or open a GitHub
  545. * issue if you need to fully control the unmarshalling process, and we'll
  546. * figure out a better way to expose such a hook).
  547. * <p>
  548. * If you're simply calling this method, it will continue to be available
  549. * for the foreseeable future - feel free to ignore the @Deprecated tag.
  550. *
  551. * @param clazz
  552. * The class to instantiate and hydrate
  553. * @param itemAttributes
  554. * The set of item attributes, keyed by attribute name.
  555. * @deprecated as an extension point for adding custom unmarshalling
  556. */
  557. @Deprecated
  558. public <T> T marshallIntoObject(Class<T> clazz, Map<String, AttributeValue> itemAttributes) {
  559. if (itemAttributes instanceof MapAnd) {
  560. @SuppressWarnings("unchecked")
  561. AttributeTransformer.Parameters<T> parameters =
  562. ((MapAnd<?, ?, AttributeTransformer.Parameters<T>>) itemAttributes)
  563. .getExtra();
  564. return privateMarshalIntoObject(parameters);
  565. } else {
  566. // Called via some unexpected external codepath; use the class-level
  567. // config.
  568. return privateMarshalIntoObject(
  569. toParameters(itemAttributes, clazz, this.config));
  570. }
  571. }
  572. /**
  573. * The one true implementation of marshalIntoObject.
  574. */
  575. private <T> T privateMarshalIntoObject(
  576. final AttributeTransformer.Parameters<T> parameters) {
  577. T toReturn = null;
  578. try {
  579. toReturn = parameters.getModelClass().newInstance();
  580. } catch ( InstantiationException e ) {
  581. throw new DynamoDBMappingException("Failed to instantiate new instance of class", e);
  582. } catch ( IllegalAccessException e ) {
  583. throw new DynamoDBMappingException("Failed to instantiate new instance of class", e);
  584. }
  585. if ( parameters.getAttributeValues() == null
  586. || parameters.getAttributeValues().isEmpty() ) {
  587. return toReturn;
  588. }
  589. Map<String, AttributeValue> result = untransformAttributes(parameters);
  590. for ( Method m : reflector.getRelevantGetters(parameters.getModelClass()) ) {
  591. String attributeName = reflector.getAttributeName(m);
  592. if ( result.containsKey(attributeName) ) {
  593. setValue(toReturn, m, result.get(attributeName));
  594. }
  595. }
  596. return toReturn;
  597. }
  598. /**
  599. * Unmarshalls the list of item attributes into objects of type clazz.
  600. * <p>
  601. * This method has been marked deprecated because it does not allow
  602. * query/scan to pass through their DynamoDBMapperConfig parameter,
  603. * which is needed by some implementations of {@code AttributeTransformer}.
  604. * In a future version of the SDK, query/scan will be changed to directly
  605. * call privateMarshalIntoObjects, and will no longer call this method.
  606. * <p>
  607. * If you are extending DynamoDBMapper and overriding this method to
  608. * customize how the mapper unmarshals POJOs from raw DynamoDB items,
  609. * please switch to using an AttributeTransformer (or open a GitHub
  610. * issue if you need to fully control the unmarshalling process, and we'll
  611. * figure out a better way to expose such a hook).
  612. * <p>
  613. * If you're simply calling this method, it will continue to be available
  614. * for the foreseeable future - feel free to ignore the @Deprecated tag.
  615. *
  616. * @see DynamoDBMapper#marshallIntoObject(Class, Map)
  617. * @deprecated as an extension point for adding custom unmarshalling
  618. */
  619. @Deprecated
  620. public <T> List<T> marshallIntoObjects(Class<T> clazz, List<Map<String, AttributeValue>> itemAttributes) {
  621. List<T> result = new ArrayList<T>(itemAttributes.size());
  622. for (Map<String, AttributeValue> item : itemAttributes) {
  623. result.add(marshallIntoObject(clazz, item));
  624. }
  625. return result;
  626. }
  627. /**
  628. * A replacement for {@link #marshallIntoObjects(Class, List)} that takes
  629. * an extra set of parameters to be tunneled through to
  630. * {@code privateMarshalIntoObject} (if nothing along the way is
  631. * overridden). It's package-private because some of the Paginated*List
  632. * classes call back into it, but final because no one, even in this
  633. * package, should ever override it.
  634. * <p>
  635. * In the future, when the deprecated {@code marshallIntoObjects} is
  636. * removed, this method will be changed to directly call
  637. * {@code privateMarshalIntoObject}.
  638. */
  639. final <T> List<T> marshalIntoObjects(
  640. final List<AttributeTransformer.Parameters<T>> parameters
  641. ) {
  642. if (parameters.isEmpty()) {
  643. return Collections.emptyList();
  644. }
  645. Class<T> clazz = parameters.get(0).getModelClass();
  646. List<Map<String, AttributeValue>> list =
  647. new ArrayList<Map<String, AttributeValue>>(parameters.size());
  648. for (AttributeTransformer.Parameters<T> entry : parameters) {
  649. list.add(MapAnd.wrap(entry.getAttributeValues(), entry));
  650. }
  651. return marshallIntoObjects(clazz, list);
  652. }
  653. /**
  654. * Sets the value in the return object corresponding to the service result.
  655. */
  656. private <T> void setValue(final T toReturn, final Method getter, AttributeValue value) {
  657. Method setter = reflector.getSetter(getter);
  658. ArgumentUnmarshaller unmarshaller = reflector.getArgumentUnmarshaller(toReturn, getter, setter, s3cc);
  659. unmarshaller.typeCheck(value, setter);
  660. Object argument;
  661. try {
  662. argument = unmarshaller.unmarshall(value);
  663. } catch ( IllegalArgumentException e ) {
  664. throw new DynamoDBMappingException("Couldn't unmarshall value " + value + " for " + setter, e);
  665. } catch ( ParseException e ) {
  666. throw new DynamoDBMappingException("Error attempting to parse date string " + value + " for "+ setter, e);
  667. }
  668. safeInvoke(setter, toReturn, argument);
  669. }
  670. /**
  671. * Returns an {@link AttributeValue} corresponding to the getter and return
  672. * result given, treating it as a non-versioned attribute.
  673. */
  674. private AttributeValue getSimpleAttributeValue(final Method getter, final Object getterReturnResult) {
  675. if ( getterReturnResult == null )
  676. return null;
  677. ArgumentMarshaller marshaller = reflector.getArgumentMarshaller(getter);
  678. return marshaller.marshall(getterReturnResult);
  679. }
  680. /**
  681. * Saves the object given into DynamoDB, using the default configuration.
  682. *
  683. * @see DynamoDBMapper#save(Object, DynamoDBSaveExpression, DynamoDBMapperConfig)
  684. */
  685. public <T extends Object> void save(T object) {
  686. save(object, null, config);
  687. }
  688. /**
  689. * Saves the object given into DynamoDB, using the default configuration and the specified saveExpression.
  690. *
  691. * @see DynamoDBMapper#save(Object, DynamoDBSaveExpression, DynamoDBMapperConfig)
  692. */
  693. public <T extends Object> void save(T object, DynamoDBSaveExpression saveExpression) {
  694. save(object, saveExpression, config);
  695. }
  696. private boolean needAutoGenerateAssignableKey(Class<?> clazz, Object object) {
  697. Collection<Method> keyGetters = reflector.getPrimaryKeyGetters(clazz);
  698. boolean forcePut = false;
  699. /*
  700. * Determine if there are any auto-assigned keys to assign. If so, force
  701. * a put and assign the keys.
  702. */
  703. boolean hashKeyGetterFound = false;
  704. for ( Method method : keyGetters ) {
  705. Object getterResult = safeInvoke(method, object);
  706. if ( getterResult == null && reflector.isAssignableKey(method) ) {
  707. forcePut = true;
  708. }
  709. if ( ReflectionUtils.getterOrFieldHasAnnotation(method, DynamoDBHashKey.class) ) {
  710. hashKeyGetterFound = true;
  711. }
  712. }
  713. if ( !hashKeyGetterFound ) {
  714. throw new DynamoDBMappingException("No " + DynamoDBHashKey.class + " annotation found in class " + clazz);
  715. }
  716. return forcePut;
  717. }
  718. /**
  719. * Saves the object given into DynamoDB, using the specified configuration.
  720. *
  721. * @see DynamoDBMapper#save(Object, DynamoDBSaveExpression, DynamoDBMapperConfig)
  722. */
  723. public <T extends Object> void save(T object, DynamoDBMapperConfig config) {
  724. save(object, null, config);
  725. }
  726. /**
  727. * Saves an item in DynamoDB. The service method used is determined by the
  728. * {@link DynamoDBMapperConfig#getSaveBehavior()} value, to use either
  729. * {@link AmazonDynamoDB#putItem(PutItemRequest)} or
  730. * {@link AmazonDynamoDB#updateItem(UpdateItemRequest)}:
  731. * <ul>
  732. * <li><b>UPDATE</b> (default) : UPDATE will not affect unmodeled attributes
  733. * on a save operation, and a null value for a modeled attribute will
  734. * remove it from that item in DynamoDB. Because of a limitation of the
  735. * updateItem request, the implementation of UPDATE will send a putItem
  736. * request when a key-only object is being saved, and it will send another
  737. * updateItem request if the given key(s) already exists in the table.</li>
  738. * <li><b>UPDATE_SKIP_NULL_ATTRIBUTES</b> : Similar to UPDATE except that it
  739. * ignores any null value attribute(s) and will NOT remove them from that
  740. * item in DynamoDB. It also guarantees to send only a single updateItem
  741. * request, regardless of whether the object is key-only.</li>
  742. * <li><b>CLOBBER</b> : CLOBBER will clear and replace all attributes,
  743. * including unmodeled ones (delete and recreate), on save. Versioned field
  744. * constraints will also be disregarded.</li>
  745. * </ul>
  746. *
  747. *
  748. * Any options specified in the saveExpression parameter will be overlaid on
  749. * any constraints due to versioned attributes.
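 *
 * A minimal sketch of overriding the save behavior for a single call (no
 * save expression, CLOBBER semantics):
 *
 * <pre class="brush: java">
 * mapper.save(obj, null, new DynamoDBMapperConfig(SaveBehavior.CLOBBER));
 * </pre>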
  750. *
  751. * @param object
  752. * The object to save into DynamoDB
  753. * @param saveExpression
  754. * The options to apply to this save request
  755. * @param config
  756. * The configuration to use, which overrides the default provided
  757. * at object construction.
  758. *
  759. * @see DynamoDBMapperConfig.SaveBehavior
  760. */
  761. public <T extends Object> void save(T object, DynamoDBSaveExpression saveExpression, final DynamoDBMapperConfig config) {
  762. final DynamoDBMapperConfig finalConfig = mergeConfig(config);
  763. @SuppressWarnings("unchecked")
  764. Class<? extends T> clazz = (Class<? extends T>) object.getClass();
  765. String tableName = getTableName(clazz, finalConfig);
  766. /*
  767. * We force a putItem request instead of updateItem request either when
  768. * CLOBBER is configured, or part of the primary key of the object needs
  769. * to be auto-generated.
  770. */
  771. boolean forcePut = (finalConfig.getSaveBehavior() == SaveBehavior.CLOBBER)
  772. || needAutoGenerateAssignableKey(clazz, object);
  773. SaveObjectHandler saveObjectHandler;
  774. if (forcePut) {
  775. saveObjectHandler = this.new SaveObjectHandler(clazz, object,
  776. tableName, finalConfig, saveExpression) {
  777. @Override
  778. protected void onKeyAttributeValue(String attributeName,
  779. AttributeValue keyAttributeValue) {
  780. /* Treat key values as common attribute value updates. */
  781. getAttributeValueUpdates().put(attributeName,
  782. new AttributeValueUpdate().withValue(keyAttributeValue)
  783. .withAction("PUT"));
  784. }
  785. /* Use default implementation of onNonKeyAttribute(...) */
  786. @Override
  787. protected void onNullNonKeyAttribute(String attributeName) {
  788. /* When doing a force put, we can safely ignore the null-valued attributes. */
  789. return;
  790. }
  791. @Override
  792. protected void executeLowLevelRequest() {
  793. /* Send a putItem request */
  794. doPutItem();
  795. }
  796. };
  797. } else {
  798. saveObjectHandler = this.new SaveObjectHandler(clazz, object,
  799. tableName, finalConfig, saveExpression) {
  800. @Override
  801. protected void onKeyAttributeValue(String attributeName,
  802. AttributeValue keyAttributeValue) {
  803. /* Put it in the key collection which is later used in the updateItem request. */
  804. getKeyAttributeValues().put(attributeName, keyAttributeValue);
  805. }
  806. @Override
  807. protected void onNonKeyAttribute(String attributeName,
  808. AttributeValue currentValue) {
  809. /* If it's a set attribute and the mapper is configured with APPEND_SET,
  810. * we do an "ADD" update instead of the default "PUT".
  811. */
  812. if (getLocalSaveBehavior() == SaveBehavior.APPEND_SET) {
  813. if (currentValue.getBS() != null
  814. || currentValue.getNS() != null
  815. || currentValue.getSS() != null) {
  816. getAttributeValueUpdates().put(
  817. attributeName,
  818. new AttributeValueUpdate().withValue(
  819. currentValue).withAction("ADD"));
  820. return;
  821. }
  822. }
  823. /* Otherwise, we do the default "PUT" update. */
  824. super.onNonKeyAttribute(attributeName, currentValue);
  825. }
  826. @Override
  827. protected void onNullNonKeyAttribute(String attributeName) {
  828. /*
  829. * If UPDATE_SKIP_NULL_ATTRIBUTES or APPEND_SET is
  830. * configured, we don't delete null value attributes.
  831. */
  832. if (getLocalSaveBehavior() == SaveBehavior.UPDATE_SKIP_NULL_ATTRIBUTES
  833. || getLocalSaveBehavior() == SaveBehavior.APPEND_SET) {
  834. return;
  835. }
  836. else {
  837. /* Delete attributes that are set as null in the object. */
  838. getAttributeValueUpdates()
  839. .put(attributeName,
  840. new AttributeValueUpdate()
  841. .withAction("DELETE"));
  842. }
  843. }
  844. @Override
  845. protected void executeLowLevelRequest() {
  846. UpdateItemResult updateItemResult = doUpdateItem();
  847. // The UpdateItem request is specified to return ALL_NEW
  848. // attributes of the affected item. So if the returned
  849. // UpdateItemResult does not include any ReturnedAttributes,
  850. // it indicates the UpdateItem failed silently (e.g. the
  851. // key-only-put nightmare -
  852. // https://forums.aws.amazon.com/thread.jspa?threadID=86798&tstart=25),
  853. // in which case we should re-send a PutItem
  854. // request instead.
  855. if (updateItemResult.getAttributes() == null
  856. || updateItemResult.getAttributes().isEmpty()) {
  857. // Before we proceed with PutItem, we need to put all
  858. // the key attributes (prepared for the
  859. // UpdateItemRequest) into the AttributeValueUpdates
  860. // collection.
  861. for (String keyAttributeName : getKeyAttributeValues().keySet()) {
  862. getAttributeValueUpdates().put(keyAttributeName,
  863. new AttributeValueUpdate()
  864. .withValue(getKeyAttributeValues().get(keyAttributeName))
  865. .withAction("PUT"));
  866. }
  867. doPutItem();
  868. }
  869. }
  870. };
  871. }
  872. saveObjectHandler.execute();
  873. }
  874. /**
  875. * The handler for saving an object using DynamoDBMapper. Callers should
  876. * implement the abstract methods to provide the expected behavior in each
  877. * scenario, and this handler will take care of all the other basic workflow
  878. * and common operations.
  879. */
  880. protected abstract class SaveObjectHandler {
  881. protected final Object object;
  882. protected final Class<?> clazz;
  883. private final String tableName;
  884. private final DynamoDBMapperConfig saveConfig;
  885. private final Map<String, AttributeValue> key;
  886. private final Map<String, AttributeValueUpdate> updateValues;
  887. /**
  888. * Any expected value conditions specified by the implementation of
  889. * DynamoDBMapper, e.g. value assertions on versioned attributes.
  890. */
  891. private final Map<String, ExpectedAttributeValue> internalExpectedValueAssertions;
  892. /**
  893. * Additional expected value conditions specified by the user.
  894. */
  895. protected final Map<String, ExpectedAttributeValue> userProvidedExpectedValueConditions;
  896. /**
  897. * Condition operator on the additional expected value conditions specified by the user.
  898. */
  899. protected final String userProvidedConditionOperator;
  900. private final List<ValueUpdate> inMemoryUpdates;
  901. /**
  902. * Constructs a handler for saving the specified model object.
  903. *
  904. * @param object The model object to be saved.
  905. * @param clazz The domain class of the object.
  906. * @param tableName The table name.
  907. * @param saveConfig The mapper configuration used for this save.
  908. * @param saveExpression The save expression, including the user-provided conditions and an optional logic operator.
  909. */
  910. public SaveObjectHandler(Class<?> clazz, Object object, String tableName, DynamoDBMapperConfig saveConfig, DynamoDBSaveExpression saveExpression) {
  911. this.clazz = clazz;
  912. this.object = object;
  913. this.tableName = tableName;
  914. this.saveConfig = saveConfig;
  915. if (saveExpression != null) {
  916. userProvidedExpectedValueConditions = saveExpression.getExpected();
  917. userProvidedConditionOperator = saveExpression.getConditionalOperator();
  918. } else {
  919. userProvidedExpectedValueConditions = null;
  920. userProvidedConditionOperator = null;
  921. }
  922. updateValues = new HashMap<String, AttributeValueUpdate>();
  923. internalExpectedValueAssertions = new HashMap<String, ExpectedAttributeValue>();
  924. inMemoryUpdates = new LinkedList<ValueUpdate>();
  925. key = new HashMap<String, AttributeValue>();
  926. }
  927. /**
  928. * The general workflow of a save operation.
  929. */
  930. public void execute() {
  931. Collection<Method> keyGetters = reflector.getPrimaryKeyGetters(clazz);
  932. /*
  933. * First handle keys
  934. */
  935. for ( Method method : keyGetters ) {
  936. Object getterResult = safeInvoke(method, object);
  937. String attributeName = reflector.getAttributeName(method);
  938. if ( getterResult == null && reflector.isAssignableKey(method) ) {
  939. onAutoGenerateAssignableKey(method, attributeName);
  940. }
  941. else {
  942. AttributeValue newAttributeValue = getSimpleAttributeValue(method, getterResult);
  943. if ( newAttributeValue == null ) {
  944. throw new DynamoDBMappingException("Null or empty value for key: " + method);
  945. }
  946. onKeyAttributeValue(attributeName, newAttributeValue);
  947. }
  948. }
  949. /*
  950. * Next construct an update for every non-key property
  951. */
  952. for ( Method method : reflector.getRelevantGetters(clazz) ) {
  953. // Skip any key methods, since they are handled separately
  954. if ( keyGetters.contains(method) )
  955. continue;
  956. Object getterResult = safeInvoke(method, object);
  957. String attributeName = reflector.getAttributeName(method);
  958. /*
  959. * If this is a versioned field, update it
  960. */
  961. if ( reflector.isVersionAttributeGetter(method) ) {
  962. onVersionAttribute(method, getterResult, attributeName);
  963. }
  964. /*
  965. * Otherwise apply the update value for this attribute.
  966. */
  967. else {
  968. AttributeValue currentValue = getSimpleAttributeValue(method, getterResult);
  969. if ( currentValue != null ) {
  970. onNonKeyAttribute(attributeName, currentValue);
  971. } else {
  972. onNullNonKeyAttribute(attributeName);
  973. }
  974. }
  975. }
  976. /*
  977. * Execute the implementation of the low level request.
  978. */
  979. executeLowLevelRequest();
  980. /*
  981. * Finally, after the service call has succeeded, update the
  982. * in-memory object with new field values as appropriate. This
  983. * currently takes into account auto-generated keys and versioned
  984. * attributes.
  985. */
  986. for ( ValueUpdate update : inMemoryUpdates ) {
  987. update.apply();
  988. }
  989. }
  990. /**
  991. * Implement this method to do the necessary operations when a key
  992. * attribute is set with some value.
  993. *
  994. * @param attributeName
  995. * The name of the key attribute.
  996. * @param keyAttributeValue
  997. * The AttributeValue of the key attribute as specified in
  998. * the object.
  999. */
  1000. protected abstract void onKeyAttributeValue(String attributeName, AttributeValue keyAttributeValue);
  1001. /**
  1002. * Implement this method for necessary operations when a non-key
  1003. * attribute is set to a non-null value in the object.
  1004. * The default implementation simply adds a "PUT" update for the given attribute.
  1005. *
  1006. * @param attributeName
  1007. * The name of the non-key attribute.
  1008. * @param currentValue
  1009. * The updated value of the given attribute.
  1010. */
  1011. protected void onNonKeyAttribute(String attributeName, AttributeValue currentValue) {
  1012. updateValues.put(attributeName, new AttributeValueUpdate()
  1013. .withValue(currentValue).withAction("PUT"));
  1014. }
  1015. /**
  1016. * Implement this method for necessary operations when a non-key
  1017. * attribute is set to null in the object.
  1018. *
  1019. * @param attributeName
  1020. * The name of the non-key attribute.
  1021. */
  1022. protected abstract void onNullNonKeyAttribute(String attributeName);
  1023. /**
  1024. * Implement this method to send the low-level request that is necessary
  1025. * to complete the save operation.
  1026. */
  1027. protected abstract void executeLowLevelRequest();
  1028. /** Get the SaveBehavior used locally for this save operation. **/
  1029. protected SaveBehavior getLocalSaveBehavior() {
  1030. return saveConfig.getSaveBehavior();
  1031. }
  1032. /** Get the table name **/
  1033. protected String getTableName() {
  1034. return tableName;
  1035. }
  1036. /** Get the map of all the specified keys of the saved object. **/
  1037. protected Map<String, AttributeValue> getKeyAttributeValues() {
  1038. return key;
  1039. }
  1040. /** Get the map of AttributeValueUpdate on each modeled attribute. **/
  1041. protected Map<String, AttributeValueUpdate> getAttributeValueUpdates() {
  1042. return updateValues;
  1043. }
  1044. /**
  1045. * Merge and return all the expected value conditions (either
  1046. * user-specified or imposed by the internal implementation of
  1047. * DynamoDBMapper) for this save operation.
  1048. */
  1049. protected Map<String, ExpectedAttributeValue> mergeExpectedAttributeValueConditions() {
  1050. return DynamoDBMapper.mergeExpectedAttributeValueConditions(
  1051. internalExpectedValueAssertions,
  1052. userProvidedExpectedValueConditions,
  1053. userProvidedConditionOperator);
  1054. }
  1055. /** Get the list of all the necessary in-memory update on the object. **/
  1056. protected List<ValueUpdate> getInMemoryUpdates() {
  1057. return inMemoryUpdates;
  1058. }
  1059. /**
  1060. * Save the item using an UpdateItem request. The handler will call this
  1061. * method if
  1062. * <ul>
  1063. * <li>CLOBBER configuration is not being used;
  1064. * <li>AND the item does not contain an auto-generated key value;
  1065. * </ul>
  1066. * <p>
  1067. * The ReturnValues parameter for the UpdateItem request is set to
  1068. * ALL_NEW, which means the service should return all of the attributes
  1069. * of the new version of the item after the update. The handler will use
  1070. * the returned attributes to detect silent failure on the server-side.
  1071. */
  1072. protected UpdateItemResult doUpdateItem() {
  1073. UpdateItemRequest req = new UpdateItemRequest()
  1074. .withTableName(getTableName())
  1075. .withKey(getKeyAttributeValues())
  1076. .withAttributeUpdates(
  1077. transformAttributeUpdates(
  1078. this.clazz,
  1079. getKeyAttributeValues(),
  1080. getAttributeValueUpdates(),
  1081. saveConfig))
  1082. .withExpected(mergeExpectedAttributeValueConditions())
  1083. .withConditionalOperator(userProvidedConditionOperator)
  1084. .withReturnValues(ReturnValue.ALL_NEW)
  1085. .withRequestMetricCollector(saveConfig.getRequestMetricCollector());
  1086. return db.updateItem(applyUserAgent(req));
  1087. }
  1088. /**
  1089. * Save the item using a PutItem request. The handler will call this
  1090. * method if
  1091. * <ul>
  1092. * <li> CLOBBER configuration is being used;
  1093. * <li> OR the item contains an auto-generated key value;
  1094. * <li> OR an UpdateItem request has silently failed (200 response with
  1095. * no affected attribute), which indicates the key-only-put scenario
  1096. * that we used to handle by the keyOnlyPut(...) hack.
  1097. * </ul>
  1098. */
  1099. protected PutItemResult doPutItem() {
  1100. Map<String, AttributeValue> attributeValues = convertToItem(getAttributeValueUpdates());
  1101. attributeValues = transformAttributes(
  1102. toParameters(attributeValues,
  1103. this.clazz,
  1104. saveConfig));
  1105. PutItemRequest req = new PutItemRequest()
  1106. .withTableName(getTableName())
  1107. .withItem(attributeValues)
  1108. .withExpected(mergeExpectedAttributeValueConditions())
  1109. .withConditionalOperator(userProvidedConditionOperator)
  1110. .withRequestMetricCollector(saveConfig.getRequestMetricCollector());
  1111. return db.putItem(applyUserAgent(req));
  1112. }
  1113. private void onAutoGenerateAssignableKey(Method method, String attributeName) {
  1114. AttributeValue newVersionValue = getAutoGeneratedKeyAttributeValue(method, null);
  1115. updateValues.put(attributeName,
  1116. new AttributeValueUpdate().withAction("PUT").withValue(newVersionValue));
  1117. inMemoryUpdates.add(new ValueUpdate(method, newVersionValue, object));
  1118. if ( getLocalSaveBehavior() != SaveBehavior.CLOBBER
  1119. && !internalExpectedValueAssertions.containsKey(attributeName)) {
  1120. // Add an expect clause to make sure that the item
  1121. // doesn't already exist, since it's supposed to be new
  1122. ExpectedAttributeValue expected = new ExpectedAttributeValue();
  1123. expected.setExists(false);
  1124. internalExpectedValueAssertions.put(attributeName, expected);
  1125. }
  1126. }
  1127. private void onVersionAttribute(Method method, Object getterResult,
  1128. String attributeName) {
  1129. if ( getLocalSaveBehavior() != SaveBehavior.CLOBBER
  1130. && !internalExpectedValueAssertions.containsKey(attributeName)) {
  1131. // First establish the expected (current) value for the
  1132. // update call
  1133. ExpectedAttributeValue expected = new ExpectedAttributeValue();
  1134. // For new objects, insist that the value doesn't exist.
  1135. // For existing ones, insist it has the old value.
  1136. AttributeValue currentValue = getSimpleAttributeValue(method, getterResult);
  1137. expected.setExists(currentValue != null);
  1138. if ( currentValue != null ) {
  1139. expected.setValue(currentValue);
  1140. }
  1141. internalExpectedValueAssertions.put(attributeName, expected);
  1142. }
  1143. AttributeValue newVersionValue = getVersionAttributeValue(method, getterResult);
  1144. updateValues
  1145. .put(attributeName, new AttributeValueUpdate().withAction("PUT").withValue(newVersionValue));
  1146. inMemoryUpdates.add(new ValueUpdate(method, newVersionValue, object));
  1147. }
  1148. }
  1149. /**
  1150. * Deletes the given object from its DynamoDB table using the default configuration.
  1151. */
  1152. public void delete(Object object) {
  1153. delete(object, null, this.config);
  1154. }
  1155. /**
  1156. * Deletes the given object from its DynamoDB table using the specified deleteExpression and default configuration.
  1157. */
  1158. public void delete(Object object, DynamoDBDeleteExpression deleteExpression) {
  1159. delete(object, deleteExpression, this.config);
  1160. }
  1161. /**
  1162. * Deletes the given object from its DynamoDB table using the specified configuration.
  1163. */
  1164. public void delete(Object object, DynamoDBMapperConfig config) {
  1165. delete(object, null, config);
  1166. }
  1167. /**
  1168. * Deletes the given object from its DynamoDB table using the provided deleteExpression and provided configuration.
  1169. * Any options specified in the deleteExpression parameter will be overlaid on any constraints due to
  1170. * versioned attributes.
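 *
 * A minimal sketch of a conditional delete (assuming an attribute named
 * &quot;status&quot; on the item being deleted, and that the expected-values
 * map is supplied via {@code setExpected}):
 *
 * <pre class="brush: java">
 * Map&lt;String, ExpectedAttributeValue&gt; expected =
 *         new HashMap&lt;String, ExpectedAttributeValue&gt;();
 * expected.put(&quot;status&quot;,
 *         new ExpectedAttributeValue(new AttributeValue(&quot;INACTIVE&quot;)));
 *
 * DynamoDBDeleteExpression deleteExpression = new DynamoDBDeleteExpression();
 * deleteExpression.setExpected(expected);
 *
 * mapper.delete(obj, deleteExpression, DynamoDBMapperConfig.DEFAULT);
 * </pre>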
  1171. * @param deleteExpression
  1172. * The options to apply to this delete request
  1173. * @param config
  1174. * Config override object. If {@link SaveBehavior#CLOBBER} is
  1175. * supplied, version fields will not be considered when deleting
  1176. * the object.
  1177. */
  1178. public <T> void delete(T object, DynamoDBDeleteExpression deleteExpression, DynamoDBMapperConfig config) {
  1179. config = mergeConfig(config);
  1180. @SuppressWarnings("unchecked")
  1181. Class<T> clazz = (Class<T>) object.getClass();
  1182. String tableName = getTableName(clazz, config);
  1183. Map<String, AttributeValue> key = getKey(object, clazz);
  1184. /*
  1185. * If there is a version field, make sure we assert its value. If the
  1186. * version field is null (only should happen in unusual circumstances),
  1187. * pretend it doesn't have a version field after all.
  1188. */
  1189. Map<String, ExpectedAttributeValue> internalAssertions = new HashMap<String, ExpectedAttributeValue>();
  1190. if ( config.getSaveBehavior() != SaveBehavior.CLOBBER ) {
  1191. for ( Method method : reflector.getRelevantGetters(clazz) ) {
  1192. if ( reflector.isVersionAttributeGetter(method) ) {
  1193. Object getterResult = safeInvoke(method, object);
  1194. String attributeName = reflector.getAttributeName(method);
  1195. ExpectedAttributeValue expected = new ExpectedAttributeValue();
  1196. AttributeValue currentValue = getSimpleAttributeValue(method, getterResult);
  1197. expected.setExists(currentValue != null);
  1198. if ( currentValue != null )
  1199. expected.setValue(currentValue);
  1200. internalAssertions.put(attributeName, expected);
  1201. break;
  1202. }
  1203. }
  1204. }
  1205. // Overlay any user provided expected values onto the generated ones
  1206. Map<String, ExpectedAttributeValue> expectedValues = internalAssertions;
  1207. String conditionOperator = null;
  1208. if( deleteExpression != null ) {
  1209. expectedValues = mergeExpectedAttributeValueConditions(
  1210. internalAssertions, deleteExpression.getExpected(),
  1211. deleteExpression.getConditionalOperator());
  1212. conditionOperator = deleteExpression.getConditionalOperator();
  1213. }
  1214. DeleteItemRequest req = applyUserAgent(new DeleteItemRequest()
  1215. .withKey(key).withTableName(tableName)
  1216. .withExpected(expectedValues))
  1217. .withConditionalOperator(conditionOperator)
  1218. .withRequestMetricCollector(config.getRequestMetricCollector())
  1219. ;
  1220. db.deleteItem(req);
  1221. }
  1222. /**
  1223. * Deletes the objects given using one or more calls to the
  1224. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API. <b>No
  1225. * version checks are performed</b>, as required by the API.
  1226. *
  1227. * @see DynamoDBMapper#batchWrite(List, List, DynamoDBMapperConfig)
  1228. */
  1229. public List<FailedBatch> batchDelete(List<? extends Object> objectsToDelete) {
  1230. return batchWrite(Collections.emptyList(), objectsToDelete, this.config);
  1231. }
  1232. /**
  1233. * Deletes the objects given using one or more calls to the
  1234. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API. <b>No
  1235. * version checks are performed</b>, as required by the API.
  1236. *
  1237. * @see DynamoDBMapper#batchWrite(List, List, DynamoDBMapperConfig)
  1238. */
  1239. public List<FailedBatch> batchDelete(Object... objectsToDelete) {
  1240. return batchWrite(Collections.emptyList(), Arrays.asList(objectsToDelete), this.config);
  1241. }
  1242. /**
  1243. * Saves the objects given using one or more calls to the
  1244. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API. <b>No
  1245. * version checks are performed</b>, as required by the API.
  1246. * <p/>
  1247. * <b>This method ignores any SaveBehavior set on the mapper</b>, and
  1248. * always behaves as if SaveBehavior.CLOBBER was specified, as
  1249. * the AmazonDynamoDB.batchWriteItem() request does not support updating
  1250. * existing items.
  1251. *
  1252. * @see DynamoDBMapper#batchWrite(List, List, DynamoDBMapperConfig)
  1253. */
  1254. public List<FailedBatch> batchSave(List<? extends Object> objectsToSave) {
  1255. return batchWrite(objectsToSave, Collections.emptyList(), this.config);
  1256. }
  1257. /**
  1258. * Saves the objects given using one or more calls to the
  1259. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API. <b>No
  1260. * version checks are performed</b>, as required by the API.
  1261. * <p/>
  1262. * <b>This method ignores any SaveBehavior set on the mapper</b>, and
  1263. * always behaves as if SaveBehavior.CLOBBER was specified, as
  1264. * the AmazonDynamoDB.batchWriteItem() request does not support updating
  1265. * existing items.
  1266. *
  1267. * @see DynamoDBMapper#batchWrite(List, List, DynamoDBMapperConfig)
  1268. */
  1269. public List<FailedBatch> batchSave(Object... objectsToSave) {
  1270. return batchWrite(Arrays.asList(objectsToSave), Collections.emptyList(), this.config);
  1271. }
  1272. /**
  1273. * Saves and deletes the objects given using one or more calls to the
  1274. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API. <b>No
  1275. * version checks are performed</b>, as required by the API.
  1276. * <p/>
  1277. * <b>This method ignores any SaveBehavior set on the mapper</b>, and
  1278. * always behaves as if SaveBehavior.CLOBBER was specified, as
  1279. * the AmazonDynamoDB.batchWriteItem() request does not support updating
  1280. * existing items.
  1281. *
  1282. * @see DynamoDBMapper#batchWrite(List, List, DynamoDBMapperConfig)
  1283. */
  1284. public List<FailedBatch> batchWrite(List<? extends Object> objectsToWrite, List<? extends Object> objectsToDelete) {
  1285. return batchWrite(objectsToWrite, objectsToDelete, this.config);
  1286. }
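/*
 * Usage sketch (illustration only, not part of the mapper implementation).
 * "Customer" stands for a hypothetical @DynamoDBTable-annotated class and
 * "dynamo" for an AmazonDynamoDB client; everything shown is caller-side code.
 *
 *   DynamoDBMapper mapper = new DynamoDBMapper(dynamo);
 *   Customer c1 = new Customer(), c2 = new Customer();
 *   Customer obsolete = mapper.load(Customer.class, "id-1");
 *
 *   // Saves are unconditional puts (CLOBBER semantics); deletes skip version checks.
 *   List<FailedBatch> failures =
 *       mapper.batchWrite(Arrays.asList(c1, c2), Arrays.asList(obsolete));
 */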
  1287. /**
  1288. * Saves and deletes the objects given using one or more calls to the
  1289. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)} API.
  1290. *
  1291. * @param objectsToWrite
  1292. * A list of objects to save to DynamoDB. <b>No version checks
  1293. * are performed</b>, as required by the
  1294. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)}
  1295. * API.
  1296. * @param objectsToDelete
  1297. * A list of objects to delete from DynamoDB. <b>No version
  1298. * checks are performed</b>, as required by the
  1299. * {@link AmazonDynamoDB#batchWriteItem(BatchWriteItemRequest)}
  1300. * API.
  1301. * @param config
  1302. * Only {@link DynamoDBMapperConfig#getTableNameOverride()} is
  1303. * considered; if specified, all objects in the two parameter
  1304. * lists will be considered to belong to the given table
  1305. * override. In particular, this method <b>always acts as
  1306. * if SaveBehavior.CLOBBER was specified</b> regardless of the
  1307. * value of the config parameter.
  1308. * @return A list of failed batches which includes the unprocessed items and
  1309. * the exceptions causing the failure.
  1310. */
  1311. public List<FailedBatch> batchWrite(List<? extends Object> objectsToWrite, List<? extends Object> objectsToDelete, DynamoDBMapperConfig config) {
  1312. config = mergeConfig(config);
  1313. List<FailedBatch> totalFailedBatches = new LinkedList<FailedBatch>();
  1314. HashMap<String, List<WriteRequest>> requestItems = new HashMap<String, List<WriteRequest>>();
  1315. List<ValueUpdate> inMemoryUpdates = new LinkedList<ValueUpdate>();
  1316. for ( Object toWrite : objectsToWrite ) {
  1317. Class<?> clazz = toWrite.getClass();
  1318. String tableName = getTableName(clazz, config);
  1319. Map<String, AttributeValue> attributeValues = new HashMap<String, AttributeValue>();
  1320. // Look at every getter and construct a value object for it
  1321. for ( Method method : reflector.getRelevantGetters(clazz) ) {
  1322. Object getterResult = safeInvoke(method, toWrite);
  1323. String attributeName = reflector.getAttributeName(method);
  1324. AttributeValue currentValue = null;
  1325. if ( getterResult == null && reflector.isAssignableKey(method) ) {
  1326. currentValue = getAutoGeneratedKeyAttributeValue(method, getterResult);
  1327. inMemoryUpdates.add(new ValueUpdate(method, currentValue, toWrite));
  1328. } else {
  1329. currentValue = getSimpleAttributeValue(method, getterResult);
  1330. }
  1331. if ( currentValue != null ) {
  1332. attributeValues.put(attributeName, currentValue);
  1333. }
  1334. }
  1335. if ( !requestItems.containsKey(tableName) ) {
  1336. requestItems.put(tableName, new LinkedList<WriteRequest>());
  1337. }
  1338. AttributeTransformer.Parameters<?> parameters =
  1339. toParameters(attributeValues, clazz, config);
  1340. requestItems.get(tableName).add(
  1341. new WriteRequest().withPutRequest(
  1342. new PutRequest().withItem(
  1343. transformAttributes(parameters))));
  1344. }
  1345. for ( Object toDelete : objectsToDelete ) {
  1346. Class<?> clazz = toDelete.getClass();
  1347. String tableName = getTableName(clazz, config);
  1348. Map<String, AttributeValue> key = getKey(toDelete);
  1349. if ( !requestItems.containsKey(tableName) ) {
  1350. requestItems.put(tableName, new LinkedList<WriteRequest>());
  1351. }
  1352. requestItems.get(tableName).add(
  1353. new WriteRequest().withDeleteRequest(new DeleteRequest().withKey(key)));
  1354. }
  1355. // Break into chunks of 25 items and make service requests to DynamoDB
  1356. while ( !requestItems.isEmpty() ) {
  1357. HashMap<String, List<WriteRequest>> batch = new HashMap<String, List<WriteRequest>>();
  1358. int i = 0;
  1359. Iterator<Entry<String, List<WriteRequest>>> tableIter = requestItems.entrySet().iterator();
  1360. while ( tableIter.hasNext() && i < 25 ) {
  1361. Entry<String, List<WriteRequest>> tableRequest = tableIter.next();
  1362. batch.put(tableRequest.getKey(), new LinkedList<WriteRequest>());
  1363. Iterator<WriteRequest> writeRequestIter = tableRequest.getValue().iterator();
  1364. while ( writeRequestIter.hasNext() && i++ < 25 ) {
  1365. WriteRequest writeRequest = writeRequestIter.next();
  1366. batch.get(tableRequest.getKey()).add(writeRequest);
  1367. writeRequestIter.remove();
  1368. }
  1369. // If we've processed all the write requests for this table,
  1370. // remove it from the parent iterator.
  1371. if ( !writeRequestIter.hasNext() ) {
  1372. tableIter.remove();
  1373. }
  1374. }
  1375. List<FailedBatch> failedBatches = writeOneBatch(batch);
  1376. if (failedBatches != null) {
  1377. totalFailedBatches.addAll(failedBatches);
1378. // If the failures contain a throttling exception, back off before continuing
  1379. if (containsThrottlingException(failedBatches)) {
  1380. try {
  1381. Thread.sleep(1000 * 2);
  1382. } catch (InterruptedException e) {
  1383. Thread.currentThread().interrupt();
  1384. throw new AmazonClientException(e.getMessage(), e);
  1385. }
  1386. }
  1387. }
  1388. }
  1389. // Once the entire batch is processed, update assigned keys in memory
  1390. for ( ValueUpdate update : inMemoryUpdates ) {
  1391. update.apply();
  1392. }
  1393. return totalFailedBatches;
  1394. }
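/*
 * Sketch of inspecting the returned failures (illustration only; "mapper",
 * "objectsToWrite" and "objectsToDelete" are caller-side names):
 *
 *   List<FailedBatch> failures = mapper.batchWrite(objectsToWrite, objectsToDelete);
 *   for (FailedBatch failure : failures) {
 *       int unprocessed = 0;
 *       for (List<WriteRequest> requests : failure.getUnprocessedItems().values()) {
 *           unprocessed += requests.size();
 *       }
 *       // failure.getException() explains why these requests were not processed;
 *       // callers typically log it and re-submit the unprocessed items.
 *   }
 */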
  1395. /**
1396. * Processes one batch of requests (max 25). It divides the batch in two if it
1397. * receives a request-too-large exception (i.e. the total size of the request is beyond 1 MB).
  1398. */
  1399. private List<FailedBatch> writeOneBatch(Map<String, List<WriteRequest>> batch) {
  1400. List<FailedBatch> failedBatches = new LinkedList<FailedBatch>();
  1401. Map<String, List<WriteRequest>> firstHalfBatch = new HashMap<String, List<WriteRequest>>();
  1402. Map<String, List<WriteRequest>> secondHalfBatch = new HashMap<String, List<WriteRequest>>();
  1403. FailedBatch failedBatch = callUntilCompletion(batch);
  1404. if (failedBatch != null) {
  1405. // If the exception is request entity too large, we divide the batch
  1406. // into smaller parts.
  1407. if (failedBatch.getException() instanceof AmazonServiceException
  1408. && RetryUtils.isRequestEntityTooLargeException((AmazonServiceException) failedBatch.getException())) {
1409. // If only one item is left, its size must be beyond 64 KB, which
1410. // exceeds the item size limit.
  1411. if (computeFailedBatchSize(failedBatch) == 1) {
  1412. failedBatches.add(failedBatch);
  1413. } else {
  1414. divideBatch(batch, firstHalfBatch, secondHalfBatch);
  1415. failedBatches.addAll(writeOneBatch(firstHalfBatch));
  1416. failedBatches.addAll(writeOneBatch(secondHalfBatch));
  1417. }
  1418. } else {
  1419. failedBatches.add(failedBatch);
  1420. }
  1421. }
  1422. return failedBatches;
  1423. }
  1424. /**
1425. * Checks whether any of the failed batches contains a throttling exception.
  1426. */
1427. private boolean containsThrottlingException(List<FailedBatch> failedBatches) {
  1428. for (FailedBatch failedBatch : failedBatches) {
  1429. Exception e = failedBatch.getException();
  1430. if (e instanceof AmazonServiceException
  1431. && RetryUtils.isThrottlingException((AmazonServiceException) e)) {
  1432. return true;
  1433. }
  1434. }
  1435. return false;
  1436. }
  1437. /**
1438. * Divides the batch of write requests into two smaller batches, each containing half of the elements.
  1439. */
  1440. private void divideBatch(Map<String, List<WriteRequest>> batch, Map<String, List<WriteRequest>> firstHalfBatch, Map<String, List<WriteRequest>> secondHalfBatch) {
  1441. for (String key : batch.keySet()) {
  1442. List<WriteRequest> requests = batch.get(key);
  1443. List<WriteRequest> firstHalfRequests = requests.subList(0, requests.size() / 2);
  1444. List<WriteRequest> secondHalfRequests = requests.subList(requests.size() / 2, requests.size());
  1445. firstHalfBatch.put(key, firstHalfRequests);
  1446. secondHalfBatch.put(key, secondHalfRequests);
  1447. }
  1448. }
  1449. /**
  1450. * Count the total number of unprocessed items in the failed batch.
  1451. */
  1452. private int computeFailedBatchSize(FailedBatch failedBatch) {
  1453. int count = 0;
  1454. for (String tableName : failedBatch.getUnprocessedItems().keySet()) {
  1455. count += failedBatch.getUnprocessedItems().get(tableName).size();
  1456. }
  1457. return count;
  1458. }
  1459. /**
  1460. * Continue trying to process the batch until it finishes or an exception
  1461. * occurs.
  1462. */
  1463. private FailedBatch callUntilCompletion(Map<String, List<WriteRequest>> batch) {
  1464. BatchWriteItemResult result = null;
  1465. int retries = 0;
  1466. FailedBatch failedBatch = null;
  1467. while (true) {
  1468. try {
  1469. result = db.batchWriteItem(new BatchWriteItemRequest().withRequestItems(batch));
  1470. } catch (Exception e) {
  1471. failedBatch = new FailedBatch();
  1472. failedBatch.setUnprocessedItems(batch);
  1473. failedBatch.setException(e);
  1474. return failedBatch;
  1475. }
  1476. retries++;
  1477. batch = result.getUnprocessedItems();
  1478. if (batch.size() > 0) {
  1479. pauseExponentially(retries);
  1480. } else {
  1481. break;
  1482. }
  1483. }
  1484. return failedBatch;
  1485. }
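/*
 * pauseExponentially(retries), used above, backs off between retries. A minimal
 * sketch of that backoff pattern (an assumption for illustration, not necessarily
 * the exact implementation used elsewhere in this class):
 *
 *   long delay = (long) (Math.random() * (50L << Math.min(retries, 20)));
 *   try {
 *       Thread.sleep(delay);
 *   } catch (InterruptedException ie) {
 *       Thread.currentThread().interrupt();
 *       throw new AmazonClientException(ie.getMessage(), ie);
 *   }
 */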
  1486. /**
  1487. * Retrieves multiple items from multiple tables using their primary keys.
  1488. *
  1489. * @see DynamoDBMapper#batchLoad(List, DynamoDBMapperConfig)
  1490. *
  1491. * @return A map of the loaded objects. Each key in the map is the name of a
  1492. * DynamoDB table. Each value in the map is a list of objects that
  1493. * have been loaded from that table. All objects for each table can
  1494. * be cast to the associated user defined type that is annotated as
  1495. * mapping that table.
  1496. */
  1497. public Map<String, List<Object>> batchLoad(List<Object> itemsToGet) {
  1498. return batchLoad(itemsToGet, this.config);
  1499. }
  1500. /**
  1501. * Retrieves multiple items from multiple tables using their primary keys.
  1502. *
  1503. * @param itemsToGet
  1504. * Key objects, corresponding to the class to fetch, with their
  1505. * primary key values set.
  1506. * @param config
  1507. * Only {@link DynamoDBMapperConfig#getTableNameOverride()} and
  1508. * {@link DynamoDBMapperConfig#getConsistentReads()} are
  1509. * considered.
  1510. *
  1511. * @return A map of the loaded objects. Each key in the map is the name of a
  1512. * DynamoDB table. Each value in the map is a list of objects that
  1513. * have been loaded from that table. All objects for each table can
  1514. * be cast to the associated user defined type that is annotated as
  1515. * mapping that table.
  1516. */
  1517. public Map<String, List<Object>> batchLoad(List<Object> itemsToGet, DynamoDBMapperConfig config) {
  1518. config = mergeConfig(config);
  1519. boolean consistentReads = (config.getConsistentReads() == ConsistentReads.CONSISTENT);
  1520. if ( itemsToGet == null || itemsToGet.isEmpty() ) {
  1521. return new HashMap<String, List<Object>>();
  1522. }
  1523. Map<String, KeysAndAttributes> requestItems = new HashMap<String, KeysAndAttributes>();
  1524. Map<String, Class<?>> classesByTableName = new HashMap<String, Class<?>>();
  1525. Map<String, List<Object>> resultSet = new HashMap<String, List<Object>>();
  1526. int count = 0;
  1527. for ( Object keyObject : itemsToGet ) {
  1528. Class<?> clazz = keyObject.getClass();
  1529. String tableName = getTableName(clazz, config);
  1530. classesByTableName.put(tableName, clazz);
  1531. if ( !requestItems.containsKey(tableName) ) {
  1532. requestItems.put(
  1533. tableName,
  1534. new KeysAndAttributes().withConsistentRead(consistentReads).withKeys(
  1535. new LinkedList<Map<String, AttributeValue>>()));
  1536. }
  1537. requestItems.get(tableName).getKeys().add(getKey(keyObject));
1538. // Reached the maximum number of items that can be handled in a single batchGet request
  1539. if ( ++count == 100 ) {
  1540. processBatchGetRequest(classesByTableName, requestItems, resultSet, config);
  1541. requestItems.clear();
  1542. count = 0;
  1543. }
  1544. }
  1545. if ( count > 0 ) {
  1546. processBatchGetRequest(classesByTableName, requestItems, resultSet, config);
  1547. }
  1548. return resultSet;
  1549. }
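/*
 * Usage sketch (illustration only; "Customer" and "Order" are hypothetical
 * mapped classes whose key attributes are set on the objects passed in):
 *
 *   Customer customerKey = new Customer();
 *   customerKey.setId("id-1");
 *   Order orderKey = new Order();
 *   orderKey.setId("order-9");
 *
 *   Map<String, List<Object>> itemsByTable =
 *       mapper.batchLoad(Arrays.<Object>asList(customerKey, orderKey));
 *   // Each map key is a table name; each value list can be cast to the mapped type.
 */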
  1550. /**
1551. * Retrieves the attributes for multiple items from multiple tables using
1552. * their primary keys, with one or more calls to the
1553. * {@link AmazonDynamoDB#batchGetItem(BatchGetItemRequest)} API.
  1554. *
  1555. * @return A map of the loaded objects. Each key in the map is the name of a
  1556. * DynamoDB table. Each value in the map is a list of objects that
  1557. * have been loaded from that table. All objects for each table can
  1558. * be cast to the associated user defined type that is annotated as
  1559. * mapping that table.
  1560. *
  1561. * @see #batchLoad(List, DynamoDBMapperConfig)
  1562. * @see #batchLoad(Map, DynamoDBMapperConfig)
  1563. */
  1564. public Map<String, List<Object>> batchLoad(Map<Class<?>, List<KeyPair>> itemsToGet) {
  1565. return batchLoad(itemsToGet, this.config);
  1566. }
  1567. /**
  1568. * Retrieves multiple items from multiple tables using their primary keys.
  1569. * Valid only for tables with a single hash key, or a single hash and range
  1570. * key. For other schemas, use
  1571. * {@link DynamoDBMapper#batchLoad(List, DynamoDBMapperConfig)}
  1572. *
  1573. * @param itemsToGet
  1574. * Map from class to load to list of primary key attributes.
  1575. * @param config
  1576. * Only {@link DynamoDBMapperConfig#getTableNameOverride()} and
  1577. * {@link DynamoDBMapperConfig#getConsistentReads()} are
  1578. * considered.
  1579. *
  1580. * @return A map of the loaded objects. Each key in the map is the name of a
  1581. * DynamoDB table. Each value in the map is a list of objects that
  1582. * have been loaded from that table. All objects for each table can
  1583. * be cast to the associated user defined type that is annotated as
  1584. * mapping that table.
  1585. */
  1586. public Map<String, List<Object>> batchLoad(Map<Class<?>, List<KeyPair>> itemsToGet, DynamoDBMapperConfig config) {
  1587. List<Object> keys = new ArrayList<Object>();
  1588. if ( itemsToGet != null ) {
  1589. for ( Class<?> clazz : itemsToGet.keySet() ) {
  1590. if ( itemsToGet.get(clazz) != null ) {
  1591. for ( KeyPair keyPair : itemsToGet.get(clazz) ) {
  1592. keys.add(createKeyObject(clazz, keyPair.getHashKey(), keyPair.getRangeKey()));
  1593. }
  1594. }
  1595. }
  1596. }
  1597. return batchLoad(keys, config);
  1598. }
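/*
 * Usage sketch with KeyPair (illustration only; "Customer" is a hypothetical
 * mapped class with both a hash and a range key):
 *
 *   Map<Class<?>, List<KeyPair>> itemsToGet = new HashMap<Class<?>, List<KeyPair>>();
 *   itemsToGet.put(Customer.class, Arrays.asList(
 *       new KeyPair().withHashKey("id-1").withRangeKey("2014-01-01"),
 *       new KeyPair().withHashKey("id-2").withRangeKey("2014-02-01")));
 *   Map<String, List<Object>> itemsByTable = mapper.batchLoad(itemsToGet);
 */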
  1599. /**
  1600. * @param config never null
  1601. */
  1602. private void processBatchGetRequest(
  1603. final Map<String, Class<?>> classesByTableName,
  1604. final Map<String, KeysAndAttributes> requestItems,
  1605. final Map<String, List<Object>> resultSet,
  1606. final DynamoDBMapperConfig config) {
  1607. BatchGetItemResult batchGetItemResult = null;
  1608. BatchGetItemRequest batchGetItemRequest = new BatchGetItemRequest()
  1609. .withRequestMetricCollector(config.getRequestMetricCollector());
  1610. batchGetItemRequest.setRequestItems(requestItems);
  1611. int retries = 0;
  1612. int noOfItemsInOriginalRequest = requestItems.size();
  1613. do {
  1614. if ( batchGetItemResult != null ) {
  1615. retries++;
  1616. if (noOfItemsInOriginalRequest == batchGetItemResult
  1617. .getUnprocessedKeys().size()){
  1618. pauseExponentially(retries);
  1619. if (retries > BATCH_GET_MAX_RETRY_COUNT_ALL_KEYS) {
  1620. throw new AmazonClientException(
  1621. "Batch Get Item request to server hasn't received any data. Please try again later.");
  1622. }
  1623. }
  1624. batchGetItemRequest.setRequestItems(batchGetItemResult.getUnprocessedKeys());
  1625. }
  1626. batchGetItemResult = db.batchGetItem(batchGetItemRequest);
  1627. Map<String, List<Map<String, AttributeValue>>> responses = batchGetItemResult.getResponses();
  1628. for ( String tableName : responses.keySet() ) {
  1629. List<Object> objects = null;
  1630. if ( resultSet.get(tableName) != null ) {
  1631. objects = resultSet.get(tableName);
  1632. } else {
  1633. objects = new LinkedList<Object>();
  1634. }
  1635. Class<?> clazz = classesByTableName.get(tableName);
  1636. for ( Map<String, AttributeValue> item : responses.get(tableName) ) {
  1637. AttributeTransformer.Parameters<?> parameters =
  1638. toParameters(item, clazz, config);
  1639. objects.add(marshalIntoObject(parameters));
  1640. }
  1641. resultSet.put(tableName, objects);
  1642. }
1643. // Loop as long as there are still unprocessed keys.
  1644. } while ( batchGetItemResult.getUnprocessedKeys() != null && batchGetItemResult.getUnprocessedKeys().size() > 0 );
  1645. }
  1646. /**
  1647. * Swallows the checked exceptions around Method.invoke and repackages them
  1648. * as {@link DynamoDBMappingException}
  1649. */
  1650. private Object safeInvoke(Method method, Object object, Object... arguments) {
  1651. try {
  1652. return method.invoke(object, arguments);
  1653. } catch ( IllegalAccessException e ) {
  1654. throw new DynamoDBMappingException("Couldn't invoke " + method, e);
  1655. } catch ( IllegalArgumentException e ) {
  1656. throw new DynamoDBMappingException("Couldn't invoke " + method, e);
  1657. } catch ( InvocationTargetException e ) {
  1658. throw new DynamoDBMappingException("Couldn't invoke " + method, e);
  1659. }
  1660. }
  1661. private final class ValueUpdate {
  1662. private Method method;
  1663. private AttributeValue newValue;
  1664. private Object target;
  1665. public ValueUpdate(Method method, AttributeValue newValue, Object target) {
  1666. this.method = method;
  1667. this.newValue = newValue;
  1668. this.target = target;
  1669. }
  1670. public void apply() {
  1671. setValue(target, method, newValue);
  1672. }
  1673. }
  1674. /**
  1675. * Converts the {@link AttributeValueUpdate} map given to an equivalent
  1676. * {@link AttributeValue} map.
  1677. */
  1678. private Map<String, AttributeValue> convertToItem(Map<String, AttributeValueUpdate> putValues) {
  1679. Map<String, AttributeValue> map = new HashMap<String, AttributeValue>();
  1680. for ( Entry<String, AttributeValueUpdate> entry : putValues.entrySet() ) {
  1681. String attributeName = entry.getKey();
  1682. AttributeValue attributeValue = entry.getValue().getValue();
  1683. String attributeAction = entry.getValue().getAction();
  1684. /*
  1685. * AttributeValueUpdate allows nulls for its values, since they are
  1686. * semantically meaningful. AttributeValues never have null values.
  1687. */
  1688. if ( attributeValue != null
  1689. && !AttributeAction.DELETE.toString().equals(attributeAction)) {
  1690. map.put(attributeName, attributeValue);
  1691. }
  1692. }
  1693. return map;
  1694. }
  1695. /**
  1696. * Gets the attribute value object corresponding to the
  1697. * {@link DynamoDBVersionAttribute} getter, and its result, given. Null
  1698. * values are assumed to be new objects and given the smallest possible
  1699. * positive value. Non-null values are incremented from their current value.
  1700. */
  1701. private AttributeValue getVersionAttributeValue(final Method getter, Object getterReturnResult) {
  1702. ArgumentMarshaller marshaller = reflector.getVersionedArgumentMarshaller(getter, getterReturnResult);
  1703. return marshaller.marshall(getterReturnResult);
  1704. }
  1705. /**
  1706. * Returns an attribute value corresponding to the key method and value given.
  1707. */
  1708. private AttributeValue getAutoGeneratedKeyAttributeValue(Method getter, Object getterResult) {
  1709. ArgumentMarshaller marshaller = reflector.getAutoGeneratedKeyArgumentMarshaller(getter);
  1710. return marshaller.marshall(getterResult);
  1711. }
  1712. /**
  1713. * Scans through an Amazon DynamoDB table and returns the matching results as
  1714. * an unmodifiable list of instantiated objects, using the default configuration.
  1715. *
  1716. * @see DynamoDBMapper#scan(Class, DynamoDBScanExpression, DynamoDBMapperConfig)
  1717. */
  1718. public <T> PaginatedScanList<T> scan(Class<T> clazz, DynamoDBScanExpression scanExpression) {
  1719. return scan(clazz, scanExpression, config);
  1720. }
  1721. /**
  1722. * Scans through an Amazon DynamoDB table and returns the matching results as
  1723. * an unmodifiable list of instantiated objects. The table to scan is
  1724. * determined by looking at the annotations on the specified class, which
  1725. * declares where to store the object data in Amazon DynamoDB, and the scan
  1726. * expression parameter allows the caller to filter results and control how
  1727. * the scan is executed.
  1728. * <p>
  1729. * Callers should be aware that the returned list is unmodifiable, and any
  1730. * attempts to modify the list will result in an
  1731. * UnsupportedOperationException.
  1732. * <p>
  1733. * You can specify the pagination loading strategy for this scan operation.
  1734. * By default, the list returned is lazily loaded when possible.
  1735. *
  1736. * @param <T>
  1737. * The type of the objects being returned.
  1738. * @param clazz
  1739. * The class annotated with DynamoDB annotations describing how
  1740. * to store the object data in Amazon DynamoDB.
  1741. * @param scanExpression
  1742. * Details on how to run the scan, including any filters to apply
  1743. * to limit results.
  1744. * @param config
  1745. * The configuration to use for this scan, which overrides the
  1746. * default provided at object construction.
  1747. * @return An unmodifiable list of the objects constructed from the results
  1748. * of the scan operation.
  1749. * @see PaginatedScanList
  1750. * @see PaginationLoadingStrategy
  1751. */
  1752. public <T> PaginatedScanList<T> scan(Class<T> clazz, DynamoDBScanExpression scanExpression, DynamoDBMapperConfig config) {
  1753. config = mergeConfig(config);
  1754. ScanRequest scanRequest = createScanRequestFromExpression(clazz, scanExpression, config);
  1755. ScanResult scanResult = db.scan(applyUserAgent(scanRequest));
  1756. return new PaginatedScanList<T>(this, clazz, db, scanRequest, scanResult, config.getPaginationLoadingStrategy(), config);
  1757. }
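/*
 * Usage sketch (illustration only; "Customer" is a hypothetical mapped class
 * with a numeric "age" attribute):
 *
 *   DynamoDBScanExpression scanExpression = new DynamoDBScanExpression();
 *   scanExpression.addFilterCondition("age",
 *       new Condition().withComparisonOperator(ComparisonOperator.GT)
 *                      .withAttributeValueList(new AttributeValue().withN("21")));
 *   PaginatedScanList<Customer> adults = mapper.scan(Customer.class, scanExpression);
 *   // The returned list is unmodifiable and, by default, fetches further pages lazily.
 */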
  1758. /**
  1759. * Scans through an Amazon DynamoDB table on logically partitioned segments
  1760. * in parallel and returns the matching results in one unmodifiable list of
  1761. * instantiated objects, using the default configuration.
  1762. *
  1763. * @see DynamoDBMapper#parallelScan(Class, DynamoDBScanExpression,int,
  1764. * DynamoDBMapperConfig)
  1765. */
  1766. public <T> PaginatedParallelScanList<T> parallelScan(Class<T> clazz, DynamoDBScanExpression scanExpression, int totalSegments) {
  1767. return parallelScan(clazz, scanExpression, totalSegments, config);
  1768. }
  1769. /**
  1770. * Scans through an Amazon DynamoDB table on logically partitioned segments
  1771. * in parallel. This method will create a thread pool of the specified size,
  1772. * and each thread will issue scan requests for its assigned segment,
  1773. * following the returned continuation token, until the end of its segment.
1774. * Callers are responsible for setting an appropriate number of total
1775. * segments. More scan segments result in better performance, but also consume
1776. * more of the table's provisioned throughput. The results are returned in one
  1777. * unmodifiable list of instantiated objects. The table to scan is
  1778. * determined by looking at the annotations on the specified class, which
  1779. * declares where to store the object data in Amazon DynamoDB, and the scan
  1780. * expression parameter allows the caller to filter results and control how
  1781. * the scan is executed.
  1782. * <p>
  1783. * Callers should be aware that the returned list is unmodifiable, and any
  1784. * attempts to modify the list will result in an
  1785. * UnsupportedOperationException.
  1786. * <p>
  1787. * You can specify the pagination loading strategy for this parallel scan operation.
  1788. * By default, the list returned is lazily loaded when possible.
  1789. *
  1790. * @param <T>
  1791. * The type of the objects being returned.
  1792. * @param clazz
  1793. * The class annotated with DynamoDB annotations describing how
  1794. * to store the object data in Amazon DynamoDB.
  1795. * @param scanExpression
  1796. * Details on how to run the scan, including any filters to apply
  1797. * to limit results.
  1798. * @param totalSegments
  1799. * Number of total parallel scan segments.
  1800. * <b>Range: </b>1 - 4096
  1801. * @param config
  1802. * The configuration to use for this scan, which overrides the
  1803. * default provided at object construction.
  1804. * @return An unmodifiable list of the objects constructed from the results
  1805. * of the scan operation.
  1806. * @see PaginatedParallelScanList
  1807. * @see PaginationLoadingStrategy
  1808. */
  1809. public <T> PaginatedParallelScanList<T> parallelScan(Class<T> clazz, DynamoDBScanExpression scanExpression, int totalSegments, DynamoDBMapperConfig config) {
  1810. config = mergeConfig(config);
1811. // Create hard copies of the original scan request, each with a different segment number.
  1812. List<ScanRequest> parallelScanRequests = createParallelScanRequestsFromExpression(clazz, scanExpression, totalSegments, config);
  1813. ParallelScanTask parallelScanTask = new ParallelScanTask(this, db, parallelScanRequests);
  1814. return new PaginatedParallelScanList<T>(this, clazz, db, parallelScanTask, config.getPaginationLoadingStrategy(), config);
  1815. }
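/*
 * Usage sketch (illustration only): scanning a hypothetical "Customer" table
 * with four parallel segments. A higher totalSegments finishes sooner but
 * consumes more of the table's provisioned throughput.
 *
 *   PaginatedParallelScanList<Customer> all =
 *       mapper.parallelScan(Customer.class, new DynamoDBScanExpression(), 4);
 */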
  1816. /**
  1817. * Scans through an Amazon DynamoDB table and returns a single page of matching
  1818. * results. The table to scan is determined by looking at the annotations on
  1819. * the specified class, which declares where to store the object data in AWS
  1820. * DynamoDB, and the scan expression parameter allows the caller to filter
  1821. * results and control how the scan is executed.
  1822. *
  1823. * @param <T>
  1824. * The type of the objects being returned.
  1825. * @param clazz
  1826. * The class annotated with DynamoDB annotations describing how
  1827. * to store the object data in Amazon DynamoDB.
  1828. * @param scanExpression
  1829. * Details on how to run the scan, including any filters to apply
  1830. * to limit results.
  1831. * @param config
  1832. * The configuration to use for this scan, which overrides the
  1833. * default provided at object construction.
  1834. */
  1835. public <T> ScanResultPage<T> scanPage(Class<T> clazz, DynamoDBScanExpression scanExpression, DynamoDBMapperConfig config) {
  1836. config = mergeConfig(config);
  1837. ScanRequest scanRequest = createScanRequestFromExpression(clazz, scanExpression, config);
  1838. ScanResult scanResult = db.scan(applyUserAgent(scanRequest));
  1839. ScanResultPage<T> result = new ScanResultPage<T>();
  1840. List<AttributeTransformer.Parameters<T>> parameters =
  1841. toParameters(scanResult.getItems(), clazz, config);
  1842. result.setResults(marshalIntoObjects(parameters));
  1843. result.setLastEvaluatedKey(scanResult.getLastEvaluatedKey());
  1844. return result;
  1845. }
  1846. /**
  1847. * Scans through an Amazon DynamoDB table and returns a single page of matching
  1848. * results.
  1849. *
  1850. * @see DynamoDBMapper#scanPage(Class, DynamoDBScanExpression, DynamoDBMapperConfig)
  1851. */
  1852. public <T> ScanResultPage<T> scanPage(Class<T> clazz, DynamoDBScanExpression scanExpression) {
  1853. return scanPage(clazz, scanExpression, this.config);
  1854. }
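/*
 * Sketch of manual pagination with scanPage (illustration only; "Customer" is
 * a hypothetical mapped class):
 *
 *   DynamoDBScanExpression expression = new DynamoDBScanExpression();
 *   Map<String, AttributeValue> startKey = null;
 *   do {
 *       expression.setExclusiveStartKey(startKey);
 *       ScanResultPage<Customer> page = mapper.scanPage(Customer.class, expression);
 *       // process page.getResults() ...
 *       startKey = page.getLastEvaluatedKey();
 *   } while (startKey != null);
 */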
  1855. /**
  1856. * Queries an Amazon DynamoDB table and returns the matching results as an
  1857. * unmodifiable list of instantiated objects, using the default
  1858. * configuration.
  1859. *
  1860. * @see DynamoDBMapper#query(Class, DynamoDBQueryExpression,
  1861. * DynamoDBMapperConfig)
  1862. */
  1863. public <T> PaginatedQueryList<T> query(Class<T> clazz, DynamoDBQueryExpression<T> queryExpression) {
  1864. return query(clazz, queryExpression, config);
  1865. }
  1866. /**
  1867. * Queries an Amazon DynamoDB table and returns the matching results as an
  1868. * unmodifiable list of instantiated objects. The table to query is
  1869. * determined by looking at the annotations on the specified class, which
  1870. * declares where to store the object data in Amazon DynamoDB, and the query
  1871. * expression parameter allows the caller to filter results and control how
  1872. * the query is executed.
  1873. * <p>
  1874. * When the query is on any local/global secondary index, callers should be aware that
  1875. * the returned object(s) will only contain item attributes that are projected
1876. * into the index. All the other unprojected attributes will be set to their
1877. * type default values.
  1878. * <p>
  1879. * Callers should also be aware that the returned list is unmodifiable, and any
  1880. * attempts to modify the list will result in an
  1881. * UnsupportedOperationException.
  1882. * <p>
  1883. * You can specify the pagination loading strategy for this query operation.
  1884. * By default, the list returned is lazily loaded when possible.
  1885. *
  1886. * @param <T>
  1887. * The type of the objects being returned.
  1888. * @param clazz
  1889. * The class annotated with DynamoDB annotations describing how
  1890. * to store the object data in Amazon DynamoDB.
  1891. * @param queryExpression
  1892. * Details on how to run the query, including any conditions on
  1893. * the key values
  1894. * @param config
  1895. * The configuration to use for this query, which overrides the
  1896. * default provided at object construction.
  1897. * @return An unmodifiable list of the objects constructed from the results
  1898. * of the query operation.
  1899. * @see PaginatedQueryList
  1900. * @see PaginationLoadingStrategy
  1901. */
  1902. public <T> PaginatedQueryList<T> query(Class<T> clazz, DynamoDBQueryExpression<T> queryExpression, DynamoDBMapperConfig config) {
  1903. config = mergeConfig(config);
  1904. QueryRequest queryRequest = createQueryRequestFromExpression(clazz, queryExpression, config);
  1905. QueryResult queryResult = db.query(applyUserAgent(queryRequest));
  1906. return new PaginatedQueryList<T>(this, clazz, db, queryRequest, queryResult, config.getPaginationLoadingStrategy(), config);
  1907. }
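/*
 * Usage sketch (illustration only; "Order" is a hypothetical mapped class whose
 * hash key is "customerId" and whose range key is "orderDate"):
 *
 *   Order hashKeyValues = new Order();
 *   hashKeyValues.setCustomerId("id-1");
 *
 *   DynamoDBQueryExpression<Order> queryExpression =
 *       new DynamoDBQueryExpression<Order>()
 *           .withHashKeyValues(hashKeyValues)
 *           .withRangeKeyCondition("orderDate",
 *               new Condition().withComparisonOperator(ComparisonOperator.BEGINS_WITH)
 *                              .withAttributeValueList(new AttributeValue().withS("2014-")));
 *   PaginatedQueryList<Order> orders = mapper.query(Order.class, queryExpression);
 */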
  1908. /**
  1909. * Queries an Amazon DynamoDB table and returns a single page of matching
  1910. * results. The table to query is determined by looking at the annotations
  1911. * on the specified class, which declares where to store the object data in
  1912. * Amazon DynamoDB, and the query expression parameter allows the caller to
  1913. * filter results and control how the query is executed.
  1914. *
  1915. * @see DynamoDBMapper#queryPage(Class, DynamoDBQueryExpression, DynamoDBMapperConfig)
  1916. */
  1917. public <T> QueryResultPage<T> queryPage(Class<T> clazz, DynamoDBQueryExpression<T> queryExpression) {
  1918. return queryPage(clazz, queryExpression, this.config);
  1919. }
  1920. /**
  1921. * Queries an Amazon DynamoDB table and returns a single page of matching
  1922. * results. The table to query is determined by looking at the annotations
  1923. * on the specified class, which declares where to store the object data in
  1924. * Amazon DynamoDB, and the query expression parameter allows the caller to
  1925. * filter results and control how the query is executed.
  1926. *
  1927. * @param <T>
  1928. * The type of the objects being returned.
  1929. * @param clazz
  1930. * The class annotated with DynamoDB annotations describing how
  1931. * to store the object data in AWS DynamoDB.
  1932. * @param queryExpression
  1933. * Details on how to run the query, including any conditions on
  1934. * the key values
  1935. * @param config
  1936. * The configuration to use for this query, which overrides the
  1937. * default provided at object construction.
  1938. */
  1939. public <T> QueryResultPage<T> queryPage(Class<T> clazz, DynamoDBQueryExpression<T> queryExpression, DynamoDBMapperConfig config) {
  1940. config = mergeConfig(config);
  1941. QueryRequest queryRequest = createQueryRequestFromExpression(clazz, queryExpression, config);
  1942. QueryResult scanResult = db.query(applyUserAgent(queryRequest));
  1943. QueryResultPage<T> result = new QueryResultPage<T>();
  1944. List<AttributeTransformer.Parameters<T>> parameters =
  1945. toParameters(scanResult.getItems(), clazz, config);
  1946. result.setResults(marshalIntoObjects(parameters));
  1947. result.setLastEvaluatedKey(scanResult.getLastEvaluatedKey());
  1948. return result;
  1949. }
  1950. /**
  1951. * Evaluates the specified scan expression and returns the count of matching
  1952. * items, without returning any of the actual item data, using the default configuration.
  1953. *
  1954. * @see DynamoDBMapper#count(Class, DynamoDBScanExpression, DynamoDBMapperConfig)
  1955. */
  1956. public int count(Class<?> clazz, DynamoDBScanExpression scanExpression) {
  1957. return count(clazz, scanExpression, config);
  1958. }
  1959. /**
  1960. * Evaluates the specified scan expression and returns the count of matching
  1961. * items, without returning any of the actual item data.
  1962. * <p>
  1963. * This operation will scan your entire table, and can therefore be very
  1964. * expensive. Use with caution.
  1965. *
  1966. * @param clazz
  1967. * The class mapped to a DynamoDB table.
  1968. * @param scanExpression
  1969. * The parameters for running the scan.
  1970. * @param config
  1971. * The configuration to use for this scan, which overrides the
  1972. * default provided at object construction.
  1973. * @return The count of matching items, without returning any of the actual
  1974. * item data.
  1975. */
  1976. public int count(Class<?> clazz, DynamoDBScanExpression scanExpression, DynamoDBMapperConfig config) {
  1977. config = mergeConfig(config);
  1978. ScanRequest scanRequest = createScanRequestFromExpression(clazz, scanExpression, config);
  1979. scanRequest.setSelect(Select.COUNT);
  1980. // Count scans can also be truncated for large datasets
  1981. int count = 0;
  1982. ScanResult scanResult = null;
  1983. do {
  1984. scanResult = db.scan(applyUserAgent(scanRequest));
  1985. count += scanResult.getCount();
  1986. scanRequest.setExclusiveStartKey(scanResult.getLastEvaluatedKey());
  1987. } while (scanResult.getLastEvaluatedKey() != null);
  1988. return count;
  1989. }
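/*
 * Usage sketch (illustration only; note that this still scans the entire
 * hypothetical "Customer" table, even though no item data is returned):
 *
 *   int total = mapper.count(Customer.class, new DynamoDBScanExpression());
 */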
  1990. /**
  1991. * Evaluates the specified query expression and returns the count of matching
  1992. * items, without returning any of the actual item data, using the default configuration.
  1993. *
  1994. * @see DynamoDBMapper#count(Class, DynamoDBQueryExpression, DynamoDBMapperConfig)
  1995. */
  1996. public <T> int count(Class<T> clazz, DynamoDBQueryExpression<T> queryExpression) {
  1997. return count(clazz, queryExpression, config);
  1998. }
  1999. /**
  2000. * Evaluates the specified query expression and returns the count of
  2001. * matching items, without returning any of the actual item data.
  2002. *
  2003. * @param clazz
  2004. * The class mapped to a DynamoDB table.
  2005. * @param queryExpression
2006. * The parameters for running the query.
  2007. * @param config
  2008. * The mapper configuration to use for the query, which overrides
  2009. * the default provided at object construction.
  2010. * @return The count of matching items, without returning any of the actual
  2011. * item data.
  2012. */
  2013. public <T> int count(Class<T> clazz, DynamoDBQueryExpression<T> queryExpression, DynamoDBMapperConfig config) {
  2014. config = mergeConfig(config);
  2015. QueryRequest queryRequest = createQueryRequestFromExpression(clazz, queryExpression, config);
  2016. queryRequest.setSelect(Select.COUNT);
  2017. // Count queries can also be truncated for large datasets
  2018. int count = 0;
  2019. QueryResult queryResult = null;
  2020. do {
  2021. queryResult = db.query(applyUserAgent(queryRequest));
  2022. count += queryResult.getCount();
  2023. queryRequest.setExclusiveStartKey(queryResult.getLastEvaluatedKey());
  2024. } while (queryResult.getLastEvaluatedKey() != null);
  2025. return count;
  2026. }
  2027. /**
  2028. * Merges the config object given with the one specified at construction and
  2029. * returns the result.
  2030. */
  2031. private DynamoDBMapperConfig mergeConfig(DynamoDBMapperConfig config) {
  2032. if ( config != this.config )
  2033. config = new DynamoDBMapperConfig(this.config, config);
  2034. return config;
  2035. }
  2036. /**
  2037. * @param config never null
  2038. */
  2039. private ScanRequest createScanRequestFromExpression(Class<?> clazz, DynamoDBScanExpression scanExpression, DynamoDBMapperConfig config) {
  2040. ScanRequest scanRequest = new ScanRequest();
  2041. scanRequest.setTableName(getTableName(clazz, config));
  2042. scanRequest.setScanFilter(scanExpression.getScanFilter());
  2043. scanRequest.setLimit(scanExpression.getLimit());
  2044. scanRequest.setExclusiveStartKey(scanExpression.getExclusiveStartKey());
  2045. scanRequest.setTotalSegments(scanExpression.getTotalSegments());
  2046. scanRequest.setSegment(scanExpression.getSegment());
  2047. scanRequest.setConditionalOperator(scanExpression.getConditionalOperator());
  2048. scanRequest.setRequestMetricCollector(config.getRequestMetricCollector());
  2049. return scanRequest;
  2050. }
  2051. /**
  2052. * @param config never null
  2053. */
  2054. private List<ScanRequest> createParallelScanRequestsFromExpression(Class<?> clazz, DynamoDBScanExpression scanExpression, int totalSegments, DynamoDBMapperConfig config) {
  2055. if (totalSegments < 1) {
  2056. throw new IllegalArgumentException("Parallel scan should have at least one scan segment.");
  2057. }
  2058. if (scanExpression.getExclusiveStartKey() != null) {
  2059. log.info("The ExclusiveStartKey parameter specified in the DynamoDBScanExpression is ignored,"
  2060. + " since the individual parallel scan request on each segment is applied on a separate key scope.");
  2061. }
  2062. if (scanExpression.getSegment() != null || scanExpression.getTotalSegments() != null) {
  2063. log.info("The Segment and TotalSegments parameters specified in the DynamoDBScanExpression are ignored.");
  2064. }
  2065. List<ScanRequest> parallelScanRequests= new LinkedList<ScanRequest>();
  2066. for (int segment = 0; segment < totalSegments; segment++) {
  2067. ScanRequest scanRequest = createScanRequestFromExpression(clazz, scanExpression, config);
  2068. parallelScanRequests.add(scanRequest
  2069. .withSegment(segment).withTotalSegments(totalSegments)
  2070. .withExclusiveStartKey(null));
  2071. }
  2072. return parallelScanRequests;
  2073. }
  2074. private <T> QueryRequest createQueryRequestFromExpression(Class<T> clazz, DynamoDBQueryExpression<T> queryExpression, DynamoDBMapperConfig config) {
  2075. QueryRequest queryRequest = new QueryRequest();
  2076. queryRequest.setConsistentRead(queryExpression.isConsistentRead());
  2077. queryRequest.setTableName(getTableName(clazz, config));
  2078. queryRequest.setIndexName(queryExpression.getIndexName());
  2079. // Hash key (primary or index) conditions
  2080. Map<String, Condition> hashKeyConditions = getHashKeyEqualsConditions(queryExpression.getHashKeyValues());
  2081. // Range key (primary or index) conditions
  2082. Map<String, Condition> rangeKeyConditions = queryExpression.getRangeKeyConditions();
  2083. processKeyConditions(clazz, queryRequest, hashKeyConditions, rangeKeyConditions);
  2084. queryRequest.setScanIndexForward(queryExpression.isScanIndexForward());
  2085. queryRequest.setLimit(queryExpression.getLimit());
  2086. queryRequest.setExclusiveStartKey(queryExpression.getExclusiveStartKey());
  2087. queryRequest.setQueryFilter(queryExpression.getQueryFilter());
  2088. queryRequest.setConditionalOperator(queryExpression.getConditionalOperator());
  2089. queryRequest.setRequestMetricCollector(config.getRequestMetricCollector());
  2090. return queryRequest;
  2091. }
  2092. /**
  2093. * Utility method for checking the validity of both hash and range key
  2094. * conditions. It also tries to infer the correct index name from the POJO
  2095. * annotation, if such information is not directly specified by the user.
  2096. *
  2097. * @param clazz
  2098. * The domain class of the queried items.
  2099. * @param queryRequest
  2100. * The QueryRequest object to be sent to service.
  2101. * @param hashKeyConditions
  2102. * All the hash key EQ conditions extracted from the POJO object.
2103. * The mapper will choose the one that can be applied together with
2104. * the user-specified (if any) index name and range key conditions, or it
2105. * throws an error if more than one condition is applicable to the query.
  2106. * @param rangeKeyConditions
  2107. * The range conditions specified by the user. We currently only
  2108. * allow at most one range key condition.
  2109. */
  2110. private void processKeyConditions(Class<?> clazz,
  2111. QueryRequest queryRequest,
  2112. Map<String, Condition> hashKeyConditions,
  2113. Map<String, Condition> rangeKeyConditions) {
2114. // There should be at least one hash key condition.
  2115. if (hashKeyConditions == null || hashKeyConditions.isEmpty()) {
  2116. throw new IllegalArgumentException("Illegal query expression: No hash key condition is found in the query");
  2117. }
  2118. // We don't allow multiple range key conditions.
  2119. if (rangeKeyConditions != null && rangeKeyConditions.size() > 1) {
  2120. throw new IllegalArgumentException(
  2121. "Illegal query expression: Conditions on multiple range keys ("
  2122. + rangeKeyConditions.keySet().toString()
  2123. + ") are found in the query. DynamoDB service only accepts up to ONE range key condition.");
  2124. }
  2125. final boolean hasRangeKeyCondition = (rangeKeyConditions != null)
  2126. && (!rangeKeyConditions.isEmpty());
  2127. final String userProvidedIndexName = queryRequest.getIndexName();
  2128. final String primaryHashKeyName = reflector.getPrimaryHashKeyName(clazz);
  2129. final TableIndexesInfo parsedIndexesInfo = schemaParser.parseTableIndexes(clazz, reflector);
  2130. // First collect the names of all the global/local secondary indexes that could be applied to this query.
  2131. // If the user explicitly specified an index name, we also need to
  2132. // 1) check the index is applicable for both hash and range key conditions
  2133. // 2) choose one hash key condition if there are more than one of them
  2134. boolean hasPrimaryHashKeyCondition = false;
  2135. final Map<String, Set<String>> annotatedGSIsOnHashKeys = new HashMap<String, Set<String>>();
  2136. String hashKeyNameForThisQuery = null;
  2137. boolean hasPrimaryRangeKeyCondition = false;
  2138. final Set<String> annotatedLSIsOnRangeKey = new HashSet<String>();
  2139. final Set<String> annotatedGSIsOnRangeKey = new HashSet<String>();
  2140. // Range key condition
  2141. String rangeKeyNameForThisQuery = null;
  2142. if (hasRangeKeyCondition) {
  2143. for (String rangeKeyName : rangeKeyConditions.keySet()) {
  2144. rangeKeyNameForThisQuery = rangeKeyName;
  2145. if (reflector.hasPrimaryRangeKey(clazz)
  2146. && rangeKeyName.equals(reflector.getPrimaryRangeKeyName(clazz))) {
  2147. hasPrimaryRangeKeyCondition = true;
  2148. }
  2149. Collection<String> annotatedLSI = parsedIndexesInfo.getLsiNamesByIndexRangeKey(rangeKeyName);
  2150. if (annotatedLSI != null) {
  2151. annotatedLSIsOnRangeKey.addAll(annotatedLSI);
  2152. }
  2153. Collection<String> annotatedGSI = parsedIndexesInfo.getGsiNamesByIndexRangeKey(rangeKeyName);
  2154. if (annotatedGSI != null) {
  2155. annotatedGSIsOnRangeKey.addAll(annotatedGSI);
  2156. }
  2157. }
  2158. if ( !hasPrimaryRangeKeyCondition
  2159. && annotatedLSIsOnRangeKey.isEmpty()
  2160. && annotatedGSIsOnRangeKey.isEmpty()) {
  2161. throw new DynamoDBMappingException(
  2162. "The query contains a condition on a range key (" +
  2163. rangeKeyNameForThisQuery + ") " +
  2164. "that is not annotated with either @DynamoDBRangeKey or @DynamoDBIndexRangeKey.");
  2165. }
  2166. }
  2167. final boolean userProvidedLSIWithRangeKeyCondition = (userProvidedIndexName != null)
  2168. && (annotatedLSIsOnRangeKey.contains(userProvidedIndexName));
  2169. final boolean hashOnlyLSIQuery = (userProvidedIndexName != null)
  2170. && ( !hasRangeKeyCondition )
  2171. && parsedIndexesInfo.getAllLsiNames().contains(userProvidedIndexName);
  2172. final boolean userProvidedLSI = userProvidedLSIWithRangeKeyCondition || hashOnlyLSIQuery;
  2173. final boolean userProvidedGSIWithRangeKeyCondition = (userProvidedIndexName != null)
  2174. && (annotatedGSIsOnRangeKey.contains(userProvidedIndexName));
  2175. final boolean hashOnlyGSIQuery = (userProvidedIndexName != null)
  2176. && ( !hasRangeKeyCondition )
  2177. && parsedIndexesInfo.getAllGsiNames().contains(userProvidedIndexName);
  2178. final boolean userProvidedGSI = userProvidedGSIWithRangeKeyCondition || hashOnlyGSIQuery;
  2179. if (userProvidedLSI && userProvidedGSI ) {
  2180. throw new DynamoDBMappingException(
  2181. "Invalid query: " +
  2182. "Index \"" + userProvidedIndexName + "\" " +
  2183. "is annotateded as both a LSI and a GSI for attribute.");
  2184. }
  2185. // Hash key conditions
  2186. for (String hashKeyName : hashKeyConditions.keySet()) {
  2187. if (hashKeyName.equals(primaryHashKeyName)) {
  2188. hasPrimaryHashKeyCondition = true;
  2189. }
  2190. Collection<String> annotatedGSINames = parsedIndexesInfo.getGsiNamesByIndexHashKey(hashKeyName);
  2191. annotatedGSIsOnHashKeys.put(hashKeyName,
  2192. annotatedGSINames == null ? new HashSet<String>() : new HashSet<String>(annotatedGSINames));
  2193. // Additional validation if the user provided an index name.
  2194. if (userProvidedIndexName != null) {
  2195. boolean foundHashKeyConditionValidWithUserProvidedIndex = false;
  2196. if (userProvidedLSI && hashKeyName.equals(primaryHashKeyName)) {
  2197. // found an applicable hash key condition (primary hash + LSI range)
  2198. foundHashKeyConditionValidWithUserProvidedIndex = true;
  2199. } else if (userProvidedGSI &&
  2200. annotatedGSINames != null && annotatedGSINames.contains(userProvidedIndexName)) {
  2201. // found an applicable hash key condition (GSI hash + range)
  2202. foundHashKeyConditionValidWithUserProvidedIndex = true;
  2203. }
  2204. if (foundHashKeyConditionValidWithUserProvidedIndex) {
  2205. if ( hashKeyNameForThisQuery != null ) {
  2206. throw new IllegalArgumentException(
  2207. "Ambiguous query expression: More than one hash key EQ conditions (" +
  2208. hashKeyNameForThisQuery + ", " + hashKeyName +
  2209. ") are applicable to the specified index ("
  2210. + userProvidedIndexName + "). " +
  2211. "Please provide only one of them in the query expression.");
  2212. } else {
  2213. // found an applicable hash key condition
  2214. hashKeyNameForThisQuery = hashKeyName;
  2215. }
  2216. }
  2217. }
  2218. }
  2219. // Collate all the key conditions
  2220. Map<String, Condition> keyConditions = new HashMap<String, Condition>();
  2221. // With user-provided index name
  2222. if (userProvidedIndexName != null) {
  2223. if (hasRangeKeyCondition
  2224. && ( !userProvidedLSI )
  2225. && ( !userProvidedGSI )) {
  2226. throw new IllegalArgumentException(
  2227. "Illegal query expression: No range key condition is applicable to the specified index ("
  2228. + userProvidedIndexName + "). ");
  2229. }
  2230. if (hashKeyNameForThisQuery == null) {
  2231. throw new IllegalArgumentException(
  2232. "Illegal query expression: No hash key condition is applicable to the specified index ("
  2233. + userProvidedIndexName + "). ");
  2234. }
  2235. keyConditions.put(hashKeyNameForThisQuery, hashKeyConditions.get(hashKeyNameForThisQuery));
  2236. if (hasRangeKeyCondition) {
  2237. keyConditions.putAll(rangeKeyConditions);
  2238. }
  2239. }
  2240. // Infer the index name by finding the index shared by both hash and range key annotations.
  2241. else {
  2242. if (hasRangeKeyCondition) {
  2243. String inferredIndexName = null;
  2244. hashKeyNameForThisQuery = null;
  2245. if (hasPrimaryHashKeyCondition && hasPrimaryRangeKeyCondition) {
  2246. // Found valid query: primary hash + range key conditions
  2247. hashKeyNameForThisQuery = primaryHashKeyName;
  2248. } else {
  2249. // Intersect the set of all the indexes applicable to the range key
  2250. // with the set of indexes applicable to each hash key condition.
  2251. for (String hashKeyName : annotatedGSIsOnHashKeys.keySet()) {
  2252. boolean foundValidQueryExpressionWithInferredIndex = false;
  2253. String indexNameInferredByThisHashKey = null;
  2254. if (hashKeyName.equals(primaryHashKeyName)) {
  2255. if (annotatedLSIsOnRangeKey.size() == 1) {
  2256. // Found valid query (Primary hash + LSI range conditions)
  2257. foundValidQueryExpressionWithInferredIndex = true;
  2258. indexNameInferredByThisHashKey = annotatedLSIsOnRangeKey.iterator().next();
  2259. }
  2260. }
  2261. Set<String> annotatedGSIsOnHashKey = annotatedGSIsOnHashKeys.get(hashKeyName);
  2262. // We don't need the data in annotatedGSIsOnHashKeys afterwards,
  2263. // so it's safe to do the intersection in-place.
  2264. annotatedGSIsOnHashKey.retainAll(annotatedGSIsOnRangeKey);
  2265. if (annotatedGSIsOnHashKey.size() == 1) {
  2266. // Found valid query (Hash + range conditions on a GSI)
  2267. if (foundValidQueryExpressionWithInferredIndex) {
  2268. hashKeyNameForThisQuery = hashKeyName;
  2269. inferredIndexName = indexNameInferredByThisHashKey;
  2270. }
  2271. foundValidQueryExpressionWithInferredIndex = true;
  2272. indexNameInferredByThisHashKey = annotatedGSIsOnHashKey.iterator().next();
  2273. }
  2274. if (foundValidQueryExpressionWithInferredIndex) {
  2275. if (hashKeyNameForThisQuery != null) {
  2276. throw new IllegalArgumentException(
  2277. "Ambiguous query expression: Found multiple valid queries: " +
  2278. "(Hash: \"" + hashKeyNameForThisQuery + "\", Range: \"" + rangeKeyNameForThisQuery + "\", Index: \"" + inferredIndexName + "\") and " +
  2279. "(Hash: \"" + hashKeyName + "\", Range: \"" + rangeKeyNameForThisQuery + "\", Index: \"" + indexNameInferredByThisHashKey + "\").");
  2280. } else {
  2281. hashKeyNameForThisQuery = hashKeyName;
  2282. inferredIndexName = indexNameInferredByThisHashKey;
  2283. }
  2284. }
  2285. }
  2286. }
  2287. if (hashKeyNameForThisQuery != null) {
  2288. keyConditions.put(hashKeyNameForThisQuery, hashKeyConditions.get(hashKeyNameForThisQuery));
  2289. keyConditions.putAll(rangeKeyConditions);
  2290. queryRequest.setIndexName(inferredIndexName);
  2291. } else {
  2292. throw new IllegalArgumentException(
  2293. "Illegal query expression: Cannot infer the index name from the query expression.");
  2294. }
  2295. } else {
  2296. // No range key condition is specified.
  2297. if (hashKeyConditions.size() > 1) {
  2298. if ( hasPrimaryHashKeyCondition ) {
  2299. keyConditions.put(primaryHashKeyName, hashKeyConditions.get(primaryHashKeyName));
  2300. } else {
  2301. throw new IllegalArgumentException(
  2302. "Ambiguous query expression: More than one index hash key EQ conditions (" +
  2303. hashKeyConditions.keySet() +
  2304. ") are applicable to the query. " +
  2305. "Please provide only one of them in the query expression, or specify the appropriate index name.");
  2306. }
  2307. } else {
  2308. // Only one hash key condition
  2309. String hashKeyName = annotatedGSIsOnHashKeys.keySet().iterator().next();
  2310. if ( !hasPrimaryHashKeyCondition ) {
  2311. if (annotatedGSIsOnHashKeys.get(hashKeyName).size() == 1) {
  2312. // Set the index if the index hash key is only annotated with one GSI.
  2313. queryRequest.setIndexName(annotatedGSIsOnHashKeys.get(hashKeyName).iterator().next());
  2314. } else if (annotatedGSIsOnHashKeys.get(hashKeyName).size() > 1) {
  2315. throw new IllegalArgumentException(
  2316. "Ambiguous query expression: More than one GSIs (" +
  2317. annotatedGSIsOnHashKeys.get(hashKeyName) +
  2318. ") are applicable to the query. " +
  2319. "Please specify one of them in your query expression.");
  2320. } else {
  2321. throw new IllegalArgumentException(
  2322. "Illegal query expression: No GSI is found in the @DynamoDBIndexHashKey annotation for attribute " +
  2323. "\"" + hashKeyName + "\".");
  2324. }
  2325. }
  2326. keyConditions.putAll(hashKeyConditions);
  2327. }
  2328. }
  2329. }
  2330. queryRequest.setKeyConditions(keyConditions);
  2331. }
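/*
 * Index-inference sketch (illustration only; "Order" is a hypothetical mapped
 * class whose "status" getter is annotated with
 * @DynamoDBIndexHashKey(globalSecondaryIndexName = "status-index")):
 *
 *   Order gsiKey = new Order();
 *   gsiKey.setStatus("SHIPPED");
 *   DynamoDBQueryExpression<Order> queryExpression =
 *       new DynamoDBQueryExpression<Order>()
 *           .withHashKeyValues(gsiKey)
 *           .withConsistentRead(false);   // GSI queries only support eventual consistency
 *   // No index name is given, so processKeyConditions infers "status-index",
 *   // the only GSI annotated on the "status" hash key.
 *   List<Order> shipped = mapper.query(Order.class, queryExpression);
 */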
  2332. private <T> AttributeTransformer.Parameters<T> toParameters(
  2333. final Map<String, AttributeValue> attributeValues,
  2334. final Class<T> modelClass,
  2335. final DynamoDBMapperConfig mapperConfig) {
  2336. return toParameters(attributeValues, false, modelClass, mapperConfig);
  2337. }
  2338. private <T> AttributeTransformer.Parameters<T> toParameters(
  2339. final Map<String, AttributeValue> attributeValues,
  2340. final boolean partialUpdate,
  2341. final Class<T> modelClass,
  2342. final DynamoDBMapperConfig mapperConfig) {
  2343. return new TransformerParameters(reflector,
  2344. attributeValues,
  2345. partialUpdate,
  2346. modelClass,
  2347. mapperConfig);
  2348. }
  2349. final <T> List<AttributeTransformer.Parameters<T>> toParameters(
  2350. final List<Map<String, AttributeValue>> attributeValues,
  2351. final Class<T> modelClass,
  2352. final DynamoDBMapperConfig mapperConfig
  2353. ) {
  2354. List<AttributeTransformer.Parameters<T>> rval =
  2355. new ArrayList<AttributeTransformer.Parameters<T>>(
  2356. attributeValues.size());
  2357. for (Map<String, AttributeValue> item : attributeValues) {
  2358. rval.add(toParameters(item, modelClass, mapperConfig));
  2359. }
  2360. return rval;
  2361. }
  2362. /**
  2363. * The one true implementation of AttributeTransformer.Parameters.
  2364. */
  2365. private static class TransformerParameters<T>
  2366. implements AttributeTransformer.Parameters<T> {
  2367. private final DynamoDBReflector reflector;
  2368. private final Map<String, AttributeValue> attributeValues;
  2369. private final boolean partialUpdate;
  2370. private final Class<T> modelClass;
  2371. private final DynamoDBMapperConfig mapperConfig;
  2372. private String tableName;
  2373. private String hashKeyName;
  2374. private String rangeKeyName;
  2375. public TransformerParameters(
  2376. final DynamoDBReflector reflector,
  2377. final Map<String, AttributeValue> attributeValues,
  2378. final boolean partialUpdate,
  2379. final Class<T> modelClass,
  2380. final DynamoDBMapperConfig mapperConfig) {
  2381. this.reflector = reflector;
  2382. this.attributeValues =
  2383. Collections.unmodifiableMap(attributeValues);
  2384. this.partialUpdate = partialUpdate;
  2385. this.modelClass = modelClass;
  2386. this.mapperConfig = mapperConfig;
  2387. }
  2388. @Override
  2389. public Map<String, AttributeValue> getAttributeValues() {
  2390. return attributeValues;
  2391. }
  2392. @Override
  2393. public boolean isPartialUpdate() {
  2394. return partialUpdate;
  2395. }
  2396. @Override
  2397. public Class<T> getModelClass() {
  2398. return modelClass;
  2399. }
  2400. @Override
  2401. public DynamoDBMapperConfig getMapperConfig() {
  2402. return mapperConfig;
  2403. }
  2404. @Override
  2405. public String getTableName() {
  2406. if (tableName == null) {
  2407. tableName = DynamoDBMapper
  2408. .getTableName(modelClass, mapperConfig, reflector);
  2409. }
  2410. return tableName;
  2411. }
  2412. @Override
  2413. public String getHashKeyName() {
  2414. if (hashKeyName == null) {
  2415. Method hashKeyGetter = reflector.getPrimaryHashKeyGetter(modelClass);
  2416. hashKeyName = reflector.getAttributeName(hashKeyGetter);
  2417. }
  2418. return hashKeyName;
  2419. }
  2420. @Override
  2421. public String getRangeKeyName() {
  2422. if (rangeKeyName == null) {
  2423. Method rangeKeyGetter =
  2424. reflector.getPrimaryRangeKeyGetter(modelClass);
  2425. if (rangeKeyGetter == null) {
  2426. rangeKeyName = NO_RANGE_KEY;
  2427. } else {
  2428. rangeKeyName = reflector.getAttributeName(rangeKeyGetter);
  2429. }
  2430. }
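// NO_RANGE_KEY is used as a sentinel for "no range key" (defined elsewhere in this class),
// so the identity comparison below is intentional.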
  2431. if (rangeKeyName == NO_RANGE_KEY) {
  2432. return null;
  2433. }
  2434. return rangeKeyName;
  2435. }
  2436. }
  2437. private Map<String, AttributeValue> untransformAttributes(
2438. final AttributeTransformer.Parameters<?> parameters
  2439. ) {
  2440. if (transformer != null) {
  2441. return transformer.untransform(parameters);
  2442. }
  2443. return untransformAttributes(
  2444. parameters.getModelClass(),
  2445. parameters.getAttributeValues());
  2446. }
  2447. /**
  2448. * By default, just calls {@link #untransformAttributes(String, String, Map)}.
  2449. *
  2450. * @deprecated in favor of {@link AttributeTransformer}
  2451. */
  2452. @Deprecated
  2453. protected Map<String, AttributeValue> untransformAttributes(Class<?> clazz, Map<String, AttributeValue> attributeValues) {
  2454. Method hashKeyGetter = reflector.getPrimaryHashKeyGetter(clazz);
  2455. String hashKeyName = reflector.getAttributeName(hashKeyGetter);
  2456. Method rangeKeyGetter = reflector.getPrimaryRangeKeyGetter(clazz);
  2457. String rangeKeyName = rangeKeyGetter == null ? null : reflector.getAttributeName(rangeKeyGetter);
  2458. return untransformAttributes(hashKeyName, rangeKeyName, attributeValues);
  2459. }
  2460. /**
2461. * Transforms the attribute values after loading from DynamoDB.
  2462. * Only ever called by {@link #untransformAttributes(Class, Map)}.
  2463. * By default, returns the attributes unchanged.
  2464. *
  2465. * @param hashKey the attribute name of the hash key
  2466. * @param rangeKey the attribute name of the range key (or null if there is none)
  2467. * @param attributeValues
  2468. * @return the decrypted attributes
  2469. * @deprecated in favor of {@link AttributeTransformer}
  2470. */
  2471. @Deprecated
  2472. protected Map<String, AttributeValue> untransformAttributes(String hashKey, String rangeKey,
  2473. Map<String, AttributeValue> attributeValues) {
  2474. return attributeValues;
  2475. }
  2476. private Map<String, AttributeValue> transformAttributes(
2477. final AttributeTransformer.Parameters<?> parameters) {
  2478. if (transformer != null) {
  2479. return transformer.transform(parameters);
  2480. }
  2481. return transformAttributes(
  2482. parameters.getModelClass(),
  2483. parameters.getAttributeValues());
  2484. }
  2485. /**
  2486. * By default, just calls {@link #transformAttributes(String, String, Map)}.
  2487. *
  2488. * @param clazz
  2489. * @param attributeValues
2490. * @return the encrypted attribute values
  2491. * @deprecated in favor of {@link AttributeTransformer}
  2492. */
  2493. @Deprecated
  2494. protected Map<String, AttributeValue> transformAttributes(Class<?> clazz, Map<String, AttributeValue> attributeValues) {
  2495. Method hashKeyGetter = reflector.getPrimaryHashKeyGetter(clazz);
  2496. String hashKeyName = reflector.getAttributeName(hashKeyGetter);
  2497. Method rangeKeyGetter = reflector.getPrimaryRangeKeyGetter(clazz);
  2498. String rangeKeyName = rangeKeyGetter == null ? null : reflector.getAttributeName(rangeKeyGetter);
  2499. return transformAttributes(hashKeyName, rangeKeyName, attributeValues);
  2500. }
  2501. /**
  2502. * Transform attribute values prior to storing in DynamoDB.
  2503. * Only ever called by {@link #transformAttributes(Class, Map)}.
  2504. * By default, returns the attributes unchanged.
  2505. *
  2506. * @param hashKey the attribute name of the hash key
  2507. * @param rangeKey the attribute name of the range key (or null if there is none)
  2508. * @param attributeValues
  2509. * @return the encrypted attributes
  2510. * @deprecated in favor of {@link AttributeTransformer}
  2511. */
  2512. @Deprecated
  2513. protected Map<String, AttributeValue> transformAttributes(String hashKey, String rangeKey,
  2514. Map<String, AttributeValue> attributeValues) {
  2515. return attributeValues;
  2516. }
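/*
 * A minimal sketch of how the legacy transformation hooks above can be used: a subclass
 * overrides transformAttributes(String, String, Map) and untransformAttributes(String, String, Map)
 * to adjust attribute values on the way to and from DynamoDB. The class name below is
 * hypothetical, and new code should prefer the AttributeTransformer interface instead.
 *
 *     class AuditingMapper extends DynamoDBMapper {
 *         AuditingMapper(AmazonDynamoDB dynamo) {
 *             super(dynamo);
 *         }
 *
 *         // overrides DynamoDBMapper.transformAttributes(String, String, Map)
 *         protected Map<String, AttributeValue> transformAttributes(
 *                 String hashKey, String rangeKey, Map<String, AttributeValue> attributeValues) {
 *             // inspect or copy-and-modify the attribute map before it is written
 *             return attributeValues;
 *         }
 *
 *         // overrides DynamoDBMapper.untransformAttributes(String, String, Map)
 *         protected Map<String, AttributeValue> untransformAttributes(
 *                 String hashKey, String rangeKey, Map<String, AttributeValue> attributeValues) {
 *             // reverse the transformation after the item is read
 *             return attributeValues;
 *         }
 *     }
 */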
  2517. private Map<String, AttributeValueUpdate> transformAttributeUpdates(
  2518. final Class<?> clazz,
  2519. final Map<String, AttributeValue> keys,
  2520. final Map<String, AttributeValueUpdate> updateValues,
  2521. final DynamoDBMapperConfig config
  2522. ) {
  2523. Map<String, AttributeValue> item = convertToItem(updateValues);
  2524. HashSet<String> keysAdded = new HashSet<String>();
  2525. for (Map.Entry<String, AttributeValue> e : keys.entrySet()) {
  2526. if (!item.containsKey(e.getKey())) {
  2527. keysAdded.add(e.getKey());
  2528. item.put(e.getKey(), e.getValue());
  2529. }
  2530. }
  2531. AttributeTransformer.Parameters<?> parameters =
  2532. toParameters(item, true, clazz, config);
  2533. String hashKey = parameters.getHashKeyName();
  2534. if (!item.containsKey(hashKey)) {
  2535. item.put(hashKey, keys.get(hashKey));
  2536. }
  2537. item = transformAttributes(parameters);
  2538. for(Map.Entry<String, AttributeValue> entry: item.entrySet()) {
  2539. if (keysAdded.contains(entry.getKey())) {
2540. // This key attribute was added only as context for the
2541. // transformAttributes call; it isn't actually being changed.
  2542. continue;
  2543. }
  2544. AttributeValueUpdate update = updateValues.get(entry.getKey());
  2545. if (update != null) {
  2546. update.getValue()
  2547. .withB( entry.getValue().getB() )
  2548. .withBS(entry.getValue().getBS())
  2549. .withN( entry.getValue().getN() )
  2550. .withNS(entry.getValue().getNS())
  2551. .withS( entry.getValue().getS() )
  2552. .withSS(entry.getValue().getSS());
  2553. } else {
  2554. updateValues.put(entry.getKey(),
  2555. new AttributeValueUpdate(entry.getValue(),
  2556. "PUT"));
  2557. }
  2558. }
  2559. return updateValues;
  2560. }
  2561. private void pauseExponentially(int retries) {
  2562. if (retries == 0) {
  2563. return;
  2564. }
  2565. Random random = new Random();
  2566. long delay = 0;
  2567. long scaleFactor = 500 + random.nextInt(100);
  2568. delay = (long) (Math.pow(2, retries) * scaleFactor);
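// e.g. on the 3rd retry the delay is 2^3 * (500..599) ms, i.e. roughly 4.0-4.8 seconds, before the cap below is applied.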
  2569. delay = Math.min(delay, MAX_BACKOFF_IN_MILLISECONDS);
  2570. try {
  2571. Thread.sleep(delay);
  2572. } catch (InterruptedException e) {
  2573. Thread.currentThread().interrupt();
  2574. throw new AmazonClientException(e.getMessage(), e);
  2575. }
  2576. }
  2577. /**
  2578. * Returns a new map object that merges the two sets of expected value
  2579. * conditions (user-specified or imposed by the internal implementation of
  2580. * DynamoDBMapper). Internal assertion on an attribute will be overridden by
  2581. * any user-specified condition on the same attribute.
  2582. * <p>
2583. * An exception is thrown if the two sets of conditions cannot be
2584. * combined.
  2585. */
  2586. private static Map<String, ExpectedAttributeValue> mergeExpectedAttributeValueConditions(
  2587. Map<String, ExpectedAttributeValue> internalAssertions,
  2588. Map<String, ExpectedAttributeValue> userProvidedConditions,
  2589. String userProvidedConditionOperator) {
2590. // If either condition map is null, simply return a copy of the other one.
  2591. if (internalAssertions == null && userProvidedConditions == null) {
  2592. return null;
  2593. } else if (internalAssertions == null) {
  2594. return new HashMap<String, ExpectedAttributeValue>(userProvidedConditions);
  2595. } else if (userProvidedConditions == null) {
  2596. return new HashMap<String, ExpectedAttributeValue>(internalAssertions);
  2597. }
  2598. // Start from a copy of the internal conditions
  2599. Map<String, ExpectedAttributeValue> mergedExpectedValues =
  2600. new HashMap<String, ExpectedAttributeValue>(internalAssertions);
  2601. // Remove internal conditions that are going to be overlaid by user-provided ones.
  2602. for (String attrName : userProvidedConditions.keySet()) {
  2603. mergedExpectedValues.remove(attrName);
  2604. }
  2605. // All the generated internal conditions must be joined by AND.
2606. // Throw an exception if the user specifies an OR operator and the
2607. // internal conditions are not completely overridden by the user-provided
2608. // ones.
  2609. if ( ConditionalOperator.OR.toString().equals(userProvidedConditionOperator)
  2610. && !mergedExpectedValues.isEmpty() ) {
  2611. throw new IllegalArgumentException("Unable to assert the value of the fields "
  2612. + mergedExpectedValues.keySet() + ", since the expected value conditions cannot be combined "
  2613. + "with user-specified conditions joined by \"OR\". You can use SaveBehavior.CLOBBER to "
  2614. + "skip the assertion on these fields.");
  2615. }
  2616. mergedExpectedValues.putAll(userProvidedConditions);
  2617. return mergedExpectedValues;
  2618. }
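/*
 * A small illustration of the merge semantics described above (the attribute names are
 * hypothetical): an internally generated assertion, e.g. "version must not exist" for a
 * versioned attribute, is kept alongside user conditions joined by AND, is replaced when
 * the user supplies a condition on the same attribute, and triggers the
 * IllegalArgumentException above when combined with user conditions joined by OR.
 *
 *     Map<String, ExpectedAttributeValue> internal = new HashMap<String, ExpectedAttributeValue>();
 *     internal.put("version", new ExpectedAttributeValue(false));
 *
 *     Map<String, ExpectedAttributeValue> user = new HashMap<String, ExpectedAttributeValue>();
 *     user.put("status", new ExpectedAttributeValue(new AttributeValue("OPEN")));
 *
 *     // AND (or no operator): the merged map contains both "version" and "status".
 *     // OR: IllegalArgumentException, because the internal "version" assertion
 *     // cannot be OR-ed with the user-provided conditions.
 */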
  2619. static <X extends AmazonWebServiceRequest> X applyUserAgent(X request) {
  2620. request.getRequestClientOptions().appendUserAgent(USER_AGENT);
  2621. return request;
  2622. }
  2623. /**
2624. * The return type of batchWrite, batchDelete and batchSave. It contains information about the unprocessed items
2625. * and the exception that caused the failure.
  2626. *
  2627. */
  2628. public static class FailedBatch {
  2629. private Map<String, java.util.List<WriteRequest>> unprocessedItems;
  2630. private Exception exception;
  2631. public void setUnprocessedItems(Map<String, java.util.List<WriteRequest>> unprocessedItems) {
  2632. this.unprocessedItems = unprocessedItems;
  2633. }
  2634. public Map<String, java.util.List<WriteRequest>> getUnprocessedItems() {
  2635. return unprocessedItems;
  2636. }
2637. public void setException(Exception exception) {
2638. this.exception = exception;
  2639. }
  2640. public Exception getException() {
  2641. return exception;
  2642. }
  2643. }
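/*
 * A minimal usage sketch, assuming a mapper instance and a List of mapped objects named
 * itemsToSave (both hypothetical): failed batches are reported through the returned list
 * rather than thrown, so callers are expected to inspect it.
 *
 *     List<DynamoDBMapper.FailedBatch> failures = mapper.batchSave(itemsToSave);
 *     for (DynamoDBMapper.FailedBatch failed : failures) {
 *         // Unprocessed write requests, keyed by table name, can be retried by the caller.
 *         Map<String, List<WriteRequest>> unprocessed = failed.getUnprocessedItems();
 *         Exception cause = failed.getException();
 *     }
 */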
  2644. /**
  2645. * Returns the underlying {@link S3ClientCache} for accessing S3.
  2646. */
  2647. public S3ClientCache getS3ClientCache() {
  2648. return s3cc;
  2649. }
  2650. /**
  2651. * Creates an S3Link with the specified bucket name and key using the
  2652. * default S3 region.
  2653. * This method requires the mapper to have been initialized with the
  2654. * necessary credentials for accessing S3.
  2655. *
  2656. * @throws IllegalStateException if the mapper has not been constructed
  2657. * with the necessary S3 AWS credentials.
  2658. */
  2659. public S3Link createS3Link(String bucketName, String key) {
2660. return createS3Link(null, bucketName, key);
  2661. }
  2662. /**
  2663. * Creates an S3Link with the specified region, bucket name and key.
  2664. * This method requires the mapper to have been initialized with the
  2665. * necessary credentials for accessing S3.
  2666. *
  2667. * @throws IllegalStateException if the mapper has not been constructed
  2668. * with the necessary S3 AWS credentials.
  2669. */
  2670. public S3Link createS3Link(Region s3region, String bucketName, String key) {
  2671. if ( s3cc == null ) {
  2672. throw new IllegalStateException("Mapper must be constructed with S3 AWS Credentials to create S3Link");
  2673. }
2674. return new S3Link(s3cc, s3region, bucketName, key);
  2675. }
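/*
 * A minimal usage sketch, assuming the mapper was constructed with S3-capable credentials
 * and that the client, bucket, key, and file path below are hypothetical placeholders:
 *
 *     DynamoDBMapper mapper = new DynamoDBMapper(dynamoDBClient, credentialsProvider);
 *     S3Link attachment = mapper.createS3Link("my-bucket", "attachments/report.pdf");
 *     attachment.uploadFrom(new java.io.File("/tmp/report.pdf"));
 *
 * The resulting S3Link is typically stored as an attribute of a mapped object, so the
 * pointer to the S3 object is persisted in DynamoDB alongside the rest of the item.
 */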
  2676. /**
  2677. * Parse the given POJO class and return the CreateTableRequest for the
  2678. * DynamoDB table it represents. Note that the returned request does not
  2679. * include the required ProvisionedThroughput parameters for the primary
  2680. * table and the GSIs, and that all secondary indexes are initialized with
2681. * the default projection type, KEYS_ONLY.
  2682. */
  2683. public CreateTableRequest generateCreateTableRequest(Class<?> clazz) {
  2684. return schemaParser.parseTablePojoToCreateTableRequest(clazz, config, reflector);
  2685. }
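/*
 * A minimal usage sketch, assuming an annotated model class named MyAnnotatedItem and a
 * low-level client named dynamoDBClient (both hypothetical): the caller must supply the
 * ProvisionedThroughput that this method deliberately leaves unset before creating the table.
 *
 *     CreateTableRequest request = mapper.generateCreateTableRequest(MyAnnotatedItem.class);
 *     request.setProvisionedThroughput(new ProvisionedThroughput(5L, 5L));
 *     // Any global secondary indexes on the request also need their throughput
 *     // (and possibly a different projection type) set before the call below.
 *     dynamoDBClient.createTable(request);
 */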
  2686. }