From 3254e2bb528c2e0551cacf795ab4941ff0a0b605 Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:31:03 -0500 Subject: [PATCH 01/13] Add a bootstrap and docker-compose file --- bootstrap | 80 ++++++++++++++++++++++++++++++++++++++++++++++ docker-compose.yml | 7 ++++ 2 files changed, 87 insertions(+) create mode 100755 bootstrap create mode 100644 docker-compose.yml diff --git a/bootstrap b/bootstrap new file mode 100755 index 0000000..496a81e --- /dev/null +++ b/bootstrap @@ -0,0 +1,80 @@ +#!/bin/sh +# +# NAME +# bootstrap -- initialize/update docker environment +# +# SYNOPSIS +# bootstrap +# bootstrap shellinit +# +# DESCRIPTION +# Execute this script without parameters to build the local docker +# environment. Once bootstrapped, dependent services are running +# via docker-compose and the environment variables are written to +# *build/test-environment* for future use. +# +# Running this script with the _shellinit_ command line parameter +# causes it to simply interrogate the running docker environment, +# update *build/test-environment*, and print the environment to +# the standard output stream in a shell executable manner. This +# makes the following pattern for setting environment variables +# in the current shell work. +# +# prompt% $(./bootstrap shellinit) +# +# vim: set ts=2 sts=2 sw=2 et: +PROJECT=sprockets + +if test -e /var/run/docker.sock +then + DOCKER_IP=127.0.0.1 +else + docker-machine status ${PROJECT} >/dev/null 2>/dev/null + RESULT=$? + if [ ${RESULT} -ne 0 ] + then + docker-machine create --driver virtualbox ${PROJECT} + fi + eval $(docker-machine env ${PROJECT} 2>/dev/null) || { + echo "Failed to initialize docker environment" + exit 2 + } + DOCKER_IP=$(docker-machine ip ${PROJECT}) +fi + +COMPOSE_ARGS= +if test -n "${DOCKER_COMPOSE_PREFIX}" +then + COMPOSE_ARGS="-p ${DOCKER_COMPOSE_PREFIX}" +fi + +get_exposed_port() { + docker-compose ${COMPOSE_ARGS} port $1 $2 | cut -d: -f2 +} + +build_env_file() { + DYNAMODB_PORT=$(get_exposed_port dynamodb 7777) + (echo "export DOCKER_COMPOSE_PREFIX=${DOCKER_COMPOSE_PREFIX}" + echo "export DOCKER_TLS_VERIFY=${DOCKER_TLS_VERIFY}" + echo "export DOCKER_HOST=${DOCKER_HOST}" + echo "export DOCKER_CERT_PATH=${DOCKER_CERT_PATH}" + echo "export DOCKER_MACHINE_NAME=${DOCKER_MACHINE_NAME}" + echo "export DYNAMODB_ENDPOINT=http://${DOCKER_IP}:${DYNAMODB_PORT}" + ) > $1 +} + +set -e + +mkdir -p build + +if test "$1" == 'shellinit' +then + # just build the environment file from docker containers + build_env_file build/test-environment +else + docker-compose ${COMPOSE_ARGS} stop + docker-compose ${COMPOSE_ARGS} rm --force + docker-compose ${COMPOSE_ARGS} up -d + build_env_file build/test-environment +fi +cat build/test-environment diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..8b15c7a --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,7 @@ +%YAML 1.2 +--- +dynamodb: + image: tray/dynamodb-local + command: -inMemory -port 7777 + ports: + - 7777 From c52124c366f01a6d17b04258717ad327394f2bc5 Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:31:12 -0500 Subject: [PATCH 02/13] Ignore PyCharm files --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index d42f6b1..054fa63 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ dist env *.egg-info .coverage +.idea From 15d700686d110b5cf50ae82f10b3ca0f78ae3e2e Mon Sep 17 00:00:00 2001 From: "Gavin M. 
Roy" Date: Wed, 2 Mar 2016 11:31:28 -0500 Subject: [PATCH 03/13] Add arrow and mock for testing --- requires/testing.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/requires/testing.txt b/requires/testing.txt index 74507ed..776ca53 100644 --- a/requires/testing.txt +++ b/requires/testing.txt @@ -1,2 +1,4 @@ nose>=1.3.7,<2 coverage>=3.7,<4 +arrow>=0.7.0,<1 +mock>=1.3.0,<2 From fa841bcb10206fcfd902ec836e81e3ca51de3494 Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:31:43 -0500 Subject: [PATCH 04/13] Remove stub test file --- tests.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 tests.py diff --git a/tests.py b/tests.py deleted file mode 100644 index e69de29..0000000 From 5623a13c2a10e4cdfb912b58ba0f85f3c6dcc5fe Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:32:04 -0500 Subject: [PATCH 05/13] Add exceptions and constants --- sprockets/clients/dynamodb/__init__.py | 16 +- sprockets/clients/dynamodb/exceptions.py | 194 +++++++++++++++++++++++ 2 files changed, 209 insertions(+), 1 deletion(-) create mode 100644 sprockets/clients/dynamodb/exceptions.py diff --git a/sprockets/clients/dynamodb/__init__.py b/sprockets/clients/dynamodb/__init__.py index fa8267b..878d46b 100644 --- a/sprockets/clients/dynamodb/__init__.py +++ b/sprockets/clients/dynamodb/__init__.py @@ -6,4 +6,18 @@ except ImportError as error: version_info = (0, 0, 0) __version__ = '.'.join(str(v) for v in version_info) -__all__ = ['DynamoDB', 'version_info', '__version__'] + +# Response constants +TABLE_ACTIVE = 'ACTIVE' +TABLE_CREATING = 'CREATING' +TABLE_DELETING = 'DELETING' +TABLE_DISABLED = 'DISABLED' +TABLE_UPDATING = 'UPDATING' + +# Table stream view type constants +STREAM_VIEW_NEW_IMAGE = 'NEW_IMAGE' +STREAM_VIEW_OLD_IMAGE = 'OLD_IMAGE' +STREAM_VIEW_NEW_AND_OLD_IMAGES = 'NEW_AND_OLD_IMAGES' +STREAM_VIEW_KEYS_ONLY = 'KEYS_ONLY' +_STREAM_VIEW_TYPES = (STREAM_VIEW_NEW_IMAGE, STREAM_VIEW_OLD_IMAGE, + STREAM_VIEW_NEW_AND_OLD_IMAGES, STREAM_VIEW_KEYS_ONLY) diff --git a/sprockets/clients/dynamodb/exceptions.py b/sprockets/clients/dynamodb/exceptions.py new file mode 100644 index 0000000..288763a --- /dev/null +++ b/sprockets/clients/dynamodb/exceptions.py @@ -0,0 +1,194 @@ +""" +DynamoDB Exceptions +=================== + +""" + + +class DynamoDBException(Exception): + """Base exception that is extended by all exceptions raised by + tornado_dynamodb. + + :ivar msg: The error message + + """ + def __init__(self, *args, **kwargs): + super(DynamoDBException, self).__init__(*args, **kwargs) + + +class ConditionalCheckFailedException(DynamoDBException): + """A condition specified in the operation could not be evaluated.""" + pass + + +class ConfigNotFound(DynamoDBException): + """The configuration file could not be parsed.""" + pass + + +class ConfigParserError(DynamoDBException): + """Error raised when parsing a configuration file with + :class:`~configparser.RawConfigParser` + + """ + pass + + +class InternalFailure(DynamoDBException): + """The request processing has failed because of an unknown error, exception + or failure. + + """ + pass + + +class ItemCollectionSizeLimitExceeded(DynamoDBException): + """An item collection is too large. This exception is only returned for + tables that have one or more local secondary indexes. + + """ + pass + + +class InvalidAction(DynamoDBException): + """The action or operation requested is invalid. Verify that the action is + typed correctly. 
+ + """ + pass + + +class InvalidParameterCombination(DynamoDBException): + """Parameters that must not be used together were used together.""" + pass + + +class InvalidParameterValue(DynamoDBException): + """An invalid or out-of-range value was supplied for the input parameter.""" + pass + + +class InvalidQueryParameter(DynamoDBException): + """The AWS query string is malformed or does not adhere to AWS standards.""" + pass + + +class LimitExceeded(DynamoDBException): + """The number of concurrent table requests (cumulative number of tables in + the ``CREATING``, ``DELETING`` or ``UPDATING`` state) exceeds the maximum + allowed of ``10``. + + Also, for tables with secondary indexes, only one of those tables can be in + the ``CREATING`` state at any point in time. Do not attempt to create more + than one such table simultaneously. + + The total limit of tables in the ``ACTIVE`` state is ``250``. + + """ + pass + + +class MalformedQueryString(DynamoDBException): + """The query string contains a syntax error.""" + pass + + +class MissingParameter(DynamoDBException): + """A required parameter for the specified action is not supplied.""" + pass + + +class NoCredentialsError(DynamoDBException): + """Raised when the credentials could not be located.""" + pass + + +class NoProfileError(DynamoDBException): + """Raised when the specified profile could not be located.""" + pass + + +class OptInRequired(DynamoDBException): + """The AWS access key ID needs a subscription for the service.""" + pass + + +class ThroughputExceeded(DynamoDBException): + """Your request rate is too high. The AWS SDKs for DynamoDB automatically + retry requests that receive this exception. Your request is eventually + successful, unless your retry queue is too large to finish. Reduce the + frequency of requests and use exponential backoff. For more information, go + to `Error Retries and Exponential Backoff `_ in + the Amazon DynamoDB Developer Guide. + + """ + pass + + +class RequestException(DynamoDBException): + """A generic HTTP request exception has occurred when communicating with + DynamoDB. + + """ + pass + + +class RequestExpired(DynamoDBException): + """The request reached the service more than 15 minutes after the date + stamp on the request or more than 15 minutes after the request expiration + date (such as for pre-signed URLs), or the date stamp on the request is + more than 15 minutes in the future. + + """ + pass + + +class ResourceInUse(DynamoDBException): + """he operation conflicts with the resource's availability. For example, + you attempted to recreate an existing table, or tried to delete a table + currently in the ``CREATING`` state. + + """ + pass + + +class ResourceNotFound(DynamoDBException): + """The operation tried to access a nonexistent table or index. The resource + might not be specified correctly, or its status might not be ``ACTIVE``. + + """ + pass + + +class ServiceUnavailable(DynamoDBException): + """The request has failed due to a temporary failure of the server.""" + pass + + +class ThrottlingException(DynamoDBException): + """The request was denied due to request throttling.""" + pass + + +class TimeoutException(DynamoDBException): + """The request to DynamoDB timed out.""" + pass + + +class ValidationException(DynamoDBException): + """The input fails to satisfy the constraints specified by an AWS service. 
+ + """ + pass + + +MAP = { + 'com.amazonaws.dynamodb.v20120810#InternalFailure': InternalFailure, + 'com.amazonaws.dynamodb.v20120810#ProvisionedThroughputExceeded': + ThroughputExceeded, + 'com.amazonaws.dynamodb.v20120810#ResourceInUseException': ResourceInUse, + 'com.amazonaws.dynamodb.v20120810#ResourceNotFoundException': + ResourceNotFound, + 'com.amazon.coral.validate#ValidationException': ValidationException +} From eeebc47e5fe85720d56d9c53b83ac1c5e1298802 Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:32:15 -0500 Subject: [PATCH 06/13] Add utils tests --- tests/utils_tests.py | 122 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 122 insertions(+) create mode 100644 tests/utils_tests.py diff --git a/tests/utils_tests.py b/tests/utils_tests.py new file mode 100644 index 0000000..d7edd0e --- /dev/null +++ b/tests/utils_tests.py @@ -0,0 +1,122 @@ +import datetime +import unittest +import uuid + +import arrow + +from sprockets.clients.dynamodb import utils + + +class UTC(datetime.tzinfo): + def utcoffset(self, dt): + return datetime.timedelta(0) + + def tzname(self, dt): + return 'UTC' + + def dst(self, dt): + return datetime.timedelta(0) + + +class MarshallTests(unittest.TestCase): + maxDiff = None + + def test_complex_document(self): + uuid_value = uuid.uuid4() + arrow_value = arrow.utcnow() + dt_value = datetime.datetime.utcnow().replace(tzinfo=UTC()) + value = { + 'key1': 'str', + 'key2': 10, + 'key3': { + 'sub-key1': 20, + 'sub-key2': True, + 'sub-key3': 'value' + }, + 'key4': None, + 'key5': ['one', 'two', 'three', 4, None, True], + 'key6': set(['a', 'b', 'c']), + 'key7': {1, 2, 3, 4}, + 'key8': arrow_value, + 'key9': uuid_value, + 'key10': b'\0x01\0x02\0x03', + 'key11': {b'\0x01\0x02\0x03', b'\0x04\0x05\0x06'}, + 'key12': dt_value + } + expectation = { + 'key1': {'S': 'str'}, + 'key2': {'N': '10'}, + 'key3': {'M': + { + 'sub-key1': {'N': '20'}, + 'sub-key2': {'BOOL': True}, + 'sub-key3': {'S': 'value'} + } + }, + 'key4': {'NULL': True}, + 'key5': {'L': [{'S': 'one'}, {'S': 'two'}, {'S': 'three'}, + {'N': '4'}, {'NULL': True}, {'BOOL': True}]}, + 'key6': {'SS': ['a', 'b', 'c']}, + 'key7': {'NS': ['1', '2', '3', '4']}, + 'key8': {'S': arrow_value.isoformat()}, + 'key9': {'S': str(uuid_value)}, + 'key10': {'B': b'\0x01\0x02\0x03'}, + 'key11': {'BS': [b'\0x01\0x02\0x03', b'\0x04\0x05\0x06']}, + 'key12': {'S': dt_value.isoformat()} + } + self.assertDictEqual(expectation, utils.marshall(value)) + + def test_value_error_raised_on_unsupported_type(self): + self.assertRaises(ValueError, utils.marshall, {'key': self}) + + def test_value_error_raised_on_mixed_set(self): + self.assertRaises(ValueError, utils.marshall, {'key': {1, 'two', 3}}) + + +class UnmarshallTests(unittest.TestCase): + maxDiff = None + + def test_complex_document(self): + uuid_value = uuid.uuid4() + dt_value = arrow.utcnow() + value = { + 'key1': {'S': 'str'}, + 'key2': {'N': '10'}, + 'key3': {'M': + { + 'sub-key1': {'N': '20'}, + 'sub-key2': {'BOOL': True}, + 'sub-key3': {'S': 'value'} + } + }, + 'key4': {'NULL': True}, + 'key5': {'L': [{'S': 'one'}, {'S': 'two'}, {'S': 'three'}, + {'N': '4'}, {'NULL': True}, {'BOOL': True}]}, + 'key6': {'SS': ['a', 'b', 'c']}, + 'key7': {'NS': ['1', '2', '3', '4']}, + 'key8': {'S': dt_value.isoformat()}, + 'key9': {'S': str(uuid_value)}, + 'key10': {'B': b'\0x01\0x02\0x03'}, + 'key11': {'BS': [b'\0x01\0x02\0x03', b'\0x04\0x05\0x06']} + } + expectation = { + 'key1': 'str', + 'key2': 10, + 'key3': { + 'sub-key1': 20, + 'sub-key2': True, + 'sub-key3': 
'value' + }, + 'key4': None, + 'key5': ['one', 'two', 'three', 4, None, True], + 'key6': {'a', 'b', 'c'}, + 'key7': {1, 2, 3, 4}, + 'key8': dt_value.isoformat(), + 'key9': uuid_value, + 'key10': b'\0x01\0x02\0x03', + 'key11': {b'\0x01\0x02\0x03', b'\0x04\0x05\0x06'} + } + self.assertDictEqual(expectation, utils.unmarshall(value)) + + def test_value_error_raised_on_unsupported_type(self): + self.assertRaises(ValueError, utils.unmarshall, {'key': {'T': 1}}) From 358aa2d379f3691caa66eec6a6fc735687106df7 Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:36:56 -0500 Subject: [PATCH 07/13] Setup travis to test changes and deploy --- .travis.yml | 21 ++++++++++++++++++++- 1 file changed, 20 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index e325c75..8e23f99 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,33 @@ +sudo: reuired +services: + - docker language: python python: - 2.7 - 3.4 - 3.5 before_install: -- pip install nose coverage codecov +- docker pull tray/dynamodb-local +- docker run -d -p 7777:7777 tray/dynamodb-local -inMemory -port 7777 +- mkdir /home/travis/.aws +- printf "[default]\nregion=us-east-1\noutput=json\n" > /home/travis/.aws/config +- printf "[default]\naws_access_key_id = FAKE0000000000000000\naws_secret_access_key = FAKE000000000000000000000000000000000000\n" > /home/travis/.aws/credentials - pip install -r requires/testing.txt install: - pip install -e . +env: + DYNAMODB_ENDPOINT: http://localhost:7777 script: nosetests --with-coverage after_success: - codecov sudo: false +deploy: + distributions: sdist bdist_wheel + provider: pypi + user: sprockets + on: + python: 3.5 + tags: true + all_branches: true + password: + secure: "pCvF0ROHU/p+mDgZT40yoRdNUmpov5B1jUh7mJ6bAUlsMNEaugX/cL+cUGNLgIhrcwBF93B7kdfuhGjO/2uF+k8aPhPocewwJ9qPTTyNMLGjpIclWp56KH9KLNISGmeTPguw06bpV0xOUw40AvSfTw4nmf4jaZsx1Ai2DUuoji7m1OvXwLL5+zXclngmxF7zVvPTnKmPDbJWUsF3n4DEJml8GBr7NW92yIo0Zu1LG3AiNrZWBebWa58Uv/DKOHQXYgyK0j3EixzTPkptoQgAByA6OVPPh6UOE2GUXuV83vDKeciyr/AExLQnlIaONa2FS4utOFdu2zoLsUJy+jeCJxVZ5D+jfYXSx1LyeQKjOZikUKNhI3O3XH7IYwd2YqhlRAE6SvFGQB1nYn6mXklSwdyOEaQ0ufUY4aCH9PRvswOUDJKIJw4xsiEUF46enrGWHVCnW3l0fPbhPx1GbB/PfzcJS3WSEgOKHbZ2u7PHrIkElxAOrI6Vabmrr0g5GD1T2DqBls600lQ/+HkRQ9cXVjegiUach3xj3IKL/gZJUuqiwl2xMPdIfi33GsZp5OItSt1fNmBZo4gz5zBEXYShpgeqx0hP0XEfQWnDLNoaHNhzaW9d1PYs7JHIsiLRw9HcJNdRzm4u08442m42WyEP5i3XnpmylFu6U+2a1mR4VEg=" From 9e14ac5803acc1a45077d4519055a765eaff0cbf Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:37:06 -0500 Subject: [PATCH 08/13] Add codecov to testing requirements --- requires/testing.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/requires/testing.txt b/requires/testing.txt index 776ca53..d38f130 100644 --- a/requires/testing.txt +++ b/requires/testing.txt @@ -2,3 +2,4 @@ nose>=1.3.7,<2 coverage>=3.7,<4 arrow>=0.7.0,<1 mock>=1.3.0,<2 +codecov>=1.6.3,<2 From 30a5ebc72d5f4abd693e2023ac84409f4bcda785 Mon Sep 17 00:00:00 2001 From: "Gavin M. 
Roy" Date: Wed, 2 Mar 2016 11:38:09 -0500 Subject: [PATCH 09/13] Merge in tornado-dynamodb code and tests --- sprockets/clients/dynamodb/connector.py | 1171 ++++++++++++++++++++++- tests/api_tests.py | 205 ++++ 2 files changed, 1346 insertions(+), 30 deletions(-) create mode 100644 tests/api_tests.py diff --git a/sprockets/clients/dynamodb/connector.py b/sprockets/clients/dynamodb/connector.py index c6fb848..564780e 100644 --- a/sprockets/clients/dynamodb/connector.py +++ b/sprockets/clients/dynamodb/connector.py @@ -2,10 +2,12 @@ import json import logging import os -from tornado import concurrent, ioloop -from tornado_aws import client +from tornado import concurrent, httpclient, ioloop +import tornado_aws +from tornado_aws import exceptions as aws_exceptions from . import utils +from . import exceptions LOGGER = logging.getLogger(__name__) @@ -50,7 +52,7 @@ class DynamoDB(object): @property def client(self): if self._client is None: - self._client = client.AsyncAWSClient('dynamodb', **self._args) + self._client = tornado_aws.AsyncAWSClient('dynamodb', **self._args) return self._client def execute(self, function, body): @@ -67,6 +69,23 @@ class DynamoDB(object): easier for you. It does this for the ``GetItem`` and ``Query`` functions currrently. + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + """ encoded = json.dumps(body).encode('utf-8') headers = { @@ -78,17 +97,38 @@ class DynamoDB(object): def handle_response(f): self.logger.debug('processing %s() = %r', function, f) try: - response = f.result() - result = json.loads(response.body.decode('utf-8')) - future.set_result(_unwrap_result(function, result)) + result = self._process_response(f) + except aws_exceptions.AWSError as aws_error: + future.set_exception(exceptions.DynamoDBException(aws_error)) + except httpclient.HTTPError as http_err: + if http_err.code == 599: + future.set_exception(exceptions.TimeoutException()) + else: + future.set_exception( + exceptions.RequestException(http_err.message)) except Exception as exception: future.set_exception(exception) + else: + future.set_result(_unwrap_result(function, result)) - self.logger.debug('calling %s', function) - aws_response = self.client.fetch('POST', '/', body=encoded, - headers=headers) - ioloop.IOLoop.current().add_future(aws_response, handle_response) - + try: + aws_response = self.client.fetch('POST', '/', body=encoded, + headers=headers) + except aws_exceptions.ConfigNotFound as error: + future.set_exception(exceptions.ConfigNotFound(str(error))) + except aws_exceptions.ConfigParserError as error: 
+ future.set_exception(exceptions.ConfigParserError(str(error))) + except aws_exceptions.NoCredentialsError as error: + future.set_exception(exceptions.NoCredentialsError(str(error))) + except aws_exceptions.NoProfileError as error: + future.set_exception(exceptions.NoProfileError(str(error))) + except httpclient.HTTPError as err: + if err.code == 599: + future.set_exception(exceptions.TimeoutException()) + else: + future.set_exception(exceptions.RequestException(err.message)) + else: + ioloop.IOLoop.current().add_future(aws_response, handle_response) return future def create_table(self, table_definition): @@ -102,28 +142,184 @@ class DynamoDB(object): .. _CreateTable: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_CreateTable.html - """ - return self.execute('CreateTable', table_definition) + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` - def describe_table(self, table_name): """ - Invoke the `DescribeTable`_ function. + future = concurrent.TracebackFuture() - :param str table_name: name of the table to describe. + def handle_response(response): + exception = response.exception() + if exception: + future.set_exception(exception) + else: + future.set_result(response.result()['TableDescription']) + + aws_response = self.execute('CreateTable', table_definition) + ioloop.IOLoop.current().add_future(aws_response, handle_response) + return future + + def update_table(self, table_definition): + """ + Modifies the provisioned throughput settings, global secondary + indexes, or DynamoDB Streams settings for a given table. + + You can only perform one of the following operations at once: + + - Modify the provisioned throughput settings of the table. + - Enable or disable Streams on the table. + - Remove a global secondary index from the table. + - Create a new global secondary index on the table. Once the index + begins backfilling, you can use *UpdateTable* to perform other + operations. + + *UpdateTable* is an asynchronous operation; while it is executing, the + table status changes from ``ACTIVE`` to ``UPDATING``. While it is + ``UPDATING``, you cannot issue another *UpdateTable* request. When the + table returns to the ``ACTIVE`` state, the *UpdateTable* operation is + complete. + + :param dict table_definition: description of the table to + update according to `UpdateTable`_ :rtype: tornado.concurrent.Future - .. 
_DescribeTable: http://docs.aws.amazon.com/amazondynamodb/ - latest/APIReference/API_DescribeTable.html + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _UpdateTable: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_UpdateTable.html """ - return self.execute('DescribeTable', {'TableName': table_name}) + raise NotImplementedError def delete_table(self, table_name): """ - Invoke the `DeleteTable`_ function. + Invoke the `DeleteTable`_ function. The DeleteTable operation deletes a + table and all of its items. After a DeleteTable request, the specified + table is in the DELETING state until DynamoDB completes the deletion. + If the table is in the ACTIVE state, you can delete it. If a table is + in CREATING or UPDATING states, then a + :py:exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + exception is raised. If the specified table does not exist, a + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + exception is raised. If table is already in the DELETING state, no + error is returned. :param str table_name: name of the table to describe. :rtype: tornado.concurrent.Future + :returns: Response Format: + + .. 
code:: json + + { + "AttributeDefinitions": [{ + "AttributeName": "string", + "AttributeType": "string" + }], + "CreationDateTime": number, + "GlobalSecondaryIndexes": [{ + "Backfilling": boolean, + "IndexArn": "string", + "IndexName": "string", + "IndexSizeBytes": number, + "IndexStatus": "string", + "ItemCount": number, + "KeySchema": [{ + "AttributeName": "string", + "KeyType": "string" + }], + "Projection": { + "NonKeyAttributes": [ + "string" + ], + "ProjectionType": "string" + }, + "ProvisionedThroughput": { + "LastDecreaseDateTime": number, + "LastIncreaseDateTime": number, + "NumberOfDecreasesToday": number, + "ReadCapacityUnits": number, + "WriteCapacityUnits": number + } + }], + "ItemCount": number, + "KeySchema": [{ + "AttributeName": "string", + "KeyType": "string" + }], + "LatestStreamArn": "string", + "LatestStreamLabel": "string", + "LocalSecondaryIndexes": [{ + "IndexArn": "string", + "IndexName": "string", + "IndexSizeBytes": number, + "ItemCount": number, + "KeySchema": [{ + "AttributeName": "string", + "KeyType": "string" + }], + "Projection": { + "NonKeyAttributes": [ + "string" + ], + "ProjectionType": "string" + } + }], + "ProvisionedThroughput": { + "LastDecreaseDateTime": number, + "LastIncreaseDateTime": number, + "NumberOfDecreasesToday": number, + "ReadCapacityUnits": number, + "WriteCapacityUnits": number + }, + "StreamSpecification": { + "StreamEnabled": boolean, + "StreamViewType": "string" + }, + "TableArn": "string", + "TableName": "string", + "TableSizeBytes": number, + "TableStatus": "string" + } + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` .. _DeleteTable: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_DeleteTable.html @@ -131,37 +327,952 @@ class DynamoDB(object): """ return self.execute('DeleteTable', {'TableName': table_name}) - def put_item(self, table_name, item): + def describe_table(self, table_name): """ - Invoke the `PutItem`_ function. + Invoke the `DescribeTable`_ function. - :param str table_name: table to insert into - :param dict item: item to insert. This will be marshalled - for you so a native :class:`dict` of native items works. + :param str table_name: name of the table to describe. :rtype: tornado.concurrent.Future + :returns: Response Format: + + .. 
code:: json + + { + "AttributeDefinitions": [{ + "AttributeName": "string", + "AttributeType": "string" + }], + "CreationDateTime": number, + "GlobalSecondaryIndexes": [{ + "Backfilling": boolean, + "IndexArn": "string", + "IndexName": "string", + "IndexSizeBytes": number, + "IndexStatus": "string", + "ItemCount": number, + "KeySchema": [{ + "AttributeName": "string", + "KeyType": "string" + }], + "Projection": { + "NonKeyAttributes": [ + "string" + ], + "ProjectionType": "string" + }, + "ProvisionedThroughput": { + "LastDecreaseDateTime": number, + "LastIncreaseDateTime": number, + "NumberOfDecreasesToday": number, + "ReadCapacityUnits": number, + "WriteCapacityUnits": number + } + }], + "ItemCount": number, + "KeySchema": [{ + "AttributeName": "string", + "KeyType": "string" + }], + "LatestStreamArn": "string", + "LatestStreamLabel": "string", + "LocalSecondaryIndexes": [{ + "IndexArn": "string", + "IndexName": "string", + "IndexSizeBytes": number, + "ItemCount": number, + "KeySchema": [{ + "AttributeName": "string", + "KeyType": "string" + }], + "Projection": { + "NonKeyAttributes": [ + "string" + ], + "ProjectionType": "string" + } + }], + "ProvisionedThroughput": { + "LastDecreaseDateTime": number, + "LastIncreaseDateTime": number, + "NumberOfDecreasesToday": number, + "ReadCapacityUnits": number, + "WriteCapacityUnits": number + }, + "StreamSpecification": { + "StreamEnabled": boolean, + "StreamViewType": "string" + }, + "TableArn": "string", + "TableName": "string", + "TableSizeBytes": number, + "TableStatus": "string" + } + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _DescribeTable: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_DescribeTable.html + + """ + future = concurrent.TracebackFuture() + + def handle_response(response): + exception = response.exception() + if exception: + future.set_exception(exception) + else: + future.set_result(response.result()['Table']) + + aws_response = self.execute('DescribeTable', {'TableName': table_name}) + ioloop.IOLoop.current().add_future(aws_response, handle_response) + return future + + def list_tables(self, exclusive_start_table_name=None, limit=None): + """ + Invoke the `ListTables`_ function. + + Returns an array of table names associated with the current account + and endpoint. The output from *ListTables* is paginated, with each page + returning a maximum of ``100`` table names. + + :param str exclusive_start_table_name: The first table name that this + operation will evaluate. Use the value that was returned for + ``LastEvaluatedTableName`` in a previous operation, so that you can + obtain the next page of results. 
+ :param int limit: A maximum number of table names to return. If this + parameter is not specified, the limit is ``100``. + :returns: Response Format: + + .. code:: json + + { + "LastEvaluatedTableName": "string", + "TableNames": [ + "string" + ] + } + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _ListTables: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_ListTables.html + + """ + payload = {} + if exclusive_start_table_name: + payload['ExclusiveStartTableName'] = exclusive_start_table_name + if limit: + payload['Limit'] = limit + return self.execute('ListTables', payload) + + def put_item(self, table_name, item, return_values=False, + condition_expression=None, + expression_attribute_names=None, + expression_attribute_values=None, + return_consumed_capacity=None, + return_item_collection_metrics=False): + """Invoke the `PutItem`_ function, creating a new item, or replaces an + old item with a new item. If an item that has the same primary key as + the new item already exists in the specified table, the new item + completely replaces the existing item. You can perform a conditional + put operation (add a new item if one with the specified primary key + doesn't exist), or replace an existing item if it has certain attribute + values. + + In addition to putting an item, you can also return the item's + attribute values in the same operation, using the ``return_values`` + parameter. + + When you add an item, the primary key attribute(s) are the only + required attributes. Attribute values cannot be null. String and Binary + type attributes must have lengths greater than zero. Set type + attributes cannot be empty. Requests with empty values will be rejected + with a + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException`. + + You can request that PutItem return either a copy of the original item + (before the update) or a copy of the updated item (after the update). + For more information, see the ReturnValues description below. + + .. note:: To prevent a new item from replacing an existing item, use a + conditional expression that contains the attribute_not_exists + function with the name of the attribute being used as the partition + key for the table. Since every record must contain that attribute, + the attribute_not_exists function will only succeed if no matching + item exists. + + For more information about using this API, see Working with Items in + the Amazon DynamoDB Developer Guide. + + :param str table_name: The table to put the item to + :param dict item: A map of attribute name/value pairs, one for each + attribute. 
Only the primary key attributes are required; you can + optionally provide other attribute name-value pairs for the item. + + You must provide all of the attributes for the primary key. For + example, with a simple primary key, you only need to provide a + value for the partition key. For a composite primary key, you must + provide both values for both the partition key and the sort key. + + If you specify any attributes that are part of an index key, then + the data types for those attributes must match those of the schema + in the table's attribute definition. + :param bool return_values: Set to ``True`` if you want to get the item + attributes as they appeared before they were updated with the + *PutItem* request. + :param str condition_expression: A condition that must be satisfied in + order for a conditional *PutItem* operation to succeed. See the + `AWS documentation for ConditionExpression `_ for more information. + :param dict expression_attribute_names: One or more substitution tokens + for attribute names in an expression. See the `AWS documentation + for ExpressionAttributeNames `_ for more information. + :param dict expression_attribute_values: One or more values that can be + substituted in an expression. See the `AWS documentation + for ExpressionAttributeValues `_ for more information. + :param str return_consumed_capacity: Determines the level of detail + about provisioned throughput consumption that is returned in the + response. Should be ``None`` or one of ``INDEXES`` or ``TOTAL`` + :param bool return_item_collection_metrics: Determines whether item + collection metrics are returned. + :rtype: tornado.concurrent.Future + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` .. 
_PutItem: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_PutItem.html """ - return self.execute('PutItem', {'TableName': table_name, - 'Item': utils.marshall(item)}) + payload = {'TableName': table_name, 'Item': utils.marshall(item)} + if condition_expression: + payload['ConditionExpression'] = condition_expression + if expression_attribute_names: + payload['ExpressionAttributeNames'] = expression_attribute_names + if expression_attribute_values: + payload['ExpressionAttributeValues'] = expression_attribute_values + if return_consumed_capacity: + payload['ReturnConsumedCapacity'] = return_consumed_capacity + if return_item_collection_metrics: + payload['ReturnItemCollectionMetrics'] = 'SIZE' + if return_values: + payload['ReturnValues'] = 'ALL_OLD' + return self.execute('PutItem', payload) - def get_item(self, table_name, key_dict): + def get_item(self, table_name, key_dict, consistent_read=False, + expression_attribute_names=None, + projection_expression=None, return_consumed_capacity=None): """ Invoke the `GetItem`_ function. :param str table_name: table to retrieve the item from :param dict key_dict: key to use for retrieval. This will be marshalled for you so a native :class:`dict` works. + :param bool consistent_read: Determines the read consistency model: If + set to :py:data`True`, then the operation uses strongly consistent + reads; otherwise, the operation uses eventually consistent reads. + :param dict expression_attribute_names: One or more substitution tokens + for attribute names in an expression. + :param str projection_expression: A string that identifies one or more + attributes to retrieve from the table. These attributes can include + scalars, sets, or elements of a JSON document. The attributes in + the expression must be separated by commas. If no attribute names + are specified, then all attributes will be returned. If any of the + requested attributes are not found, they will not appear in the + result. + :param str return_consumed_capacity: Determines the level of detail + about provisioned throughput consumption that is returned in the + response: + + - INDEXES: The response includes the aggregate consumed + capacity for the operation, together with consumed capacity for + each table and secondary index that was accessed. Note that + some operations, such as *GetItem* and *BatchGetItem*, do not + access any indexes at all. In these cases, specifying INDEXES + will only return consumed capacity information for table(s). + - TOTAL: The response includes only the aggregate consumed + capacity for the operation. + - NONE: No consumed capacity details are included in the + response. 
:rtype: tornado.concurrent.Future + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_GetItem.html """ - return self.execute('GetItem', {'TableName': table_name, - 'Key': utils.marshall(key_dict)}) + payload = {'TableName': table_name, + 'Key': utils.marshall(key_dict), + 'ConsistentRead': consistent_read} + if expression_attribute_names: + payload['ExpressionAttributeNames'] = expression_attribute_names + if projection_expression: + payload['ProjectionExpression'] = projection_expression + if return_consumed_capacity: + payload['ReturnConsumedCapacity'] = return_consumed_capacity + return self.execute('GetItem', payload) + + def update_item(self, table_name, key, return_values=False, + condition_expression=None, update_expression=None, + expression_attribute_names=None, + expression_attribute_values=None, + return_consumed_capacity=None, + return_item_collection_metrics=False): + """Invoke the `UpdateItem`_ function. + + Edits an existing item's attributes, or adds a new item to the table + if it does not already exist. You can put, delete, or add attribute + values. You can also perform a conditional update on an existing item + (insert a new attribute name-value pair if it doesn't exist, or replace + an existing name-value pair if it has certain expected attribute + values). + + :param str table_name: The name of the table that contains the item to + update + :param dict key: A dictionary of key/value pairs that are used to + define the primary key values for the item. For the primary key, + you must provide all of the attributes. For example, with a simple + primary key, you only need to provide a value for the partition + key. For a composite primary key, you must provide values for both + the partition key and the sort key. + :param bool return_values: Set to ``True`` if you want to get the item + attributes as they appeared before they were updated with the + *UpdateItem* request. + :param str condition_expression: A condition that must be satisfied in + order for a conditional *UpdateItem* operation to succeed. One of: + ``attribute_exists``, ``attribute_not_exists``, ``attribute_type``, + ``contains``, ``begins_with``, ``size``, ``=``, ``<>``, ``<``, + ``>``, ``<=``, ``>=``, ``BETWEEN``, ``IN``, ``AND``, ``OR``, or + ``NOT``. + :param str update_expression: An expression that defines one or more + attributes to be updated, the action to be performed on them, and + new value(s) for them. 
+ :param dict expression_attribute_names: One or more substitution tokens + for attribute names in an expression. + :param dict expression_attribute_values: One or more values that can be + substituted in an expression. + :param str return_consumed_capacity: Determines the level of detail + about provisioned throughput consumption that is returned in the + response. Should be ``None`` or one of ``INDEXES`` or ``TOTAL`` + :param bool return_item_collection_metrics: Determines whether item + collection metrics are returned. + :rtype: dict + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_UpdateItem.html + + """ + raise NotImplementedError + + def delete_item(self, table_name, key, condition_expression=None, + expression_attribute_names=None, + expression_attribute_values=None, + return_consumed_capacity=None, + return_item_collection_metrics=False, + return_values=False): + """Invoke the `DeleteItem`_ function that deletes a single item in a + table by primary key. You can perform a conditional delete operation + that deletes the item if it exists, or if it has an expected attribute + value. + + In addition to deleting an item, you can also return the item's + attribute values in the same operation, using the ``return_values`` + parameter. + + Unless you specify conditions, the *DeleteItem* is an idempotent + operation; running it multiple times on the same item or attribute does + not result in an error response. + + Conditional deletes are useful for deleting items only if specific + conditions are met. If those conditions are met, DynamoDB performs the + delete. Otherwise, the item is not deleted. + + :param str table_name: The name of the table from which to delete the + item. + :param dict key: A map of attribute names to ``AttributeValue`` + objects, representing the primary key of the item to delete. For + the primary key, you must provide all of the attributes. For + example, with a simple primary key, you only need to provide a + value for the partition key. For a composite primary key, you must + provide values for both the partition key and the sort key. + :param str condition_expression: A condition that must be satisfied in + order for a conditional *DeleteItem* to succeed. See the `AWS + documentation for ConditionExpression `_ for more information. + :param dict expression_attribute_names: One or more substitution tokens + for attribute names in an expression. 
See the `AWS documentation + for ExpressionAttributeNames `_ for more information. + :param dict expression_attribute_values: One or more values that can be + substituted in an expression. See the `AWS documentation + for ExpressionAttributeValues `_ for more information. + :param str return_consumed_capacity: Determines the level of detail + about provisioned throughput consumption that is returned in the + response. See the `AWS documentation + for ReturnConsumedCapacity `_ for more information. + :param bool return_item_collection_metrics: Determines whether item + collection metrics are returned. + :param bool return_values: Return the item attributes as they appeared + before they were deleted. + :returns: Response format: + + .. code:: json + + { + "Attributes": { + "string": { + "B": blob, + "BOOL": boolean, + "BS": [ + blob + ], + "L": [ + AttributeValue + ], + "M": { + "string": AttributeValue + }, + "N": "string", + "NS": [ + "string" + ], + "NULL": boolean, + "S": "string", + "SS": [ + "string" + ] + } + }, + "ConsumedCapacity": { + "CapacityUnits": number, + "GlobalSecondaryIndexes": { + "string": { + "CapacityUnits": number + } + }, + "LocalSecondaryIndexes": { + "string": { + "CapacityUnits": number + } + }, + "Table": { + "CapacityUnits": number + }, + "TableName": "string" + }, + "ItemCollectionMetrics": { + "ItemCollectionKey": { + "string": { + "B": blob, + "BOOL": boolean, + "BS": [ + blob + ], + "L": [ + AttributeValue + ], + "M": { + "string": AttributeValue + }, + "N": "string", + "NS": [ + "string" + ], + "NULL": boolean, + "S": "string", + "SS": [ + "string" + ] + } + }, + "SizeEstimateRangeGB": [ + number + ] + } + } + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_DeleteItem.html + + """ + raise NotImplementedError + + def batch_get_item(self): + """Invoke the `BatchGetItem`_ function. 
+ + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_BatchGetItem.html + + """ + raise NotImplementedError + + def batch_write_item(self): + """Invoke the `BatchWriteItem`_ function. + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_BatchWriteItem.html + + """ + raise NotImplementedError + + def query(self, table_name, consistent_read=False, + exclusive_start_key=None, expression_attribute_names=None, + expression_attribute_values=None, filter_expression=None, + projection_expression=None, index_name=None,limit=None, + return_consumed_capacity=None, scan_index_forward=True, + select=None): + """A `Query`_ operation uses the primary key of a table or a secondary + index to directly access items from that table or index. + + You can use the ``scan_index_forward`` parameter to get results in + forward or reverse order, by sort key. + + Queries that do not return results consume the minimum number of read + capacity units for that type of read operation. + + If the total number of items meeting the query criteria exceeds the + result set size limit of 1 MB, the query stops and results are returned + to the user with the ``LastEvaluatedKey`` element to continue the query + in a subsequent operation. Unlike a *Scan* operation, a Query operation + never returns both an empty result set and a ``LastEvaluatedKey`` + value. 
``LastEvaluatedKey`` is only provided if the results exceed + 1 MB, or if you have used the ``limit`` parameter. + + You can query a table, a local secondary index, or a global secondary + index. For a query on a table or on a local secondary index, you can + set the ``consistent_read`` parameter to true and obtain a strongly + consistent result. Global secondary indexes support eventually + consistent reads only, so do not specify ``consistent_read`` when + querying a global secondary index. + + :param str table_name: The name of the table containing the requested + items. + :param bool consistent_read: Determines the read consistency model: If + set to ``True``, then the operation uses strongly consistent reads; + otherwise, the operation uses eventually consistent reads. Strongly + consistent reads are not supported on global secondary indexes. If + you query a global secondary index with ``consistent_read`` set to + ``True``, you will receive a + :exc:`~tornado_dynamodb.exceptions.ValidationException`. + :param str|bytes|int exclusive_start_key: The primary key of the first + item that this operation will evaluate. Use the value that was + returned for ``LastEvaluatedKey`` in the previous operation. In a + parallel scan, a *Scan* request that includes + ``exclusive_start_key`` must specify the same segment whose + previous *Scan* returned the corresponding value of + ``LastEvaluatedKey``. + :param dict expression_attribute_names: One or more substitution tokens + for attribute names in an expression. + :param dict expression_attribute_values: One or more values that can be + substituted in an expression. + :param str filter_expression: A string that contains conditions that + DynamoDB applies after the *Query* operation, but before the data + is returned to you. Items that do not satisfy the criteria are not + returned. Note that a filter expression is applied after the items + have already been read; the process of filtering does not consume + any additional read capacity units. For more information, see + `Filter Expressions `_ in the + Amazon DynamoDB Developer Guide. + :param str projection_expression: + :param str index_name: The name of a secondary index to query. This + index can be any local secondary index or global secondary index. + Note that if you use this parameter, you must also provide + ``table_name``. + :param int limit: The maximum number of items to evaluate (not + necessarily the number of matching items). If DynamoDB processes + the number of items up to the limit while processing the results, + it stops the operation and returns the matching values up to that + point, and a key in ``LastEvaluatedKey`` to apply in a subsequent + operation, so that you can pick up where you left off. Also, if the + processed data set size exceeds 1 MB before DynamoDB reaches this + limit, it stops the operation and returns the matching values up to + the limit, and a key in ``LastEvaluatedKey`` to apply in a + subsequent operation to continue the operation. For more + information, see `Query and Scan `_ in the Amazon + DynamoDB Developer Guide. + :param str return_consumed_capacity: Determines the level of detail + about provisioned throughput consumption that is returned in the + response: + + - ``INDEXES``: The response includes the aggregate consumed + capacity for the operation, together with consumed capacity for + each table and secondary index that was accessed. Note that + some operations, such as *GetItem* and *BatchGetItem*, do not + access any indexes at all. 
In these cases, specifying + ``INDEXES`` will only return consumed capacity information for + table(s). + - ``TOTAL``: The response includes only the aggregate consumed + capacity for the operation. + - ``NONE``: No consumed capacity details are included in the + response. + :param bool scan_index_forward: Specifies the order for index + traversal: If ``True`` (default), the traversal is performed in + ascending order; if ``False``, the traversal is performed in + descending order. Items with the same partition key value are + stored in sorted order by sort key. If the sort key data type is + *Number*, the results are stored in numeric order. For type + *String*, the results are stored in order of ASCII character code + values. For type *Binary*, DynamoDB treats each byte of the binary + data as unsigned. If set to ``True``, DynamoDB returns the results + in the order in which they are stored (by sort key value). This is + the default behavior. If set to ``False``, DynamoDB reads the + results in reverse order by sort key value, and then returns the + results to the client. + :param str select: The attributes to be returned in the result. You can + retrieve all item attributes, specific item attributes, the count + of matching items, or in the case of an index, some or all of the + attributes projected into the index. Possible values are: + + - ``ALL_ATTRIBUTES``: Returns all of the item attributes from the + specified table or index. If you query a local secondary index, + then for each matching item in the index DynamoDB will fetch + the entire item from the parent table. If the index is + configured to project all item attributes, then all of the data + can be obtained from the local secondary index, and no fetching + is required. + - ``ALL_PROJECTED_ATTRIBUTES``: Allowed only when querying an + index. Retrieves all attributes that have been projected into + the index. If the index is configured to project all + attributes, this return value is equivalent to specifying + ``ALL_ATTRIBUTES``. + - ``COUNT``: Returns the number of matching items, rather than + the matching items themselves. + :rtype: dict + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. 
_Query: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_Query.html + + """ + raise NotImplementedError + + def scan(self, table_name, consistent_read=False, exclusive_start_key=None, + expression_attribute_names=None, expression_attribute_values=None, + filter_expression=None, projection_expression=None, + index_name=None, limit=None, return_consumed_capacity=None, + segment=None, total_segments=None): + """The `Scan`_ operation returns one or more items and item attributes + by accessing every item in a table or a secondary index. + + If the total number of scanned items exceeds the maximum data set size + limit of 1 MB, the scan stops and results are returned to the user with + a ``LastEvaluatedKey`` value to continue the scan in a subsequent + operation. The results also include the number of items exceeding the + limit. A scan can result in no table data meeting the filter criteria. + + By default, Scan operations proceed sequentially; however, for faster + performance on a large table or secondary index, applications can + request a parallel *Scan* operation by providing the ``segment`` and + ``total_segments`` parameters. For more information, see + *Parallel Scan* in the + Amazon DynamoDB Developer Guide. + + By default, *Scan* uses eventually consistent reads when accessing the + data in a table; therefore, the result set might not include the + changes to data in the table immediately before the operation began. If + you need a consistent copy of the data, as of the time that the *Scan* + begins, you can set the ``consistent_read`` parameter to ``True``. + + :param str table_name: The name of the table containing the requested + items; or, if you provide ``index_name``, the name of the table to + which that index belongs. + :param bool consistent_read: A Boolean value that determines the read + consistency model during the scan: + + - If set to ``False``, then the data returned from *Scan* might not + contain the results from other recently completed write + operations (*PutItem*, *UpdateItem*, or *DeleteItem*). + - If set to ``True``, then all of the write operations that + completed before the Scan began are guaranteed to be contained in + the *Scan* response. + + The default setting is ``False``. + + This parameter is not supported on global secondary indexes. If you + scan a global secondary index and set ``consistent_read`` to + ``True``, you will receive a + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException`. + :param str|bytes|int exclusive_start_key: The primary key of the first + item that this operation will evaluate. Use the value that was + returned for ``LastEvaluatedKey`` in the previous operation. + + In a parallel scan, a *Scan* request that includes + ``exclusive_start_key`` must specify the same segment whose + previous *Scan* returned the corresponding value of + ``LastEvaluatedKey``. + :param dict expression_attribute_names: One or more substitution tokens + for attribute names in an expression. + :param dict expression_attribute_values: One or more values that can be + substituted in an expression. + :param str filter_expression: A string that contains conditions that + DynamoDB applies after the Scan operation, but before the data is + returned to you. Items that do not satisfy the expression criteria + are not returned. + + .. note:: A filter expression is applied after the items have + already been read; the process of filtering does not consume + any additional read capacity units.
+ + For more information, see *Filter Expressions* in the Amazon DynamoDB Developer Guide. + :param str projection_expression: A string that identifies one or more + attributes to retrieve from the specified table or index. These + attributes can include scalars, sets, or elements of a JSON + document. The attributes in the expression must be separated by + commas. + + If no attribute names are specified, then all attributes will be + returned. If any of the requested attributes are not found, they + will not appear in the result. + + For more information, see *Accessing Item Attributes* in the Amazon DynamoDB Developer + Guide. + :param str index_name: The name of a secondary index to scan. This + index can be any local secondary index or global secondary index. + Note that if you use this parameter, you must also provide + ``table_name``. + :param int limit: The maximum number of items to evaluate (not + necessarily the number of matching items). If DynamoDB processes + the number of items up to the limit while processing the results, + it stops the operation and returns the matching values up to that + point, and a key in ``LastEvaluatedKey`` to apply in a subsequent + operation, so that you can pick up where you left off. Also, if the + processed data set size exceeds 1 MB before DynamoDB reaches this + limit, it stops the operation and returns the matching values up to + the limit, and a key in ``LastEvaluatedKey`` to apply in a + subsequent operation to continue the operation. For more + information, see *Query and Scan* in the Amazon + DynamoDB Developer Guide. + :param str return_consumed_capacity: Determines the level of detail + about provisioned throughput consumption that is returned in the + response. Should be ``None`` or one of ``INDEXES`` or ``TOTAL``. + :param int segment: For a parallel *Scan* request, ``segment`` + identifies an individual segment to be scanned by an application + worker. + + Segment IDs are zero-based, so the first segment is always ``0``. + For example, if you want to use four application threads to scan a + table or an index, then the first thread specifies a ``segment`` + value of ``0``, the second thread specifies ``1``, and so on. + + The value of ``LastEvaluatedKey`` returned from a parallel *Scan* + request must be used as ``exclusive_start_key`` with the same + segment ID in a subsequent *Scan* operation. + + The value for ``segment`` must be greater than or equal to ``0``, + and less than the value provided for ``total_segments``. + + If you provide ``segment``, you must also provide + ``total_segments``. + :param int total_segments: For a parallel *Scan* request, + ``total_segments`` represents the total number of segments into + which the *Scan* operation will be divided. The value of + ``total_segments`` corresponds to the number of application workers + that will perform the parallel scan. For example, if you want to + use four application threads to scan a table or an index, specify a + ``total_segments`` value of 4. + + The value for ``total_segments`` must be greater than or equal to + ``1``, and less than or equal to ``1000000``. If you specify a + ``total_segments`` value of ``1``, the *Scan* operation will be + sequential rather than parallel. + + If you specify ``total_segments``, you must also specify + ``segment``.
+ :rtype: dict + + :raises: :exc:`~sprockets.clients.dynamodb.exceptions.DynamoDBException` + :exc:`~sprockets.clients.dynamodb.exceptions.ConfigNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.NoCredentialsError` + :exc:`~sprockets.clients.dynamodb.exceptions.NoProfileError` + :exc:`~sprockets.clients.dynamodb.exceptions.TimeoutException` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestException` + :exc:`~sprockets.clients.dynamodb.exceptions.InternalFailure` + :exc:`~sprockets.clients.dynamodb.exceptions.LimitExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.MissingParameter` + :exc:`~sprockets.clients.dynamodb.exceptions.OptInRequired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceInUse` + :exc:`~sprockets.clients.dynamodb.exceptions.RequestExpired` + :exc:`~sprockets.clients.dynamodb.exceptions.ResourceNotFound` + :exc:`~sprockets.clients.dynamodb.exceptions.ServiceUnavailable` + :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` + :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` + + .. _Scan: http://docs.aws.amazon.com/amazondynamodb/ + latest/APIReference/API_Scan.html + + """ + raise NotImplementedError + + @staticmethod + def _process_response(response): + error = response.exception() + if error: + if isinstance(error, aws_exceptions.AWSError): + if error.args[1]['type'] in exceptions.MAP: + raise exceptions.MAP[error.args[1]['type']]( + error.args[1]['message']) + raise error + http_response = response.result() + if not http_response or not http_response.body: + raise exceptions.DynamoDBException('empty response') + return json.loads(http_response.body.decode('utf-8')) def _unwrap_result(function, result): diff --git a/tests/api_tests.py b/tests/api_tests.py new file mode 100644 index 0000000..15c1116 --- /dev/null +++ b/tests/api_tests.py @@ -0,0 +1,205 @@ +import datetime +import os +import uuid + +import mock + +from tornado import concurrent +from tornado import httpclient +from tornado import testing +from tornado_aws import exceptions as aws_exceptions + +from sprockets.clients import dynamodb +from sprockets.clients.dynamodb import exceptions + + +class AsyncTestCase(testing.AsyncTestCase): + + def setUp(self): + super(AsyncTestCase, self).setUp() + self.client = self.get_client() + + @property + def endpoint(self): + return os.getenv('DYNAMODB_ENDPOINT') + + @staticmethod + def generic_table_definition(): + return { + 'TableName': str(uuid.uuid4()), + 'AttributeDefinitions': [{'AttributeName': 'id', + 'AttributeType': 'S'}], + 'KeySchema': [{'AttributeName': 'id', 'KeyType': 'HASH'}], + 'ProvisionedThroughput': { + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + } + + def get_client(self): + return dynamodb.DynamoDB(endpoint=self.endpoint) + + +class AWSClientTests(AsyncTestCase): + + @testing.gen_test + def test_raises_config_not_found_exception(self): + with mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + fetch.side_effect = aws_exceptions.ConfigNotFound(path='/test') + with self.assertRaises(exceptions.ConfigNotFound): + yield self.client.create_table(self.generic_table_definition()) + + @testing.gen_test + def test_raises_config_parser_error(self): + with mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + fetch.side_effect = aws_exceptions.ConfigParserError(path='/test') + with self.assertRaises(exceptions.ConfigParserError): + yield self.client.create_table(self.generic_table_definition()) + + @testing.gen_test + def test_raises_no_credentials_error(self): + with 
mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + fetch.side_effect = aws_exceptions.NoCredentialsError() + with self.assertRaises(exceptions.NoCredentialsError): + yield self.client.create_table(self.generic_table_definition()) + + @testing.gen_test + def test_raises_no_profile_error(self): + with mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + fetch.side_effect = aws_exceptions.NoProfileError(profile='test-1', + path='/test') + with self.assertRaises(exceptions.NoProfileError): + yield self.client.create_table(self.generic_table_definition()) + + @testing.gen_test + def test_raises_request_exception(self): + with mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + fetch.side_effect = httpclient.HTTPError(500, 'uh-oh') + with self.assertRaises(exceptions.RequestException): + yield self.client.create_table(self.generic_table_definition()) + + @testing.gen_test + def test_raises_timeout_exception(self): + with mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + fetch.side_effect = httpclient.HTTPError(599) + with self.assertRaises(exceptions.TimeoutException): + yield self.client.create_table(self.generic_table_definition()) + + @testing.gen_test + def test_fetch_future_exception(self): + with mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + future = concurrent.Future() + fetch.return_value = future + future.set_exception(exceptions.DynamoDBException()) + with self.assertRaises(exceptions.DynamoDBException): + yield self.client.create_table(self.generic_table_definition()) + + @testing.gen_test + def test_empty_fetch_response_raises_dynamodb_exception(self): + with mock.patch('tornado_aws.client.AsyncAWSClient.fetch') as fetch: + future = concurrent.Future() + fetch.return_value = future + future.set_result(None) + with self.assertRaises(exceptions.DynamoDBException): + yield self.client.create_table(self.generic_table_definition()) + + +class CreateTableTests(AsyncTestCase): + + @testing.gen_test + def test_simple_table(self): + definition = self.generic_table_definition() + response = yield self.client.create_table(definition) + self.assertEqual(response['TableName'], definition['TableName']) + self.assertIn(response['TableStatus'], + [dynamodb.TABLE_ACTIVE, + dynamodb.TABLE_CREATING]) + + @testing.gen_test + def test_invalid_request(self): + definition = { + 'TableName': str(uuid.uuid4()), + 'AttributeDefinitions': [{'AttributeName': 'id'}], + 'KeySchema': [], + 'ProvisionedThroughput': { + 'ReadCapacityUnits': 5, + 'WriteCapacityUnits': 5 + } + } + with self.assertRaises(exceptions.ValidationException): + yield self.client.create_table(definition) + + +class DeleteTableTests(AsyncTestCase): + + @testing.gen_test + def test_delete_table(self): + definition = self.generic_table_definition() + response = yield self.client.create_table(definition) + self.assertEqual(response['TableName'], definition['TableName']) + yield self.client.delete_table(definition['TableName']) + with self.assertRaises(exceptions.ResourceNotFound): + yield self.client.describe_table(definition['TableName']) + + @testing.gen_test + def test_table_not_found(self): + table = str(uuid.uuid4()) + with self.assertRaises(exceptions.ResourceNotFound): + yield self.client.delete_table(table) + + +class DescribeTableTests(AsyncTestCase): + + @testing.gen_test + def test_describe_table(self): + # Create the table first + definition = self.generic_table_definition() + response = yield self.client.create_table(definition) + 
self.assertEqual(response['TableName'], definition['TableName']) + + # Describe the table + response = yield self.client.describe_table(definition['TableName']) + self.assertEqual(response['TableName'], definition['TableName']) + self.assertEqual(response['TableStatus'], + dynamodb.TABLE_ACTIVE) + + @testing.gen_test + def test_table_not_found(self): + table = str(uuid.uuid4()) + with self.assertRaises(exceptions.ResourceNotFound): + yield self.client.describe_table(table) + + +class ListTableTests(AsyncTestCase): + + @testing.gen_test + def test_list_tables(self): + # Create the table first + definition = self.generic_table_definition() + response = yield self.client.create_table(definition) + self.assertEqual(response['TableName'], definition['TableName']) + + # List the tables and check that ours is present + response = yield self.client.list_tables(limit=100) + self.assertIn(definition['TableName'], response['TableNames']) + + +class PutGetDeleteTests(AsyncTestCase): + + @testing.gen_test + def test_put_item(self): + # Create the table first + definition = self.generic_table_definition() + response = yield self.client.create_table(definition) + self.assertEqual(response['TableName'], definition['TableName']) + + row_id = uuid.uuid4() + + # Put the item, then read it back + yield self.client.put_item( + definition['TableName'], + {'id': row_id, 'created_at': datetime.datetime.utcnow()}) + + response = yield self.client.get_item(definition['TableName'], + {'id': row_id}) + self.assertEqual(response['id'], row_id) From 06ea813b8f15f972706eca3df03547a4a0370e76 Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Wed, 2 Mar 2016 11:41:09 -0500 Subject: [PATCH 10/13] Dont need stream view constants any more --- sprockets/clients/dynamodb/__init__.py | 7 ------- 1 file changed, 7 deletions(-) diff --git a/sprockets/clients/dynamodb/__init__.py b/sprockets/clients/dynamodb/__init__.py index 878d46b..8105ab0 100644 --- a/sprockets/clients/dynamodb/__init__.py +++ b/sprockets/clients/dynamodb/__init__.py @@ -14,10 +14,3 @@ TABLE_DELETING = 'DELETING' TABLE_DISABLED = 'DISABLED' TABLE_UPDATING = 'UPDATING' -# Table stream view type constants -STREAM_VIEW_NEW_IMAGE = 'NEW_IMAGE' -STREAM_VIEW_OLD_IMAGE = 'OLD_IMAGE' -STREAM_VIEW_NEW_AND_OLD_IMAGES = 'NEW_AND_OLD_IMAGES' -STREAM_VIEW_KEYS_ONLY = 'KEYS_ONLY' -_STREAM_VIEW_TYPES = (STREAM_VIEW_NEW_IMAGE, STREAM_VIEW_OLD_IMAGE, - STREAM_VIEW_NEW_AND_OLD_IMAGES, STREAM_VIEW_KEYS_ONLY) From 3e909eb2ba7bc53349cc97696309a80cd909cb30 Mon Sep 17 00:00:00 2001 From: "Gavin M.
Roy" Date: Fri, 4 Mar 2016 09:35:43 -0500 Subject: [PATCH 11/13] Update travis, fix docstrings --- .travis.yml | 2 +- bootstrap | 2 +- sprockets/clients/dynamodb/connector.py | 8 ++++---- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 8e23f99..834a198 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,4 @@ -sudo: reuired +sudo: required services: - docker language: python diff --git a/bootstrap b/bootstrap index 496a81e..a4b5458 100755 --- a/bootstrap +++ b/bootstrap @@ -67,7 +67,7 @@ set -e mkdir -p build -if test "$1" == 'shellinit' +if test "$1" = 'shellinit' then # just build the environment file from docker containers build_env_file build/test-environment diff --git a/sprockets/clients/dynamodb/connector.py b/sprockets/clients/dynamodb/connector.py index 564780e..a6e698a 100644 --- a/sprockets/clients/dynamodb/connector.py +++ b/sprockets/clients/dynamodb/connector.py @@ -734,7 +734,7 @@ class DynamoDB(object): :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` - .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + .. _UpdateItem: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_UpdateItem.html """ @@ -890,7 +890,7 @@ class DynamoDB(object): :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` - .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + .. _DeleteItem: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_DeleteItem.html """ @@ -916,7 +916,7 @@ class DynamoDB(object): :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` - .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + .. _BatchGetItem: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_BatchGetItem.html """ @@ -942,7 +942,7 @@ class DynamoDB(object): :exc:`~sprockets.clients.dynamodb.exceptions.ThroughputExceeded` :exc:`~sprockets.clients.dynamodb.exceptions.ValidationException` - .. _GetItem: http://docs.aws.amazon.com/amazondynamodb/ + .. _BatchWriteItem: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_BatchWriteItem.html """ From 21bb49fb3ab9c667d24592e035b76243dcef3c72 Mon Sep 17 00:00:00 2001 From: "Gavin M. Roy" Date: Fri, 4 Mar 2016 09:37:15 -0500 Subject: [PATCH 12/13] Test for the ResourceInUse exception when double creating --- tests/api_tests.py | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/tests/api_tests.py b/tests/api_tests.py index 15c1116..d585aa7 100644 --- a/tests/api_tests.py +++ b/tests/api_tests.py @@ -129,6 +129,17 @@ class CreateTableTests(AsyncTestCase): with self.assertRaises(exceptions.ValidationException): yield self.client.create_table(definition) + @testing.gen_test + def test_double_create(self): + definition = self.generic_table_definition() + response = yield self.client.create_table(definition) + self.assertEqual(response['TableName'], definition['TableName']) + self.assertIn(response['TableStatus'], + [dynamodb.TABLE_ACTIVE, + dynamodb.TABLE_CREATING]) + with self.assertRaises(exceptions.ResourceInUse): + response = yield self.client.create_table(definition) + class DeleteTableTests(AsyncTestCase): From 8f5817108ebd1d954220a8640ef59391b40e382b Mon Sep 17 00:00:00 2001 From: "Gavin M. 
Roy" Date: Fri, 4 Mar 2016 09:39:28 -0500 Subject: [PATCH 13/13] Update the MANIFEST for tests --- MANIFEST.in | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/MANIFEST.in b/MANIFEST.in index e03e8a4..6e62230 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,5 +1,5 @@ include LICENSE -include tests.py graft docs graft examples graft requires +graft tests