Initial WIP commit

Gavin M. Roy 2020-04-06 17:39:52 -04:00
parent 0811cf66ab
commit 46f829fc4a
14 changed files with 761 additions and 23 deletions

21
.editorconfig Normal file

@@ -0,0 +1,21 @@
# top-most EditorConfig file
root = true
# Unix-style newlines with a newline ending every file
[*]
end_of_line = lf
insert_final_newline = true
# 4 space indentation
[*.py]
indent_style = space
indent_size = 4
# 2 space indentation
[*.yml]
indent_style = space
indent_size = 2
[bootstrap]
indent_style = space
indent_size = 2

20
.github/workflows/deploy.yaml vendored Normal file

@@ -0,0 +1,20 @@
name: Deployment
on:
push:
branches-ignore: ["*"]
tags: ["*"]
jobs:
deploy:
runs-on: ubuntu-latest
if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags')
container: python:3.8-alpine
steps:
- name: Checkout repository
uses: actions/checkout@v1
- name: Build package
run: python3 setup.py sdist
- name: Publish package
uses: pypa/gh-action-pypi-publish@master
with:
user: __token__
password: ${{ secrets.PYPI_PASSWORD }}

66
.github/workflows/testing.yaml vendored Normal file

@@ -0,0 +1,66 @@
name: Testing
on:
push:
branches: ["*"]
paths-ignore:
- 'docs/**'
- 'setup.*'
- '*.md'
- '*.rst'
tags-ignore: ["*"]
jobs:
test:
runs-on: ubuntu-latest
timeout-minutes: 3
services:
postgres:
image: postgres:12-alpine
options: >-
--health-cmd "pg_isready"
--health-interval 10s
--health-timeout 10s
--health-retries 5
ports:
- 5432
env:
POSTGRES_HOST_AUTH_METHOD: trust
strategy:
matrix:
python: [3.7, 3.8]
container:
image: python:${{ matrix.python }}-alpine
steps:
- name: Checkout repository
uses: actions/checkout@v1
- name: Install OS dependencies
run: apk --update add gcc make musl-dev linux-headers postgresql-dev
- name: Install testing dependencies
run: pip3 --no-cache-dir install -e '.[testing]'
- name: Create build directory
run: mkdir build
- name: Create build/test-environment
run: echo "export POSTGRES_URL=postgresql://postgres@postgres:5432/postgres" > build/test-environment
- name: Install UUID extension
run: psql -q -h postgres -U postgres -d postgres -c 'CREATE EXTENSION "uuid-ossp";'
- name: Run flake8 tests
run: flake8
- name: Run tests
run: coverage run
- name: Output coverage
run: coverage report && coverage xml
- name: Upload Coverage
uses: codecov/codecov-action@v1.0.2
with:
token: ${{secrets.CODECOV_TOKEN}}
file: build/coverage.xml
flags: unittests
fail_ci_if_error: true

40
LICENSE

@@ -1,29 +1,25 @@
BSD 3-Clause License
Copyright (c) 2020, Sprockets
Copyright (c) 2020 AWeber Communications
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
* Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
* Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
* Neither the name of the copyright holder nor the names of its contributors may
be used to endorse or promote products derived from this software without
specific prior written permission.
3. Neither the name of the copyright holder nor the names of its
contributors may be used to endorse or promote products derived from
this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


@@ -1 +0,0 @@
# sprockets-postgres

67
README.rst Normal file

@@ -0,0 +1,67 @@
Sprockets Postgres
==================
An asynchronous Postgres client mixin for Tornado applications
|Version| |Status| |Coverage| |License|
Installation
------------
``sprockets-postgres`` is available on the Python Package Index (PyPI) and is installable via pip:
.. code:: bash
pip install sprockets-postgres
Documentation
-------------
Documentation is available at `sprockets-postgres.readthedocs.io <https://sprockets-postgres.readthedocs.io>`_.
Configuration
-------------
The following table details the environment variable configuration options:
+---------------------------------+--------------------------------------------------+---------------------------------+
| Variable | Definition | Default |
+=================================+==================================================+=================================+
| ``POSTGRES_URL``                | The PostgreSQL URL to connect to                 | ``postgresql://localhost:5432`` |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_MAX_POOL_SIZE``      | Maximum connection count to Postgres per backend | ``0`` (Unlimited)               |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_MIN_POOL_SIZE`` | Minimum or starting pool size. | ``1`` |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_CONNECTION_TIMEOUT`` | The maximum time in seconds to spend attempting | ``10`` |
| | to create a new connection. | |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_CONNECTION_TTL``     | Time-to-live in seconds for a pooled connection. | ``300``                         |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_QUERY_TIMEOUT``      | Maximum execution time for a query in seconds.   | ``120``                         |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_HSTORE`` | Enable HSTORE support in the client. | ``FALSE`` |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_JSON`` | Enable JSON support in the client. | ``FALSE`` |
+---------------------------------+--------------------------------------------------+---------------------------------+
| ``POSTGRES_UUID`` | Enable UUID support in the client. | ``TRUE`` |
+---------------------------------+--------------------------------------------------+---------------------------------+
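Example
-------
The following is a minimal usage sketch, assembled from the request handlers in
this commit's test suite; the route, handler class, and ``make_app`` factory are
illustrative only, and ``POSTGRES_URL`` is assumed to point at a reachable server:

.. code:: python

   from sprockets.http import app
   from tornado import web

   import sprockets_postgres


   class ValueRequestHandler(web.RequestHandler):
       """Echo a value back through Postgres using postgres_execute"""

       GET_SQL = 'SELECT %s::TEXT AS value;'

       async def get(self):
           result = await self.application.postgres_execute(
               'get-value', self.GET_SQL, self.get_argument('value'))
           await self.finish({'value': result['value'] if result else None})


   class Application(sprockets_postgres.ApplicationMixin, app.Application):
       """The mixin creates the connection pool on start, closes it on stop"""


   def make_app(**settings) -> Application:
       # Hypothetical factory; pass the handlers your service exposes
       return Application(
           handlers=[web.url('/value', ValueRequestHandler)], **settings)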
Requirements
------------
- `aiopg <https://aiopg.readthedocs.io/en/latest/>`_
- `sprockets.http <https://sprocketshttp.readthedocs.io/en/master/>`_
- `Tornado <https://tornadoweb.org>`_
Version History
---------------
Available at https://sprockets-postgres.readthedocs.org/en/latest/history.html
.. |Version| image:: https://img.shields.io/pypi/v/sprockets-postgres.svg?
:target: http://badge.fury.io/py/sprockets-postgres
.. |Status| image:: https://img.shields.io/travis/sprockets/sprockets-postgres.svg?
:target: https://travis-ci.org/sprockets/sprockets-postgres
.. |Coverage| image:: https://img.shields.io/codecov/c/github/sprockets/sprockets-postgres.svg?
:target: https://codecov.io/github/sprockets/sprockets-postgres?branch=master
.. |License| image:: https://img.shields.io/pypi/l/sprockets-postgres.svg?
:target: https://sprockets-postgres.readthedocs.org

75
bootstrap Executable file

@@ -0,0 +1,75 @@
#!/usr/bin/env sh
set -e
# Common constants
COLOR_RESET='\033[0m'
COLOR_GREEN='\033[0;32m'
COMPOSE_PROJECT_NAME="${COMPOSE_PROJECT_NAME:-${PWD##*/}}"
TEST_HOST="${TEST_HOST:-localhost}"
echo "Integration test host: ${TEST_HOST}"
get_exposed_port() {
if [ -z "$3" ]
then
docker-compose port $1 $2 | cut -d: -f2
else
docker-compose port --index=$3 $1 $2 | cut -d: -f2
fi
}
report_start() {
printf "Waiting for $1 ... "
}
report_done() {
printf "${COLOR_GREEN}done${COLOR_RESET}\n"
}
wait_for_healthy_containers() {
IDs=$(docker-compose ps -q | paste -sd " " -)
report_start "${1} containers to report healthy"
counter="0"
while true
do
if [ "$(docker inspect -f "{{.State.Health.Status}}" ${IDs} | grep -c healthy)" -eq "${1}" ]; then
break
fi
counter=$((counter + 1))
if [ "${counter}" -eq 120 ]; then
echo " ERROR: containers failed to start"
exit 1
fi
sleep 1
done
report_done
}
# Ensure Docker is Running
echo "Docker Information:"
echo ""
docker version
echo ""
# Activate the virtual environment
if test -e env/bin/activate
then
. ./env/bin/activate
fi
mkdir -p build
# Stop any running instances and clean up after them, then pull images
docker-compose down --volumes --remove-orphans
docker-compose up -d --quiet-pull
wait_for_healthy_containers 1
docker-compose exec postgres psql -q -o /dev/null -U postgres -d postgres -c 'CREATE EXTENSION "uuid-ossp";'
cat > build/test-environment <<EOF
export ASYNC_TEST_TIMEOUT=5
export POSTGRES_URL=postgresql://postgres@${TEST_HOST}:$(get_exposed_port postgres 5432)/postgres
EOF
printf "\nBootstrap complete\n\nDon't forget to \"source build/test-environment\"\n"

13
docker-compose.yml Normal file

@@ -0,0 +1,13 @@
version: '3.3'
services:
postgres:
image: postgres:12-alpine
environment:
POSTGRES_HOST_AUTH_METHOD: trust
healthcheck:
test: pg_isready
interval: 30s
timeout: 10s
retries: 3
ports:
- 5432

77
setup.cfg Normal file

@@ -0,0 +1,77 @@
[metadata]
name = sprockets-postgres
version = attr: sprockets_postgres.__version__.version
description = An asynchronous Postgres client and mixin for Tornado applications
long_description = file: README.rst
long_description_content_type = text/x-rst; charset=UTF-8
license = BSD 3-Clause License
license-file = LICENSE
home-page = https://github.com/sprockets/sprockets-postgres
project_urls =
Bug Tracker = https://github.com/sprockets/sprockets-postgres/issues
Documentation = https://sprockets-postgres.readthedocs.io
Source Code = https://github.com/sprockets/sprockets-postgres/
classifiers =
Development Status :: 3 - Alpha
Intended Audience :: Developers
License :: OSI Approved :: BSD License
Natural Language :: English
Operating System :: OS Independent
Programming Language :: Python :: 3
Programming Language :: Python :: 3.7
Programming Language :: Python :: 3.8
Topic :: Communications
Topic :: Internet
Topic :: Software Development
Typing :: Typed
requires-dist = setuptools
keywords =
postgres
python3
tornado
[options]
include_package_data = True
install_requires =
aiopg>=1.0.0,<2
sprockets.http>=2.1.1,<3
tornado>=6,<7
packages =
sprockets_postgres
zip_safe = true
[options.extras_require]
testing =
coverage
flake8
flake8-comprehensions
flake8-deprecated
flake8-import-order
flake8-print
flake8-quotes
flake8-rst-docstrings
flake8-tuple
pygments
[coverage:run]
branch = True
command_line = -m unittest discover tests --verbose
data_file = build/.coverage
[coverage:report]
show_missing = True
include =
sprockets_postgres/*.py
[coverage:html]
directory = build/coverage
[coverage:xml]
output = build/coverage.xml
[flake8]
application-import-names = sprockets_postgres,tests
exclude = build,docs,env
ignore = W503
import-order-style = pycharm
rst-roles = attr,class,const,data,exc,func,meth,mod,obj,ref,yields

3
setup.py Normal file

@@ -0,0 +1,3 @@
import setuptools
setuptools.setup()


@@ -0,0 +1,238 @@
"""
:class:`sprockets.http.app.Application` mixin for handling the connection to
Postgres and exporting functions for querying the database, getting the status,
and providing a cursor.
Automatically creates and shuts down :class:`aiopg.pool.Pool` on startup
and shutdown.
"""
import asyncio
import contextlib
import functools
import logging
import os
import typing
from distutils import util
import aiopg
import psycopg2
from aiopg import pool
from psycopg2 import errors, extras, extensions
from tornado import ioloop, web
LOGGER = logging.getLogger('sprockets-postgres')
DEFAULT_POSTGRES_CONNECTION_TIMEOUT = 10
DEFAULT_POSTGRES_CONNECTION_TTL = 300
DEFAULT_POSTGRES_HSTORE = 'FALSE'
DEFAULT_POSTGRES_JSON = 'FALSE'
DEFAULT_POSTGRES_MAX_POOL_SIZE = 0
DEFAULT_POSTGRES_MIN_POOL_SIZE = 1
DEFAULT_POSTGRES_QUERY_TIMEOUT = 120
DEFAULT_POSTGRES_URL = 'postgresql://localhost:5432'
DEFAULT_POSTGRES_UUID = 'TRUE'
class ApplicationMixin:
"""Application mixin for setting up the PostgreSQL client pool"""
POSTGRES_STATUS_TIMEOUT = 3
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self._postgres_pool: typing.Optional[pool.Pool] = None
self.runner_callbacks['on_start'].append(self._postgres_setup)
self.runner_callbacks['shutdown'].append(self._postgres_shutdown)
@contextlib.asynccontextmanager
async def postgres_cursor(self,
timeout: typing.Optional[int] = None,
raise_http_error: bool = True) \
-> typing.AsyncContextManager[extensions.cursor]:
"""Return a Postgres cursor for the pool"""
try:
async with self._postgres_pool.acquire() as conn:
async with conn.cursor(
cursor_factory=extras.RealDictCursor,
timeout=self._postgres_query_timeout(timeout)) as pgc:
yield pgc
except (asyncio.TimeoutError,
psycopg2.OperationalError,
psycopg2.Error) as error:
LOGGER.critical('Error connecting to Postgres: %s', error)
if raise_http_error:
raise web.HTTPError(503, 'Database Unavailable')
raise
async def postgres_callproc(self,
name: str,
params: typing.Union[list, tuple, None] = None,
timeout: typing.Optional[int] = None) \
-> typing.Union[dict, list, None]:
"""Execute a stored procedure, specifying the name, SQL, passing in
optional parameters.
:param name: The stored-proc to call
:param params: Optional parameters to pass into the function
:param timeout: Optional timeout to override the default query timeout
"""
async with self.postgres_cursor(timeout) as cursor:
return await self._postgres_query(
cursor, cursor.callproc, name, name, params)
async def postgres_execute(self, name: str, sql: str,
*args,
timeout: typing.Optional[int] = None) \
-> typing.Union[dict, list, None]:
"""Execute a query, specifying a name for the query, the SQL statement,
and optional positional arguments to pass in with the query.
Parameters may be provided as sequence or mapping and will be
bound to variables in the operation. Variables are specified
either with positional ``%s`` or named ``%({name})s`` placeholders.
:param name: A name for the query, used when logging errors
:param sql: The SQL statement to execute
:param timeout: Optional timeout to override the default query timeout
"""
async with self.postgres_cursor(timeout) as cursor:
return await self._postgres_query(
cursor, cursor.execute, name, sql, args)
async def postgres_status(self) -> dict:
"""Invoke from the ``/status`` RequestHandler to check that there is
a Postgres connection handler available and return info about the
pool.
"""
available = True
try:
async with self.postgres_cursor(
self.POSTGRES_STATUS_TIMEOUT, False) as cursor:
await cursor.execute('SELECT 1')
except (asyncio.TimeoutError, psycopg2.OperationalError):
available = False
return {
'available': available,
'pool_size': self._postgres_pool.size,
'pool_free': self._postgres_pool.freesize
}
async def _postgres_query(self,
cursor: aiopg.Cursor,
method: typing.Callable,
name: str,
sql: str,
parameters: typing.Union[dict, list, tuple]) \
-> typing.Union[dict, list, None]:
"""Execute a query, specifying the name, SQL, passing in
"""
try:
await method(sql, parameters)
except asyncio.TimeoutError as err:
LOGGER.error('Query timeout for %s: %s',
name, str(err).split('\n')[0])
raise web.HTTPError(500, reason='Query Timeout')
except errors.UniqueViolation as err:
LOGGER.error('Database error for %s: %s',
name, str(err).split('\n')[0])
raise web.HTTPError(409, reason='Unique Violation')
except psycopg2.Error as err:
LOGGER.error('Database error for %s: %s',
name, str(err).split('\n')[0])
raise web.HTTPError(500, reason='Database Error')
try:
return await self._postgres_query_results(cursor)
except psycopg2.ProgrammingError:
return
@staticmethod
async def _postgres_query_results(cursor: aiopg.Cursor) \
-> typing.Union[dict, list, None]:
"""Invoked by self.postgres_query to return all of the query results
as either a ``dict`` or ``list`` depending on the quantity of rows.
This can raise a ``psycopg2.ProgrammingError`` for an INSERT/UPDATE
without RETURNING or a DELETE. That exception is caught by the caller.
:raises psycopg2.ProgrammingError: when there are no rows to fetch
even though the rowcount is > 0
"""
if cursor.rowcount == 1:
return await cursor.fetchone()
elif cursor.rowcount > 1:
return await cursor.fetchall()
return None
@functools.lru_cache()
def _postgres_query_timeout(self,
timeout: typing.Optional[int] = None) -> int:
"""Return query timeout, either from the specified value or
``POSTGRES_QUERY_TIMEOUT`` environment variable, if set.
Defaults to sprockets_postgres.DEFAULT_POSTGRES_QUERY_TIMEOUT.
"""
return timeout if timeout else int(
os.environ.get(
'POSTGRES_QUERY_TIMEOUT',
DEFAULT_POSTGRES_QUERY_TIMEOUT))
async def _postgres_setup(self,
_app: web.Application,
_ioloop: ioloop.IOLoop) -> None:
"""Setup the Postgres pool of connections and log if there is an error.
This is invoked by the Application on start callback mechanism.
"""
url = os.environ.get('POSTGRES_URL', DEFAULT_POSTGRES_URL)
LOGGER.debug('Connecting to PostgreSQL: %s', url)
self._postgres_pool = pool.Pool(
url,
minsize=int(
os.environ.get(
'POSTGRES_MIN_POOL_SIZE',
DEFAULT_POSTGRES_MIN_POOL_SIZE)),
maxsize=int(
os.environ.get(
'POSTGRES_MAX_POOL_SIZE',
DEFAULT_POSTGRES_MAX_POOL_SIZE)),
timeout=int(
os.environ.get(
'POSTGRES_CONNECTION_TIMEOUT',
DEFAULT_POSTGRES_CONNECTION_TIMEOUT)),
enable_hstore=util.strtobool(
os.environ.get(
'POSTGRES_HSTORE', DEFAULT_POSTGRES_HSTORE)),
enable_json=util.strtobool(
os.environ.get('POSTGRES_JSON', DEFAULT_POSTGRES_JSON)),
enable_uuid=util.strtobool(
os.environ.get('POSTGRES_UUID', DEFAULT_POSTGRES_UUID)),
echo=False,
on_connect=None,
pool_recycle=int(
os.environ.get(
'POSTGRES_CONNECTION_TTL',
DEFAULT_POSTGRES_CONNECTION_TTL)))
try:
async with self._postgres_pool._cond:
await self._postgres_pool._fill_free_pool(False)
except (psycopg2.OperationalError,
psycopg2.Error) as error: # pragma: nocover
LOGGER.warning('Error connecting to PostgreSQL on startup: %s',
error)
async def _postgres_shutdown(self, _ioloop: ioloop.IOLoop) -> None:
"""Shutdown the Postgres connections and wait for them to close.
This is invoked by the Application shutdown callback mechanism.
"""
self._postgres_pool.close()
await self._postgres_pool.wait_closed()


@@ -0,0 +1,5 @@
"""
sprockets-postgres Version
"""
version = '1.0.0a1'

0
tests/__init__.py Normal file

158
tests/test_application.py Normal file

@@ -0,0 +1,158 @@
import asyncio
import json
import os
from unittest import mock
import uuid
import psycopg2
from psycopg2 import errors
from sprockets.http import app, testing
from tornado import web
import sprockets_postgres
class CallprocRequestHandler(web.RequestHandler):
async def get(self):
result = await self.application.postgres_callproc('uuid_generate_v4')
await self.finish({'value': str(result['uuid_generate_v4'])})
class ExecuteRequestHandler(web.RequestHandler):
GET_SQL = 'SELECT %s::TEXT AS value;'
async def get(self):
result = await self.application.postgres_execute(
'get', self.GET_SQL, self.get_argument('value'))
await self.finish({'value': result['value'] if result else None})
class MultiRowRequestHandler(web.RequestHandler):
GET_SQL = 'SELECT * FROM information_schema.enabled_roles;'
async def get(self):
rows = await self.application.postgres_execute('get', self.GET_SQL)
await self.finish({'rows': [row['role_name'] for row in rows]})
class NoRowRequestHandler(web.RequestHandler):
GET_SQL = """\
SELECT * FROM information_schema.tables WHERE table_schema = 'public';"""
async def get(self):
rows = await self.application.postgres_execute('get', self.GET_SQL)
await self.finish({'rows': rows})
class StatusRequestHandler(web.RequestHandler):
async def get(self):
result = await self.application.postgres_status()
if not result['available']:
self.set_status(503, 'Database Unavailable')
await self.finish(dict(result))
class Application(sprockets_postgres.ApplicationMixin,
app.Application):
pass
class ExecuteTestCase(testing.SprocketsHttpTestCase):
@classmethod
def setUpClass(cls):
with open('build/test-environment') as f:
for line in f:
if line.startswith('export '):
line = line[7:]
name, _, value = line.strip().partition('=')
os.environ[name] = value
def get_app(self):
self.app = Application(handlers=[
web.url('/callproc', CallprocRequestHandler),
web.url('/execute', ExecuteRequestHandler),
web.url('/multi-row', MultiRowRequestHandler),
web.url('/no-row', NoRowRequestHandler),
web.url('/status', StatusRequestHandler)
])
return self.app
def test_postgres_status(self):
response = self.fetch('/status')
data = json.loads(response.body)
self.assertTrue(data['available'])
self.assertGreaterEqual(data['pool_size'], 1)
self.assertGreaterEqual(data['pool_free'], 1)
@mock.patch('aiopg.cursor.Cursor.execute')
def test_postgres_status_error(self, execute):
execute.side_effect = asyncio.TimeoutError()
response = self.fetch('/status')
self.assertEqual(response.code, 503)
self.assertFalse(json.loads(response.body)['available'])
def test_postgres_callproc(self):
response = self.fetch('/callproc')
self.assertEqual(response.code, 200)
self.assertIsInstance(
uuid.UUID(json.loads(response.body)['value']), uuid.UUID)
def test_postgres_execute(self):
expectation = str(uuid.uuid4())
response = self.fetch('/execute?value={}'.format(expectation))
self.assertEqual(response.code, 200)
self.assertEqual(json.loads(response.body)['value'], expectation)
def test_postgres_multirow(self):
response = self.fetch('/multi-row')
self.assertEqual(response.code, 200)
body = json.loads(response.body)
self.assertIsInstance(body['rows'], list)
self.assertIn('postgres', body['rows'])
def test_postgres_norow(self):
response = self.fetch('/no-row')
self.assertEqual(response.code, 200)
body = json.loads(response.body)
self.assertIsNone(body['rows'])
@mock.patch('aiopg.cursor.Cursor.execute')
def test_postgres_execute_timeout_error(self, execute):
execute.side_effect = asyncio.TimeoutError()
response = self.fetch('/execute?value=1')
self.assertEqual(response.code, 500)
self.assertIn(b'Query Timeout', response.body)
@mock.patch('aiopg.cursor.Cursor.execute')
def test_postgres_execute_unique_violation(self, execute):
execute.side_effect = errors.UniqueViolation()
response = self.fetch('/execute?value=1')
self.assertEqual(response.code, 409)
self.assertIn(b'Unique Violation', response.body)
@mock.patch('aiopg.cursor.Cursor.execute')
def test_postgres_execute_error(self, execute):
execute.side_effect = psycopg2.Error()
response = self.fetch('/execute?value=1')
self.assertEqual(response.code, 500)
self.assertIn(b'Database Error', response.body)
def test_postgres_programming_error(self):
with mock.patch.object(self.app, '_postgres_query_results') as pqr:
pqr.side_effect = psycopg2.ProgrammingError()
response = self.fetch('/execute?value=1')
self.assertEqual(response.code, 200)
self.assertIsNone(json.loads(response.body)['value'])
@mock.patch('aiopg.connection.Connection.cursor')
def test_postgres_cursor_raises(self, cursor):
cursor.side_effect = asyncio.TimeoutError()
response = self.fetch('/execute?value=1')
self.assertEqual(response.code, 503)