mirror of https://github.com/sprockets/sprockets.mixins.mediatype.git
synced 2024-12-27 20:24:39 +00:00

Merge pull request #40 from dave-shawley/code-formatting

Reformat using yapf.

Commit a528661c98: 10 changed files with 105 additions and 87 deletions

.github/workflows/testing.yml (vendored): 3 changes
@@ -23,6 +23,9 @@ jobs:
       - name: Flake8
         run: |
           flake8 sprockets tests.py
+      - name: Formatting
+        run: |
+          yapf -pqr docs setup.py sprockets tests.py

   test:
     runs-on: ubuntu-latest
@@ -48,6 +48,10 @@ tool chest. It provides the following commands:
 **flake8 sprockets tests.py**
    Run flake8 over the code and report style violations.

+**yapf -ri sprockets tests.py**
+   Inline format the code. You might want to configure your editor to
+   do this for you every time you save.
+
 If any of the preceding commands give you problems, then you will have to
 fix them **before** your pull request will be accepted.
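For anyone scripting these checks, a minimal sketch that runs the same two commands from Python (it assumes flake8 and yapf are installed, for example via the ci extra pinned in setup.cfg):

    import subprocess
    import sys

    # The same two checks the contributing guide and the CI workflow run.
    checks = [
        ['flake8', 'sprockets', 'tests.py'],
        ['yapf', '-pqr', 'docs', 'setup.py', 'sprockets', 'tests.py'],
    ]
    for command in checks:
        result = subprocess.run(command)
        if result.returncode != 0:
            sys.exit(result.returncode)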
@@ -3,11 +3,10 @@ import os
 import pkg_resources

 needs_sphinx = '4.0'
-extensions = ['sphinx.ext.autodoc',
-              'sphinx.ext.viewcode',
-              'sphinx.ext.intersphinx',
-              'sphinx.ext.extlinks',
-              'sphinxcontrib.httpdomain']
+extensions = [
+    'sphinx.ext.autodoc', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx',
+    'sphinx.ext.extlinks', 'sphinxcontrib.httpdomain'
+]
 master_doc = 'index'
 project = 'sprockets.mixins.mediatype'
 copyright = '2015-2021, AWeber Communications'
@@ -43,6 +43,7 @@ msgpack =
 ci =
     coverage==5.5
     flake8==3.9.2
+    yapf==0.31.0
 dev =
     coverage==5.5
     flake8==3.9.2
@@ -50,6 +51,7 @@ dev =
     sphinx-rtd-theme==1.0.0
     sphinxcontrib-httpdomain==1.7.0
     tox==3.24.3
+    yapf==0.31.0
 docs =
     sphinx==4.2.0
     sphinx-rtd-theme==1.0.0
@@ -1,15 +1,14 @@
 """sprockets.mixins.mediatype"""
 try:
-    from .content import (ContentMixin, ContentSettings,  # noqa: F401
-                          add_binary_content_type, add_text_content_type,
-                          set_default_content_type)
+    from .content import (  # noqa: F401
+        ContentMixin, ContentSettings, add_binary_content_type,
+        add_text_content_type, set_default_content_type)
 except ImportError:  # pragma: no cover
     import warnings
     warnings.warn(
         'Missing runtime requirements for sprockets.mixins.mediatype',
         UserWarning)

-
 version_info = (3, 0, 4)
 version = '.'.join(str(x) for x in version_info)
 __version__ = version  # compatibility
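The names re-exported above are the package's public entry points. A minimal sketch of how they are typically wired into a Tornado application (the get_request_body() and send_response() method names come from ContentMixin's documented interface rather than from this diff, so treat them as assumptions):

    import json

    from tornado import web

    from sprockets.mixins.mediatype import (ContentMixin, add_text_content_type,
                                             set_default_content_type)


    class EchoHandler(ContentMixin, web.RequestHandler):
        def post(self):
            body = self.get_request_body()   # decoded by the matching transcoder
            self.send_response(body)         # re-encoded per the Accept header


    def make_app():
        app = web.Application([('/', EchoHandler)])
        set_default_content_type(app, 'application/json', 'utf-8')
        add_text_content_type(app, 'application/json', 'utf-8', json.dumps,
                              json.loads)
        return app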
@@ -34,7 +34,6 @@ from tornado import web

 from . import handlers

-
 logger = logging.getLogger(__name__)
 SETTINGS_KEY = 'sprockets.mixins.mediatype.ContentSettings'
 """Key in application.settings to store the ContentSettings instance."""
@@ -87,7 +86,6 @@ class ContentSettings:
     instead.

     """
-
     def __init__(self):
         self._handlers = {}
         self._available_types = []
@@ -102,8 +100,8 @@ class ContentSettings:
         parsed = headers.parse_content_type(content_type)
         content_type = str(parsed)
         if content_type in self._handlers:
-            logger.warning('handler for %s already set to %r',
-                           content_type, self._handlers[content_type])
+            logger.warning('handler for %s already set to %r', content_type,
+                           self._handlers[content_type])
             return

         self._available_types.append(parsed)
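A tiny illustration of the guard above (hypothetical usage that relies only on the behaviour visible in this hunk): registering a second handler for the same content type logs the warning and leaves the first handler in place.

    from sprockets.mixins.mediatype import content

    settings = content.ContentSettings()
    first = object()
    settings['application/json'] = first
    settings['application/json'] = object()   # warns "already set" and returns early
    assert settings['application/json'] is first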
@@ -182,8 +180,8 @@ def add_binary_content_type(application, content_type, pack, unpack):
                    handlers.BinaryContentHandler(content_type, pack, unpack))


-def add_text_content_type(application, content_type, default_encoding,
-                          dumps, loads):
+def add_text_content_type(application, content_type, default_encoding, dumps,
+                          loads):
     """
     Add handler for a text content type.
@@ -202,9 +200,10 @@ def add_text_content_type(application, content_type, default_encoding,
     parsed = headers.parse_content_type(content_type)
     parsed.parameters.pop('charset', None)
     normalized = str(parsed)
-    add_transcoder(application,
-                   handlers.TextContentHandler(normalized, dumps, loads,
-                                               default_encoding))
+    add_transcoder(
+        application,
+        handlers.TextContentHandler(normalized, dumps, loads,
+                                    default_encoding))


 def add_transcoder(application, transcoder, content_type=None):
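add_transcoder is the general registration hook used by the helpers above. A short sketch of registering the JSON transcoder under a vendor suffix type, mirroring the test later in this diff (it assumes the package and its msgpack extra are installed; the empty Application is only for illustration):

    from tornado import web

    from sprockets.mixins.mediatype import content, transcoders

    app = web.Application([])
    content.add_transcoder(app, transcoders.JSONTranscoder(),
                           'application/vendor+json')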
@@ -277,7 +276,6 @@ class ContentMixin:
     using ``self.write()``.

     """
-
     def initialize(self):
         super().initialize()
         self._request_body = None
@@ -290,8 +288,7 @@ class ContentMixin:
             settings = get_settings(self.application, force_instance=True)
             acceptable = headers.parse_accept(
                 self.request.headers.get(
-                    'Accept',
-                    settings.default_content_type
+                    'Accept', settings.default_content_type
                     if settings.default_content_type else '*/*'))
             try:
                 selected, _ = algorithms.select_content_type(
@@ -327,11 +324,13 @@ class ContentMixin:
             except ValueError:
                 raise web.HTTPError(400, 'failed to parse content type %s',
                                     content_type)
-            content_type = '/'.join([content_type_header.content_type,
-                                     content_type_header.content_subtype])
+            content_type = '/'.join([
+                content_type_header.content_type,
+                content_type_header.content_subtype
+            ])
             if content_type_header.content_suffix is not None:
-                content_type = '+'.join([content_type,
-                                         content_type_header.content_suffix])
+                content_type = '+'.join(
+                    [content_type, content_type_header.content_suffix])
             try:
                 handler = settings[content_type]
             except KeyError:
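To make the suffix handling concrete, a worked example of the same string assembly (it assumes that `headers` above is ietfparse.headers, which this hunk does not show):

    from ietfparse import headers  # assumption: the headers module used above

    parsed = headers.parse_content_type('application/vendor+json; charset=utf-8')
    content_type = '/'.join([parsed.content_type, parsed.content_subtype])
    if parsed.content_suffix is not None:
        content_type = '+'.join([content_type, parsed.content_suffix])
    print(content_type)   # application/vendor+json, the key looked up in settings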
@@ -24,7 +24,6 @@ class BinaryContentHandler:
     and unpacking functions.

     """
-
     def __init__(self, content_type, pack, unpack):
         self._pack = pack
         self._unpack = unpack
@@ -77,7 +76,6 @@ class TextContentHandler:
     that tornado expects.

     """
-
     def __init__(self, content_type, dumps, loads, default_encoding):
         self._dumps = dumps
         self._loads = loads
@@ -47,8 +47,8 @@ class JSONTranscoder(handlers.TextContentHandler):
     :meth:`.loads` is called.

     """
-
-    def __init__(self, content_type='application/json',
+    def __init__(self,
+                 content_type='application/json',
                  default_encoding='utf-8'):
         super().__init__(content_type, self.dumps, self.loads,
                          default_encoding)
@@ -240,5 +240,5 @@ class MsgPackTranscoder(handlers.BinaryContentHandler):
                 out[k] = self.normalize_datum(v)
             return out

-        raise TypeError(
-            '{} is not msgpackable'.format(datum.__class__.__name__))
+        raise TypeError('{} is not msgpackable'.format(
+            datum.__class__.__name__))
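A quick illustration of that error path (hypothetical usage, assuming packb() runs its argument through normalize_datum() as the tests below suggest):

    from sprockets.mixins.mediatype import transcoders

    transcoder = transcoders.MsgPackTranscoder()
    try:
        transcoder.packb(object())   # not one of the normalizable types
    except TypeError as error:
        print(error)                 # "object is not msgpackable"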

tests.py: 125 changes
@@ -37,11 +37,11 @@ def pack_string(obj):
     """Optimally pack a string according to msgpack format"""
     payload = str(obj).encode('ASCII')
     pl = len(payload)
-    if pl < (2 ** 5):
+    if pl < (2**5):
         prefix = struct.pack('B', 0b10100000 | pl)
-    elif pl < (2 ** 8):
+    elif pl < (2**8):
         prefix = struct.pack('BB', 0xD9, pl)
-    elif pl < (2 ** 16):
+    elif pl < (2**16):
         prefix = struct.pack('>BH', 0xDA, pl)
     else:
         prefix = struct.pack('>BI', 0xDB, pl)
@@ -51,9 +51,9 @@ def pack_string(obj):
 def pack_bytes(payload):
     """Optimally pack a byte string according to msgpack format"""
     pl = len(payload)
-    if pl < (2 ** 8):
+    if pl < (2**8):
         prefix = struct.pack('BB', 0xC4, pl)
-    elif pl < (2 ** 16):
+    elif pl < (2**16):
         prefix = struct.pack('>BH', 0xC5, pl)
     else:
         prefix = struct.pack('>BI', 0xC6, pl)
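As a sanity check on the prefix arithmetic in these helpers (an illustration only): a two-byte ASCII string falls in the first branch of pack_string, so its fixstr prefix is 0b10100000 | 2 == 0xA2, which matches what umsgpack produces for the same value.

    import struct

    import umsgpack  # already used elsewhere in tests.py

    payload = 'hi'.encode('ASCII')
    prefix = struct.pack('B', 0b10100000 | len(payload))   # b'\xa2'
    assert prefix + payload == umsgpack.packb('hi') == b'\xa2hi'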
@@ -61,42 +61,55 @@ def pack_bytes(payload):


 class SendResponseTests(testing.AsyncHTTPTestCase):
-
     def get_app(self):
         return examples.make_application()

     def test_that_content_type_default_works(self):
-        response = self.fetch('/', method='POST', body='{}',
+        response = self.fetch('/',
+                              method='POST',
+                              body='{}',
                               headers={'Content-Type': 'application/json'})
         self.assertEqual(response.code, 200)
         self.assertEqual(response.headers['Content-Type'],
                          'application/json; charset="utf-8"')

     def test_that_missing_content_type_uses_default(self):
-        response = self.fetch('/', method='POST', body='{}',
-                              headers={'Accept': 'application/xml',
-                                       'Content-Type': 'application/json'})
+        response = self.fetch('/',
+                              method='POST',
+                              body='{}',
+                              headers={
+                                  'Accept': 'application/xml',
+                                  'Content-Type': 'application/json'
+                              })
         self.assertEqual(response.code, 200)
         self.assertEqual(response.headers['Content-Type'],
                          'application/json; charset="utf-8"')

     def test_that_accept_header_is_obeyed(self):
-        response = self.fetch('/', method='POST', body='{}',
-                              headers={'Accept': 'application/msgpack',
-                                       'Content-Type': 'application/json'})
+        response = self.fetch('/',
+                              method='POST',
+                              body='{}',
+                              headers={
+                                  'Accept': 'application/msgpack',
+                                  'Content-Type': 'application/json'
+                              })
         self.assertEqual(response.code, 200)
         self.assertEqual(response.headers['Content-Type'],
                          'application/msgpack')

     def test_that_default_content_type_is_set_on_response(self):
-        response = self.fetch('/', method='POST', body=umsgpack.packb({}),
+        response = self.fetch('/',
+                              method='POST',
+                              body=umsgpack.packb({}),
                               headers={'Content-Type': 'application/msgpack'})
         self.assertEqual(response.code, 200)
         self.assertEqual(response.headers['Content-Type'],
                          'application/json; charset="utf-8"')

     def test_that_vary_header_is_set(self):
-        response = self.fetch('/', method='POST', body=umsgpack.packb({}),
+        response = self.fetch('/',
+                              method='POST',
+                              body=umsgpack.packb({}),
                               headers={'Content-Type': 'application/msgpack'})
         self.assertEqual(response.code, 200)
         self.assertEqual(response.headers['Vary'], 'Accept')
@@ -106,9 +119,13 @@ class SendResponseTests(testing.AsyncHTTPTestCase):
             self._app,
             transcoders.MsgPackTranscoder(content_type='expected/content'),
             'application/vendor+msgpack')
-        response = self.fetch('/', method='POST', body='{}',
-                              headers={'Accept': 'application/vendor+msgpack',
-                                       'Content-Type': 'application/json'})
+        response = self.fetch('/',
+                              method='POST',
+                              body='{}',
+                              headers={
+                                  'Accept': 'application/vendor+msgpack',
+                                  'Content-Type': 'application/json'
+                              })
         self.assertEqual(response.code, 200)
         self.assertEqual(response.headers['Content-Type'], 'expected/content')
@@ -123,54 +140,55 @@ class GetRequestBodyTests(testing.AsyncHTTPTestCase):
         return self.app

     def test_that_request_with_unhandled_type_results_in_415(self):
-        response = self.fetch(
-            '/', method='POST', headers={'Content-Type': 'application/xml'},
-            body=('<request><name>value</name>'
-                  '<embedded><utf8>\u2731</utf8></embedded>'
-                  '</request>').encode('utf-8'))
+        response = self.fetch('/',
+                              method='POST',
+                              headers={'Content-Type': 'application/xml'},
+                              body=('<request><name>value</name>'
+                                    '<embedded><utf8>\u2731</utf8></embedded>'
+                                    '</request>').encode('utf-8'))
         self.assertEqual(response.code, 415)

     def test_that_msgpack_request_returns_default_type(self):
-        body = {
-            'name': 'value',
-            'embedded': {
-                'utf8': '\u2731'
-            }
-        }
-        response = self.fetch('/', method='POST', body=umsgpack.packb(body),
+        body = {'name': 'value', 'embedded': {'utf8': '\u2731'}}
+        response = self.fetch('/',
+                              method='POST',
+                              body=umsgpack.packb(body),
                               headers={'Content-Type': 'application/msgpack'})
         self.assertEqual(response.code, 200)
         self.assertEqual(json.loads(response.body.decode('utf-8')), body)

     def test_that_invalid_data_returns_400(self):
         response = self.fetch(
-            '/', method='POST', headers={'Content-Type': 'application/json'},
+            '/',
+            method='POST',
+            headers={'Content-Type': 'application/json'},
             body=('<?xml version="1.0"?><methodCall><methodName>echo'
                   '</methodName><params><param><value><str>Hi</str></value>'
                   '</param></params></methodCall>').encode('utf-8'))
         self.assertEqual(response.code, 400)

     def test_that_content_type_suffix_is_handled(self):
-        content.add_transcoder(
-            self._app, transcoders.JSONTranscoder(),
-            'application/vendor+json')
+        content.add_transcoder(self._app, transcoders.JSONTranscoder(),
+                               'application/vendor+json')
         body = {'hello': 'world'}
         response = self.fetch(
-            '/', method='POST', body=json.dumps(body),
+            '/',
+            method='POST',
+            body=json.dumps(body),
             headers={'Content-Type': 'application/vendor+json'})
         self.assertEqual(response.code, 200)
         self.assertEqual(json.loads(response.body.decode()), body)

     def test_that_invalid_content_types_result_in_bad_request(self):
         content.set_default_content_type(self.app, None, None)
-        response = self.fetch(
-            '/', method='POST', body='{"hi":"there"}',
-            headers={'Content-Type': 'application-json'})
+        response = self.fetch('/',
+                              method='POST',
+                              body='{"hi":"there"}',
+                              headers={'Content-Type': 'application-json'})
         self.assertEqual(response.code, 400)


 class JSONTranscoderTests(unittest.TestCase):
-
     def setUp(self):
         super().setUp()
         self.transcoder = transcoders.JSONTranscoder()
@@ -211,7 +229,6 @@ class JSONTranscoderTests(unittest.TestCase):


 class ContentSettingsTests(unittest.TestCase):
-
     def test_that_handler_listed_in_available_content_types(self):
         settings = content.ContentSettings()
         settings['application/json'] = object()
@@ -249,14 +266,12 @@ class ContentSettingsTests(unittest.TestCase):


 class ContentFunctionTests(unittest.TestCase):
-
     def setUp(self):
         super().setUp()
         self.context = Context()

     def test_that_add_binary_content_type_creates_binary_handler(self):
-        settings = content.install(self.context,
-                                   'application/octet-stream')
+        settings = content.install(self.context, 'application/octet-stream')
         content.add_binary_content_type(self.context,
                                         'application/vnd.python.pickle',
                                         pickle.dumps, pickle.loads)
@@ -304,7 +319,6 @@ class ContentFunctionTests(unittest.TestCase):


 class MsgPackTranscoderTests(unittest.TestCase):
-
     def setUp(self):
         super().setUp()
         self.transcoder = transcoders.MsgPackTranscoder()
@@ -322,22 +336,21 @@ class MsgPackTranscoderTests(unittest.TestCase):
         self.assertEqual(self.transcoder.packb(True), b'\xC3')

     def test_that_ints_are_packed_appropriately(self):
-        self.assertEqual(self.transcoder.packb((2 ** 7) - 1), b'\x7F')
-        self.assertEqual(self.transcoder.packb(2 ** 7), b'\xCC\x80')
-        self.assertEqual(self.transcoder.packb(2 ** 8), b'\xCD\x01\x00')
-        self.assertEqual(self.transcoder.packb(2 ** 16),
-                         b'\xCE\x00\x01\x00\x00')
-        self.assertEqual(self.transcoder.packb(2 ** 32),
+        self.assertEqual(self.transcoder.packb((2**7) - 1), b'\x7F')
+        self.assertEqual(self.transcoder.packb(2**7), b'\xCC\x80')
+        self.assertEqual(self.transcoder.packb(2**8), b'\xCD\x01\x00')
+        self.assertEqual(self.transcoder.packb(2**16), b'\xCE\x00\x01\x00\x00')
+        self.assertEqual(self.transcoder.packb(2**32),
                          b'\xCF\x00\x00\x00\x01\x00\x00\x00\x00')

     def test_that_negative_ints_are_packed_accordingly(self):
-        self.assertEqual(self.transcoder.packb(-(2 ** 0)), b'\xFF')
-        self.assertEqual(self.transcoder.packb(-(2 ** 5)), b'\xE0')
-        self.assertEqual(self.transcoder.packb(-(2 ** 7)), b'\xD0\x80')
-        self.assertEqual(self.transcoder.packb(-(2 ** 15)), b'\xD1\x80\x00')
-        self.assertEqual(self.transcoder.packb(-(2 ** 31)),
+        self.assertEqual(self.transcoder.packb(-(2**0)), b'\xFF')
+        self.assertEqual(self.transcoder.packb(-(2**5)), b'\xE0')
+        self.assertEqual(self.transcoder.packb(-(2**7)), b'\xD0\x80')
+        self.assertEqual(self.transcoder.packb(-(2**15)), b'\xD1\x80\x00')
+        self.assertEqual(self.transcoder.packb(-(2**31)),
                          b'\xD2\x80\x00\x00\x00')
-        self.assertEqual(self.transcoder.packb(-(2 ** 63)),
+        self.assertEqual(self.transcoder.packb(-(2**63)),
                          b'\xD3\x80\x00\x00\x00\x00\x00\x00\x00')

     def test_that_lists_are_treated_as_arrays(self):

tox.ini: 1 change
@@ -25,3 +25,4 @@ commands =
 [testenv:lint]
 commands =
     flake8 sprockets tests.py
+    yapf -dr docs setup.py sprockets tests.py