Compare commits

...

4 commits

Author SHA1 Message Date
f7277eb250 Display collection statistics in footer 2022-08-03 18:07:56 -04:00
362bef913c Add collection stats endpoint 2022-08-03 17:26:14 -04:00
f1a0f8233d Use psycopg connection pooling 2022-08-03 17:21:55 -04:00
2040b25434 Fix deck imports 2022-08-03 16:49:37 -04:00
7 changed files with 151 additions and 42 deletions

39
poetry.lock generated
View file

@ -25,17 +25,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
[[package]]
name = "attrs"
version = "21.4.0"
version = "22.1.0"
description = "Classes Without Boilerplate"
category = "dev"
optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*"
python-versions = ">=3.5"
[package.extras]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"]
docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"]
tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "zope.interface", "cloudpickle"]
tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "mypy (>=0.900,!=0.940)", "pytest-mypy-plugins", "cloudpickle"]
[[package]]
name = "black"
@ -242,32 +242,41 @@ testing = ["pytest", "pytest-benchmark"]
[[package]]
name = "psycopg"
version = "3.0.15"
version = "3.0.16"
description = "PostgreSQL database adapter for Python"
category = "main"
optional = false
python-versions = ">=3.6"
[package.dependencies]
psycopg-c = {version = "3.0.15", optional = true, markers = "extra == \"c\""}
psycopg-c = {version = "3.0.16", optional = true, markers = "extra == \"c\""}
psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""}
tzdata = {version = "*", markers = "sys_platform == \"win32\""}
[package.extras]
binary = ["psycopg-binary (==3.0.15)"]
c = ["psycopg-c (==3.0.15)"]
binary = ["psycopg-binary (==3.0.16)"]
c = ["psycopg-c (==3.0.16)"]
dev = ["black (>=22.3.0)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=0.920,!=0.930,!=0.931)", "types-setuptools (>=57.4)", "wheel (>=0.37)"]
docs = ["Sphinx (>=4.2)", "furo (==2021.11.23)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)", "dnspython (>=2.1)", "shapely (>=1.7)"]
docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"]
pool = ["psycopg-pool"]
test = ["mypy (>=0.920,!=0.930,!=0.931)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-asyncio (>=0.16,<0.17)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.10)"]
[[package]]
name = "psycopg-c"
version = "3.0.15"
version = "3.0.16"
description = "PostgreSQL database adapter for Python -- C optimisation distribution"
category = "main"
optional = false
python-versions = ">=3.6"
[[package]]
name = "psycopg-pool"
version = "3.1.1"
description = "Connection Pool for Psycopg"
category = "main"
optional = false
python-versions = ">=3.7"
[[package]]
name = "py"
version = "1.11.0"
@ -382,15 +391,12 @@ python-versions = ">=2"
[metadata]
lock-version = "1.1"
python-versions = "^3.9"
content-hash = "caaf302d6a55e9ff876fbcf8041aafb8c6b2a68369552e5a2d1aec847fd8251b"
content-hash = "da053b15b994f3faa9d50bda1bfb459e64356cb2c9e06bad096cf57b67101894"
[metadata.files]
anyio = []
atomicwrites = []
attrs = [
{file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"},
{file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"},
]
attrs = []
black = []
certifi = []
click = []
@ -435,6 +441,7 @@ pluggy = [
]
psycopg = []
psycopg-c = []
psycopg-pool = []
py = [
{file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"},
{file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"},

View file

@ -13,7 +13,7 @@ httpx = "^0.18.2"
parsy = "^1.3.0"
jsonslicer = "^0.1.7"
tqdm = "^4.64.0"
psycopg = {extras = ["c"], version = "^3.0"}
psycopg = {extras = ["c", "pool"], version = "^3.0"}
[tool.poetry.dev-dependencies]
pytest = "*"

View file

@ -57,8 +57,8 @@ def main(ctx, database, log_level):
@click.option("--debug", is_flag=True)
@click.pass_context
def server(ctx, port, scheme, static, debug):
app = tutor.server.make_app(
{
app = tutor.server.Application(
**{
**ctx.obj,
"scheme": scheme,
"static": static,
@ -125,16 +125,19 @@ def import_deck(ctx, filename, name):
for line in bar:
if match := line_pattern.match(line.strip()):
groups = match.groupdict()
cards = await tutor.database.advanced_search(
copies = await tutor.database.advanced_search(
cursor,
tutor.search.Search(
[tutor.search.Name(text=groups["name"])]
),
limit=1,
)
if cards:
if copies:
await tutor.database.store_deck_card(
cursor, deck_id, cards[0].oracle_id, int(groups["quantity"])
cursor,
deck_id,
copies[0].card.oracle_id,
int(groups["quantity"]),
)
await conn.commit()

View file

@ -168,6 +168,8 @@ async def advanced_search(
else:
joins.append("LEFT JOIN copies ON (cards.scryfall_id = copies.scryfall_id)")
constraints.append("copies.id IS NULL")
else:
joins.append("LEFT JOIN copies ON (cards.scryfall_id = copies.scryfall_id)")
joins.append("JOIN sets ON (cards.set_code = sets.set_code)")
joins.append(
"JOIN card_prices ON (cards.scryfall_id = card_prices.scryfall_id "
@ -324,11 +326,12 @@ async def clear_copies(db: psycopg.Cursor, collection: typing.Optional[str] = No
async def store_deck(db: psycopg.Cursor, name: str) -> None:
cursor = await db.execute(
'INSERT INTO "decks" ("name") VALUES (%(name)s)',
await db.execute(
'INSERT INTO "decks" ("name") VALUES (%(name)s) RETURNING "deck_id"',
{"name": name},
)
return cursor.lastrowid
result = await db.fetchone()
return result[0]
async def store_deck_card(
@ -353,3 +356,23 @@ async def store_var(db: psycopg.Cursor, key: str, value: str) -> None:
""",
{"key": key, "value": value},
)
async def collection_stats(db: psycopg.Cursor) -> dict:
    """Return aggregate statistics about the card collection.

    Keys of the returned row (dict-shaped via ``dict_row``):
      - ``cards``: number of copies in the collection (COUNT of copies.id).
      - ``value``: estimated total price, using the foil price for foil
        copies and the regular price otherwise (SQL SUM skips NULL prices).
      - ``sets``: number of distinct sets the copies span.

    NOTE(review): the annotation says ``psycopg.Cursor`` but every call is
    awaited, so an ``AsyncCursor`` is evidently expected here — confirm and
    tighten the annotation.
    """
    # Switch to dict rows so callers get named fields, but restore the
    # caller's row factory afterwards: the cursor is shared, and the
    # previous code left the mutation in place, silently changing the row
    # shape of any later fetches on the same cursor.
    previous_row_factory = db.row_factory
    db.row_factory = psycopg.rows.dict_row
    try:
        await db.execute(
            """
            SELECT COUNT("copies"."id") AS cards
                 , SUM(
                       CASE WHEN "copies"."isFoil"
                            THEN "card_prices"."usd_foil"
                            ELSE "card_prices"."usd"
                       END
                   ) AS value
                 , COUNT(DISTINCT cards.set_code) AS sets
            FROM "copies"
            JOIN "cards" USING ("scryfall_id")
            LEFT JOIN "card_prices" USING ("scryfall_id")
            """
        )
        # Aggregate query always yields exactly one row.
        return await db.fetchone()
    finally:
        db.row_factory = previous_row_factory

View file

@ -4,6 +4,9 @@ import typing
import urllib.parse
import psycopg
import psycopg.rows
import psycopg_pool
import tornado.ioloop
import tornado.web
import tutor.database
@ -56,7 +59,7 @@ class SearchHandler(tornado.web.RequestHandler):
)
async def get(self) -> None:
async with await psycopg.AsyncConnection.connect(self.application.settings["database"]) as conn:
async with self.application.pool.connection() as conn:
async with conn.cursor() as cursor:
query = self.get_argument("q", "")
in_collection = self.get_argument("in_collection", None)
@ -83,11 +86,13 @@ class SearchHandler(tornado.web.RequestHandler):
if has_more:
links["next"] = update_args(self.request.full_url(), page=page + 1)
self.set_links(**links)
def price(amount: typing.Optional[decimal.Decimal]) -> typing.Optional[str]:
if amount is not None:
return str(amount)
else:
return None
self.write(
json.dumps(
[
@ -117,18 +122,35 @@ class SearchHandler(tornado.web.RequestHandler):
)
def make_app(settings) -> tornado.web.Application:
paths = [
(r"/search", SearchHandler),
]
if static_path := settings.get("static"):
paths.extend(
[
(
r"/(.*)",
tornado.web.StaticFileHandler,
{"path": static_path, "default_filename": "index.html"},
),
]
)
return tornado.web.Application(paths, **settings)
class CollectionHandler(tornado.web.RequestHandler):
    """Serves aggregate collection statistics as JSON (GET /collection)."""

    async def get(self) -> None:
        # Borrow a pooled connection for the duration of the query.
        async with self.application.pool.connection() as conn:
            async with conn.cursor() as cursor:
                statistics = await tutor.database.collection_stats(cursor)
                # default=str stringifies values the json module cannot
                # encode natively (e.g. the price aggregate).
                self.write(json.dumps(statistics, default=str))
class Application(tornado.web.Application):
    """Tornado application wiring URL routes and a shared psycopg pool.

    Routes: /search and /collection, plus an optional catch-all static
    file handler when a ``static`` path is configured in settings.
    """

    def __init__(self, **settings):
        paths = [
            (r"/search", SearchHandler),
            (r"/collection", CollectionHandler),
        ]
        if static_path := settings.get("static"):
            paths.extend(
                [
                    (
                        r"/(.*)",
                        tornado.web.StaticFileHandler,
                        {"path": static_path, "default_filename": "index.html"},
                    ),
                ]
            )
        super().__init__(paths, **settings)
        # Register the pool-creation callback only after super().__init__
        # has populated self.settings, which async_init reads.  (The old
        # order worked only because the callback runs later on the loop.)
        tornado.ioloop.IOLoop.current().add_callback(self.async_init)

    async def async_init(self):
        # Create the shared connection pool on the running event loop;
        # handlers access it as self.application.pool.
        self.pool = psycopg_pool.AsyncConnectionPool(self.settings["database"])

View file

@ -5,6 +5,7 @@ import Browser.Dom
import Browser.Events
import Browser.Navigation
import Card
import Collection
import Color
import Dict
import Element as E
@ -47,6 +48,7 @@ type alias Model =
, criteria : Criteria
, cardPage : CardPage
, activeCard : Maybe Card.Card
, collectionStatistics : Maybe Collection.Statistics
}
@ -58,6 +60,7 @@ type Msg
| UpdateCriteria CriteriaMsg
| Search
| GetPage Url.Url
| GotStatistics (Result Http.Error Collection.Statistics)
| FoundCards (Result Http.Error (Paginated.Page Card.Card))
| ShowCardDetails Card.Card
| ClearCardDetails
@ -148,6 +151,14 @@ loadPage url =
}
{-| Request aggregate collection statistics from the backend's
`/collection` endpoint; the decoded result arrives as a `GotStatistics`
message.
-}
getCollectionStatistics : Cmd Msg
getCollectionStatistics =
    Http.get
        { url = Url.Builder.absolute [ "collection" ] []
        , expect = Http.expectJson GotStatistics Collection.decodeStatistics
        }
parseUrl : Url.Parser.Parser (Criteria -> a) a
parseUrl =
let
@ -204,9 +215,11 @@ init _ url key =
, criteria = criteria
, cardPage = Loading Paginated.empty
, activeCard = Nothing
, collectionStatistics = Nothing
}
, Cmd.batch
[ search criteria
, getCollectionStatistics
, Task.perform
(\x ->
ViewportChanged
@ -287,6 +300,12 @@ update msg model =
GetPage url ->
( { model | cardPage = toLoading model.cardPage }, loadPage (Url.toString url) )
GotStatistics (Ok statistics) ->
( { model | collectionStatistics = Just statistics }, Cmd.none )
GotStatistics (Err _) ->
( model, Cmd.none )
FoundCards (Ok cardPage) ->
( { model | cardPage = Ready cardPage }, Cmd.none )
@ -755,11 +774,27 @@ view model =
, E.el
[ E.height (E.px 50)
, E.width E.fill
, E.padding 10
, Font.color colors.text
, Background.color colors.navBar
, E.alignBottom
]
<|
E.none
case model.collectionStatistics of
Just statistics ->
E.el [ E.centerY, Font.size 16, Font.italic ] <|
E.text <|
String.concat
[ String.fromInt statistics.cards
, " cards in collection spanning "
, String.fromInt statistics.sets
, " sets (Estimated value: $"
, statistics.value
, ")"
]
Nothing ->
E.none
]
]
}

19
www/src/Collection.elm Normal file
View file

@ -0,0 +1,19 @@
module Collection exposing (..)
import Json.Decode
import Json.Decode.Pipeline as JDP
{-| Aggregate statistics about the card collection, as reported by the
backend's `/collection` endpoint.  `value` is the estimated total price
rendered as a string by the server (presumably to preserve decimal
precision — confirm against the backend's `default=str` serialization).
-}
type alias Statistics =
    { cards : Int
    , sets : Int
    , value : String
    }
{-| Decode the JSON payload of the `/collection` endpoint into
`Statistics`.  All three fields are required.
-}
decodeStatistics : Json.Decode.Decoder Statistics
decodeStatistics =
    Json.Decode.map3 Statistics
        (Json.Decode.field "cards" Json.Decode.int)
        (Json.Decode.field "sets" Json.Decode.int)
        (Json.Decode.field "value" Json.Decode.string)