Revert "Add Postgres support (#67)"

This reverts commit 2c2e51ddd3.
ZeldaZach 2021-01-20 13:16:25 -05:00
parent 2c2e51ddd3
commit debc6ce2cb
2 changed files with 112 additions and 150 deletions

[File 1 of 2: the command-line entry point]

@@ -37,12 +37,6 @@ if __name__ == "__main__":
         action="store_true",
         required=False,
     )
-    parser.add_argument(
-        "-e",
-        help="SQL database engine ('postgres' or 'mysql'). Only used if output file has .sql extension.",
-        default="postgres",
-        required=False,
-    )
     args = parser.parse_args()

     # Define our I/O paths
@@ -65,7 +59,6 @@ if __name__ == "__main__":
             input_file,
             {"path": output_file["path"].joinpath("AllPrintings.sql"), "handle": None},
             args.x,
-            args.e,
         )

         logging.info("> Creating AllPrintings CSV components")
@@ -76,4 +69,4 @@ if __name__ == "__main__":
     elif str(input_file).endswith(".sqlite"):
         sql2csv.execute(input_file, output_file)
     else:
-        json2sql.execute(input_file, output_file, args.x, args.e)
+        json2sql.execute(input_file, output_file, args.x)
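With the engine flag reverted out, the entry point keeps only its remaining switches, and a `.sql` output path always produces the MySQL dialect. A minimal sketch of the surviving parser wiring, assuming the `-x` spelling behind `args.x` (the help text is illustrative; only the flag names shown in this diff are real):

    import argparse

    parser = argparse.ArgumentParser()
    # The "-e" engine option above is gone after the revert; "-x" (extras) remains.
    parser.add_argument("-x", action="store_true", required=False,
                        help="also process the extra JSON files")  # illustrative help
    args = parser.parse_args()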

[File 2 of 2: the json2sql converter module]

@@ -7,20 +7,18 @@ import logging
 import pathlib
 import sqlite3
 import time
-from typing import Any, Dict, List, Union, Literal
+from typing import Any, Dict, List, Union

 LOGGER = logging.getLogger(__name__)

 JsonDict = Dict[str, any]
-Engine = Literal["postgres", "mysql", "sqlite"]

-def execute(json_input, output_file, check_extras=False, engine: Engine = "postgres") -> None:
+def execute(json_input, output_file, check_extras=False) -> None:
     """Main function to handle the logic
     :param json_input: Input file (JSON)
     :param output_file: Output dir
-    :param check_extras: additional json files to process
-    :param engine: SQL database engine
+    :param extras: additional json files to process
     """
     if not valid_input_output(json_input, output_file):
         exit(1)
@@ -29,10 +27,10 @@ def execute(json_input, output_file, check_extras=False, engine: Engine = "postgres") -> None:
     LOGGER.info("Loading json file into memory")
     with json_input.open("r", encoding="utf8") as json_file:
         json_data = json.load(json_file)

-    build_sql_database(output_file, json_data, engine)
-    build_sql_schema(json_data, output_file, engine)
-    parse_and_import_cards(json_data, json_input, output_file, engine)
-    parse_and_import_extras(json_input, output_file, engine)
+    build_sql_database(output_file, json_data)
+    build_sql_schema(json_data, output_file)
+    parse_and_import_cards(json_data, json_input, output_file)
+    parse_and_import_extras(json_input, output_file)
     commit_changes_and_close_db(output_file)
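After the revert, the whole pipeline runs without an engine argument; callers pass only the input path, the output descriptor, and the extras flag. A minimal driving sketch, assuming the dict shape used by the caller in file 1 (the import path and file names are illustrative):

    import pathlib
    import json2sql  # illustrative import; the entry point calls json2sql.execute(...)

    json2sql.execute(
        pathlib.Path("AllPrintings.json"),                            # input JSON
        {"path": pathlib.Path("AllPrintings.sql"), "handle": None},   # output descriptor
        False,                                                        # check_extras
    )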
@@ -78,7 +76,7 @@ def check_extra_inputs(input_file: pathlib.Path,
             output_dir[extra] = True


-def build_sql_database(output_file: Dict, json_data: JsonDict, engine: Engine) -> None:
+def build_sql_database(output_file: str, json_data: JsonDict) -> None:
     if output_file["path"].suffix == ".sql":
         version = get_version(json_data)
         output_file["handle"] = open(output_file["path"], "w", encoding="utf8")
@@ -91,7 +89,7 @@ def build_sql_database(output_file: Dict, json_data: JsonDict, engine: Engine) -> None:
                 "-- MTGJSON Version: {}".format(version),
                 "",
                 "START TRANSACTION;",
-                "SET names 'utf8mb4';" if engine != "postgres" else "",
+                "SET names 'utf8mb4';",
                 "",
                 "",
             )
@@ -114,24 +112,24 @@ def get_version(json_data: Dict) -> str:
     return "Unknown"


-def build_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> None:
+def build_sql_schema(json_data: Dict, output_file: Dict) -> None:
     """
     Create the SQLite DB schema
     """
+    LOGGER.info("Building SQLite schema")
     if output_file["path"].suffix == ".sql":
-        LOGGER.info("Building SQL schema")
-        schema = generate_sql_schema(json_data, output_file, engine)
+        schema = generate_sql_schema(json_data, output_file, "mysql")
         output_file["handle"].write(schema)
         output_file["handle"].write("COMMIT;\n\n")
     else:
-        LOGGER.info("Building SQLite schema")
         schema = generate_sql_schema(json_data, output_file, "sqlite")
         cursor = output_file["handle"].cursor()
         cursor.executescript(schema)
         output_file["handle"].commit()


-def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
+def generate_sql_schema(json_data: Dict,
+                        output_file: Dict, engine: str) -> str:
     """
     Generate the SQL database schema from the JSON input
@@ -153,23 +151,19 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
             "date": {"type": "DATE"},
         },
         "legalities": {
-            "format": {
-                "type": "legalities_format" if engine == "postgres" else "TEXT" if engine == "sqlite" else "ENUM"},
-            "status": {
-                "type": "legalities_status" if engine == "postgres" else "TEXT" if engine == "sqlite" else "ENUM"},
+            "format": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
+            "status": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
         },
         "foreign_data": {
             "flavorText": {"type": "TEXT"},
-            "language": {
-                "type": "foreign_data_language" if engine == "postgres" else "TEXT" if engine == "sqlite" else "ENUM"},
+            "language": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
             "multiverseid": {"type": "INTEGER"},
             "name": {"type": "TEXT"},
             "text": {"type": "TEXT"},
             "type": {"type": "TEXT"},
         },
         "set_translations": {
-            "language": {
-                "type": "set_translations_language" if engine == "postgres" else "TEXT" if engine == "sqlite" else "ENUM"},
+            "language": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
             "translation": {"type": "TEXT"},
         },
     }
@@ -219,23 +213,22 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
             # handle enum options
             if cardKey in enums:
                 if cardKey == "foreign_data":
-                    if schema[cardKey]["language"]["type"] != "TEXT":
+                    if schema[cardKey]["language"]["type"] == "ENUM":
                         for foreign in cardValue:
                             if "options" in schema[cardKey]["language"]:
                                 if foreign["language"] not in schema[cardKey]["language"]["options"]:
-                                    schema[cardKey]["language"]["options"].append(
-                                        foreign["language"])
+                                    schema[cardKey]["language"]["options"].append(foreign["language"])
                             else:
                                 schema[cardKey]["language"]["options"] = [foreign["language"]]
                 elif cardKey == "legalities":
-                    if schema[cardKey]["format"]["type"] != "TEXT":
+                    if schema[cardKey]["format"]["type"] == "ENUM":
                         for format in cardValue.keys():
                             if "options" in schema[cardKey]["format"]:
                                 if format not in schema[cardKey]["format"]["options"]:
                                     schema[cardKey]["format"]["options"].append(format)
                             else:
                                 schema[cardKey]["format"]["options"] = [format]
-                    if schema[cardKey]["status"]["type"] != "TEXT":
+                    if schema[cardKey]["status"]["type"] == "ENUM":
                         for status in cardValue.values():
                             if "options" in schema[cardKey]["status"]:
                                 if status not in schema[cardKey]["status"]["options"]:
@@ -243,7 +236,7 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
                             else:
                                 schema[cardKey]["status"]["options"] = [status]
                 elif cardKey == "prices":
-                    if schema[cardKey]["type"]["type"] != "TEXT":
+                    if schema[cardKey]["type"]["type"] == "ENUM":
                         for type in cardValue.keys():
                             if "options" in schema[cardKey]["type"]:
                                 if type not in schema[cardKey]["type"]["options"]:
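These blocks all follow the same pattern: when a column is typed ENUM, scan every value seen in the data and accumulate the distinct options that will later form the ENUM definition. A condensed, self-contained sketch of that accumulation (sample data is illustrative, and `setdefault` stands in for the explicit if/else above):

    schema = {"legalities": {"format": {"type": "ENUM"}, "status": {"type": "ENUM"}}}
    cards = [  # illustrative card rows
        {"legalities": {"modern": "Legal", "legacy": "Banned"}},
        {"legalities": {"legacy": "Legal"}},
    ]
    for card in cards:
        for fmt, status in card["legalities"].items():
            for col, value in (("format", fmt), ("status", status)):
                options = schema["legalities"][col].setdefault("options", [])
                if value not in options:
                    options.append(value)
    # schema["legalities"]["format"]["options"] -> ["modern", "legacy"]
    # schema["legalities"]["status"]["options"] -> ["Legal", "Banned"]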
@@ -256,10 +249,6 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
                     schema[cardKey]["uuid"] = {
                         "type": "TEXT(36) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE"
                     }
-                    if engine == "postgres":
-                        schema[cardKey]["uuid"] = {
-                            "type": "CHAR(36) NOT NULL,\n FOREIGN KEY (uuid) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE"
-                        }
                 else:
                     schema[cardKey]["uuid"] = {
                         "type": "CHAR(36) NOT NULL,\n INDEX(uuid),\n FOREIGN KEY (uuid) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE"
@@ -271,11 +260,7 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
                         if cardValue not in schema[setKey][cardKey]["options"]:
                             schema[setKey][cardKey]["options"].append(cardValue)
                     else:
-                        if cardKey in enums[setKey]:
-                            if engine == "postgres":
-                                schema[setKey][cardKey] = {"type": f"{setKey}_{cardKey}",
-                                                           "options": [cardValue]}
-                            if engine == "mysql":
+                        if cardKey in enums[setKey] and not engine == "sqlite":
                             schema[setKey][cardKey] = {"type": "ENUM", "options": [cardValue]}
                         else:
                             # determine type of the property
@@ -295,7 +280,7 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
                                 + " NOT NULL"
                             )
             if setKey == "set_translations":
-                if schema[setKey]["language"]["type"] != "TEXT":
+                if schema[setKey]["language"]["type"] == "ENUM":
                     if setValue:
                         for language in setValue.keys():
                             if "options" not in schema[setKey]["language"]:
@@ -309,10 +294,6 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
                 schema[setKey]["setCode"] = {
                     "type": "TEXT(8) REFERENCES sets(code) ON UPDATE CASCADE ON DELETE CASCADE"
                 }
-                if engine == "postgres":
-                    schema[setKey]["setCode"] = {
-                        "type": "VARCHAR(8) NOT NULL,\n FOREIGN KEY (setCode) REFERENCES sets(code) ON UPDATE CASCADE ON DELETE CASCADE"
-                    }
             else:
                 schema[setKey]["setCode"] = {
                     "type": "VARCHAR(8) NOT NULL,\n INDEX(setCode),\n FOREIGN KEY (setCode) REFERENCES sets(code) ON UPDATE CASCADE ON DELETE CASCADE"
@@ -326,16 +307,13 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
             else:
                 # handle boosters
                 if setKey == "booster":
-                    if engine == "sqlite" or engine == "postgres":
+                    if engine == "sqlite":
                         schema["sets"]["booster"] = {"type": "TEXT"}
                     else:
                         schema["sets"]["booster"] = {"type": "LONGTEXT"}
                     continue
                 # determine type of the set property
-                if setKey in enums["sets"]:
-                    if engine == "postgres":
-                        schema["sets"][setKey] = {"type": f"sets_{setKey}", "options": [setValue]}
-                    if engine == "mysql":
+                if setKey in enums["sets"] and not engine == "sqlite":
                     schema["sets"][setKey] = {"type": "ENUM", "options": [setValue]}
                 elif setKey == "releaseDate":
                     schema["sets"][setKey] = {"type": "DATE"}
@@ -361,8 +339,7 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
         }
     if output_file["AllPrices.json"] or version.startswith("4"):
         schema["prices"] = {
-            "uuid": {
-                "type": "TEXT(36) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE" if engine == "sqlite" else "CHAR(36) NOT NULL,\n INDEX(uuid),\n FOREIGN KEY (uuid) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE"},
+            "uuid": { "type": "TEXT(36) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE" if engine == "sqlite" else "CHAR(36) NOT NULL,\n INDEX(uuid),\n FOREIGN KEY (uuid) REFERENCES cards(uuid) ON UPDATE CASCADE ON DELETE CASCADE" },
             "price": {"type": "FLOAT" if engine == "sqlite" else "DECIMAL(8,2)"},
             "type": {"type": "TEXT" if engine == "sqlite" else "ENUM"},
             "date": {"type": "DATE"},
@@ -396,20 +373,16 @@ def generate_sql_schema(json_data: Dict, output_file: Dict, engine: Engine) -> str:
     return get_query_from_dict(schema, engine)


-def get_sql_type(mixed, engine: Engine) -> str:
+def get_sql_type(mixed, engine: str) -> str:
     """
     Return a string with the type of the parameter mixed
     The type depends on the SQL engine in some cases
     """
-    if isinstance(mixed, list) and engine == "postgres":
-        return "TEXT[]"
-    elif isinstance(mixed, str) or isinstance(mixed, list) or isinstance(mixed, dict):
+    if isinstance(mixed, str) or isinstance(mixed, list) or isinstance(mixed, dict):
         return "TEXT"
     elif isinstance(mixed, bool):
-        if engine == "postgres":
-            return "BOOLEAN NOT NULL DEFAULT false"
-        elif engine == "sqlite":
+        if engine == "sqlite":
             return "INTEGER NOT NULL DEFAULT 0"
         else:
             return "TINYINT(1) NOT NULL DEFAULT 0"
@@ -420,21 +393,11 @@ def get_sql_type(mixed, engine: Engine) -> str:
         return "TEXT"


-def get_query_from_dict(schema, engine: Engine):
+def get_query_from_dict(schema, engine):
     q = ""
     for table_name, table_data in schema.items():
-        if engine == "postgres":
-            for attribute in sorted(table_data.keys()):
-                if "options" in table_data[attribute]:
-                    q += f"CREATE TYPE {table_data[attribute]['type']} AS ENUM ('" + "', '".join(
-                        table_data[attribute]["options"]) + "');\n"
-            q += f"CREATE TABLE \"{table_name}\" (\n"
-        else:
-            q += f"CREATE TABLE `{table_name}` (\n"
-        if engine == "postgres":
-            q += " id SERIAL PRIMARY KEY,\n"
-        elif engine == "sqlite":
+        q += f"CREATE TABLE `{table_name}` (\n"
+        if engine == "sqlite":
             q += " id INTEGER PRIMARY KEY AUTOINCREMENT,\n"
         else:
             q += " id INTEGER PRIMARY KEY AUTO_INCREMENT,\n"
@@ -445,7 +408,7 @@ def get_query_from_dict(schema, engine):
             if table_data[attribute]["type"] == "ENUM":
                 q += "('" + "', '".join(table_data[attribute]["options"]) + "')"
             q += ",\n"
-        if engine == "sqlite" or engine == "postgres":
+        if engine == "sqlite":
            q = q[:-2] + "\n);\n\n"
         else:
             q = q[:-2] + "\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;\n\n"
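Post-revert, the DDL generator emits only the two surviving dialects: backtick-quoted table names for both, with a plain SQLite closer or an InnoDB closer depending on the engine. A hand-traced sketch of one generated statement on the MySQL path (the per-column rendering sits between the hunks above, so the column line is illustrative; q[:-2] trims the final ",\n" before closing):

    CREATE TABLE `sets` (
     id INTEGER PRIMARY KEY AUTO_INCREMENT,
     code VARCHAR(8)
    ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;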
@ -453,7 +416,9 @@ def get_query_from_dict(schema, engine: Engine):
return q return q
def parse_and_import_cards(json_data: Dict, input_file: pathlib.Path, output_file: Dict, engine: Engine) -> None: def parse_and_import_cards(
json_data: Dict, input_file: pathlib.Path, output_file: Dict
) -> None:
""" """
Parse the JSON cards and input them into the database Parse the JSON cards and input them into the database
@@ -462,32 +427,32 @@ def parse_and_import_cards(json_data: Dict, input_file: pathlib.Path, output_file: Dict, engine: Engine) -> None:
     """
     LOGGER.info("Building sets")
     if "data" in json_data:
-        sql_dict_insert(json_data["meta"], "meta", output_file, engine)
+        sql_dict_insert(json_data["meta"], "meta", output_file)
         json_data = json_data["data"]

     for set_code, set_data in json_data.items():
         LOGGER.info(f"Inserting set row for {set_code}")
-        set_insert_values = handle_set_row_insertion(set_data, engine)
-        sql_dict_insert(set_insert_values, "sets", output_file, engine)
+        set_insert_values = handle_set_row_insertion(set_data)
+        sql_dict_insert(set_insert_values, "sets", output_file)

         for card in set_data.get("cards"):
             LOGGER.debug(f"Inserting card row for {card.get('name')}")
-            card_attr: JsonDict = handle_card_row_insertion(card, set_code, engine)
-            sql_insert_all_card_fields(card_attr, output_file, engine)
+            card_attr: JsonDict = handle_card_row_insertion(card, set_code)
+            sql_insert_all_card_fields(card_attr, output_file)

         for token in set_data.get("tokens"):
             LOGGER.debug(f"Inserting token row for {token.get('name')}")
-            token_attr = handle_token_row_insertion(token, set_code, engine)
-            sql_dict_insert(token_attr, "tokens", output_file, engine)
+            token_attr = handle_token_row_insertion(token, set_code)
+            sql_dict_insert(token_attr, "tokens", output_file)

         for language, translation in set_data.get("translations", {}).items():
             LOGGER.debug(f"Inserting set_translation row for {language}")
             set_translation_attr = handle_set_translation_row_insertion(
                 language, translation, set_code
             )
-            sql_dict_insert(set_translation_attr, "set_translations", output_file, engine)
+            sql_dict_insert(set_translation_attr, "set_translations", output_file)


-def handle_set_row_insertion(set_data: JsonDict, engine: Engine) -> JsonDict:
+def handle_set_row_insertion(set_data: JsonDict) -> JsonDict:
     """
     This method will take the set data and convert it,
     preparing for SQLite insertion
@@ -503,15 +468,15 @@ def handle_set_row_insertion(set_data: JsonDict, engine: Engine) -> JsonDict:
             continue

         if key == "boosterV3":
-            set_insert_values[key] = modify_for_sql_insert(str(value), engine)
+            set_insert_values[key] = modify_for_sql_insert(str(value))
             continue

-        set_insert_values[key] = modify_for_sql_insert(value, engine)
+        set_insert_values[key] = modify_for_sql_insert(value)

     return set_insert_values


-def handle_card_row_insertion(card_data: JsonDict, set_name: str, engine: Engine) -> JsonDict:
+def handle_card_row_insertion(card_data: JsonDict, set_name: str) -> JsonDict:
     """
     This method will take the card data and convert it,
     preparing for SQLite insertion
@@ -529,9 +494,9 @@ def handle_card_row_insertion(card_data: JsonDict, set_name: str, engine: Engine) -> JsonDict:
             continue
         if key == "identifiers":
             for idKey, idValue in value.items():
-                card_insert_values[idKey] = modify_for_sql_insert(idValue, engine)
+                card_insert_values[idKey] = modify_for_sql_insert(idValue)
         else:
-            card_insert_values[key] = modify_for_sql_insert(value, engine)
+            card_insert_values[key] = modify_for_sql_insert(value)

     foreign_insert_values: List[JsonDict] = []
     if card_skip_keys[0] in card_data.keys():
@@ -558,7 +523,9 @@ def handle_card_row_insertion(card_data: JsonDict, set_name: str, engine: Engine) -> JsonDict:
     }


-def sql_insert_all_card_fields(card_attributes: JsonDict, output_file: Dict, engine: Engine) -> None:
+def sql_insert_all_card_fields(
+    card_attributes: JsonDict, output_file: Dict
+) -> None:
     """
     Given all of the card's data, insert the data into the
     appropriate SQLite tables.
@@ -566,26 +533,23 @@ def sql_insert_all_card_fields(card_attributes: JsonDict, output_file: Dict, engine: Engine) -> None:
     :param card_attributes: Tuple of data
     :param output_file: Output info dictionary
     """
-    sql_dict_insert(card_attributes["cards"], "cards", output_file, engine)
+    sql_dict_insert(card_attributes["cards"], "cards", output_file)

     for foreign_val in card_attributes["foreign_data"]:
-        sql_dict_insert(foreign_val, "foreign_data", output_file, engine)
+        sql_dict_insert(foreign_val, "foreign_data", output_file)

     for legal_val in card_attributes["legalities"]:
-        sql_dict_insert(legal_val, "legalities", output_file, engine)
+        sql_dict_insert(legal_val, "legalities", output_file)

     for rule_val in card_attributes["rulings"]:
-        sql_dict_insert(rule_val, "rulings", output_file, engine)
+        sql_dict_insert(rule_val, "rulings", output_file)

     if not output_file["AllPrices.json"]:
         for price_val in card_attributes["prices"]:
-            sql_dict_insert(price_val, "prices", output_file, engine)
+            sql_dict_insert(price_val, "prices", output_file)


-def handle_token_row_insertion(
-    token_data: JsonDict,
-    set_name: str,
-    engine: Engine) -> JsonDict:
+def handle_token_row_insertion(token_data: JsonDict, set_name: str) -> JsonDict:
     """
     This method will take the token data and convert it,
     preparing for SQLite insertion
@@ -598,14 +562,16 @@ def handle_token_row_insertion(token_data: JsonDict, set_name: str, engine: Engine) -> JsonDict:
     for key, value in token_data.items():
         if key == "identifiers":
             for idKey, idValue in value.items():
-                token_insert_values[idKey] = modify_for_sql_insert(idValue, engine)
+                token_insert_values[idKey] = modify_for_sql_insert(idValue)
         else:
-            token_insert_values[key] = modify_for_sql_insert(value, engine)
+            token_insert_values[key] = modify_for_sql_insert(value)

     return token_insert_values


-def handle_set_translation_row_insertion(language: str, translation: str, set_name: str) -> JsonDict:
+def handle_set_translation_row_insertion(
+    language: str, translation: str, set_name: str
+) -> JsonDict:
     """
     This method will take the set translation data and convert it,
     preparing for SQLite insertion
@@ -624,7 +590,7 @@ def handle_set_translation_row_insertion(language: str, translation: str, set_name: str) -> JsonDict:
     return set_translation_insert_values


-def parse_and_import_extras(input_file: pathlib.Path, output_file: Dict, engine: Engine) -> None:
+def parse_and_import_extras(input_file: pathlib.Path, output_file: Dict) -> None:
     """
     Parse the extra data files and input them into the database
@@ -651,7 +617,6 @@ def parse_and_import_extras(input_file: pathlib.Path, output_file: Dict, engine: Engine) -> None:
                 },
                 "prices",
                 output_file,
-                engine
             )

     if output_file["AllDeckFiles"]:
@@ -673,7 +638,7 @@ def parse_and_import_extras(input_file: pathlib.Path, output_file: Dict, engine: Engine) -> None:
                     deck_data[key] = value
                 if "fileName" not in deck_data:
                     deck_data["fileName"] = deck_file.stem
-                sql_dict_insert(deck_data, "decks", output_file, engine)
+                sql_dict_insert(deck_data, "decks", output_file)

     if output_file["Keywords.json"]:
         LOGGER.info("Inserting Keyword rows")
@@ -686,7 +651,7 @@ def parse_and_import_extras(input_file: pathlib.Path, output_file: Dict, engine: Engine) -> None:
                 continue
             for keyword in json_data[keyword_type]:
                 sql_dict_insert(
-                    {"word": keyword, "type": keyword_type}, "keywords", output_file, engine
+                    {"word": keyword, "type": keyword_type}, "keywords", output_file
                 )

     if output_file["CardTypes.json"]:
@@ -710,11 +675,12 @@ def parse_and_import_extras(input_file: pathlib.Path, output_file: Dict, engine: Engine) -> None:
                     },
                     "types",
                     output_file,
-                    engine
                 )


-def handle_foreign_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
+def handle_foreign_rows(
+    card_data: JsonDict, card_uuid: str
+) -> List[JsonDict]:
     """
     This method will take the card data and convert it,
     preparing for SQLite insertion
@@ -741,7 +707,9 @@ def handle_foreign_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
     return foreign_entries


-def handle_legal_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
+def handle_legal_rows(
+    card_data: JsonDict, card_uuid: str
+) -> List[JsonDict]:
     """
     This method will take the card data and convert it,
     preparing for SQLite insertion
@@ -759,7 +727,9 @@ def handle_legal_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
     return legalities


-def handle_ruling_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
+def handle_ruling_rows(
+    card_data: JsonDict, card_uuid: str
+) -> List[JsonDict]:
     """This method will take the card data and convert it,
     preparing for SQLite insertion
@@ -779,7 +749,9 @@ def handle_ruling_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
     return rulings


-def handle_price_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
+def handle_price_rows(
+    card_data: JsonDict, card_uuid: str
+) -> List[JsonDict]:
     """This method will take the card data and convert it,
     preparing for SQLite insertion
@@ -803,12 +775,11 @@ def handle_price_rows(card_data: JsonDict, card_uuid: str) -> List[JsonDict]:
     return prices


-def modify_for_sql_insert(data: Any, engine: Engine) -> Union[str, int, float, None]:
+def modify_for_sql_insert(data: Any) -> Union[str, int, float, None]:
     """
     Arrays and booleans can't be inserted, so we need to stringify
     :param data: Data to modify
-    :param engine: SQL engine in use
     :return: string value
     """
     if isinstance(data, (str, int, float)):
@@ -818,13 +789,11 @@ def modify_for_sql_insert(data: Any, engine: Engine) -> Union[str, int, float, None]:
         if not data:
             return None

-    if isinstance(data, list) and engine == "postgres":
-        return "{\"" + "\",\"".join(data) + "\"}"
-    elif isinstance(data, list) and data and isinstance(data[0], str):
+    if isinstance(data, list) and data and isinstance(data[0], str):
         return ",".join(data)

     if isinstance(data, bool):
-        return data if engine == "postgres" else int(data)
+        return int(data)

     if isinstance(data, dict):
         return str(data)
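The surviving conversion flattens values the MySQL/SQLite way only. A couple of worked calls, assuming the reverted function above (input values illustrative):

    modify_for_sql_insert(["W", "U", "B"])   # -> "W,U,B"  (no more Postgres '{"W","U","B"}' arrays)
    modify_for_sql_insert({"foil": "1.2"})   # -> "{'foil': '1.2'}"  (dicts stringified)
    # Booleans now always pass through int(...) instead of staying True/False for Postgres.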
@@ -832,20 +801,20 @@ def modify_for_sql_insert(data: Any, engine: Engine) -> Union[str, int, float, None]:
     return ""


-def modify_for_sql_file(data: JsonDict, engine: Engine) -> JsonDict:
+def modify_for_sql_file(data: JsonDict) -> JsonDict:
     for key in data.keys():
         if isinstance(data[key], str):
             data[key] = "'" + data[key].replace("'", "''") + "'"
         if str(data[key]) == "False":
-            data[key] = "false" if engine == "postgres" else 0
+            data[key] = 0
         if str(data[key]) == "True":
-            data[key] = "true" if engine == "postgres" else 1
+            data[key] = 1
         if data[key] is None:
             data[key] = "NULL"

     return data


-def sql_dict_insert(data: JsonDict, table: str, output_file: Dict, engine: Engine) -> None:
+def sql_dict_insert(data: JsonDict, table: str, output_file: Dict) -> None:
     """
     Insert a dictionary into a sqlite table
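modify_for_sql_file renders a row for the textual .sql dump: strings get single-quote doubling, booleans become 0/1 (the true/false literals were Postgres-only), and None becomes the literal NULL. A worked row, assuming the reverted function above (values illustrative):

    row = {"name": "Urza's Tower", "isOnlineOnly": False, "flavorText": None}
    modify_for_sql_file(row)
    # row == {"name": "'Urza''s Tower'", "isOnlineOnly": 0, "flavorText": "NULL"}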
@@ -854,8 +823,8 @@ def sql_dict_insert(data: JsonDict, table: str, output_file: Dict, engine: Engine) -> None:
     :param output_file: Output info dictionary
     """
     try:
-        if engine != "sqlite":
-            data = modify_for_sql_file(data, engine)
+        if output_file["path"].suffix == ".sql":
+            data = modify_for_sql_file(data)
         query = (
             "INSERT INTO "
             + table
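The reverted gate keys off the output suffix rather than the engine name, which amounts to the same split: only a textual .sql dump needs pre-rendered literals, while the SQLite path presumably hands raw values to the sqlite3 handle (that code lies outside this diff). A condensed sketch of the decision:

    # Hypothetical condensation of the gate above
    if output_file["path"].suffix == ".sql":
        data = modify_for_sql_file(data)   # write-ready literals for the text dump
    # else: values stay raw for binding through the sqlite3 connection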