mirror of https://github.com/correl/mtgsqlive.git
synced 2024-12-01 11:09:57 +00:00

fenhl fixes

parent 26c12d4aa3, commit 54c713e7d6
3 changed files with 56 additions and 195 deletions
json_to_sql.py (82 lines changed)

@@ -4,36 +4,8 @@ import sqlite3
 import time
 import os
 import sys
-import re
-
-def getVal(data, field):
-    val = data.get(field)
-    if val:
-        return str(val).replace('”', '"').replace('“', '"').replace('’', "'").replace("\\", "")
-    return val
-
-def fixJson_foreign(data):
-    if data:
-        data = data.replace("'language'", '"language"')
-        data = data.replace("'multiverseid'", '"multiverseid"')
-        data = data.replace("'name'", '"name"')
-        data = data.replace("', \"", '", "')
-        data = data.replace("\": '", "\": \"")
-        data = data.replace("'}", '"}')
-        return data
-
-def fixJson(data):
-    if data:
-        p = re.compile("[\w]'[\w]")
-        for m in p.finditer(data):
-            data = data.replace(m.group(), m.group().replace("'", "TMP_HOLD"))
-
-        p = re.compile("[\w]\"[\w]")
-        for m in p.finditer(data):
-            data = data.replace(m.group(), m.group().replace("'", "DREAK_HOLD"))
-        data = data.replace("'", '"').replace("TMP_HOLD", "'").replace("DREAK_HOLD", '"')
-        return data

 # Create the database
 def create_db(database_connection):
     c = database_connection.cursor()
     c.execute('create table cards (id, layout, name, names, manaCost, cmc, colors, colorIdentity, type, supertypes, types, subtypes, rarity, text, flavor, artist, number, power, toughness, loyalty, multiverseid, variations, imageName, watermark, border, timeshifted, hand, life, reserved, releaseDate, starter, rulings, foreignNames, printings, originalText, originalType, legalities, source, setName, setCode, setReleaseDate, mciNumber)')
@@ -41,8 +13,15 @@ def create_db(database_connection):
     database_connection.commit()
     c.close()

+def getVal(data, field):
+    value = data.get(field)
+    if value:
+        return json.dumps(value)
+
 def json_to_db(json_file_opened, database_connection):
     c = database_connection.cursor()

     # Insert last updated time to database (so if you use the same people know when last updated)
     c.execute('insert into lastUpdated values (?)', [str(time.strftime("%Y-%m-%d %H:%M:%S"))])

     # Get the setnames in the AllSets file and put them into a dictionary for later use
@@ -62,15 +41,15 @@ def json_to_db(json_file_opened, database_connection):
         thisCard_id = getVal(thisCard, "id")
         layout = getVal(thisCard, "layout")
         name = getVal(thisCard, "name")
-        names = fixJson( getVal(thisCard, "names") )
+        names = getVal(thisCard, "names")
         manaCost = getVal(thisCard, "manaCost")
         cmc = getVal(thisCard, "cmc")
-        colors = fixJson( getVal(thisCard, "colors") )
-        colorIdentity = fixJson( getVal(thisCard, "colorIdentity") )
+        colors = getVal(thisCard, "colors")
+        colorIdentity = getVal(thisCard, "colorIdentity")
         thisCard_type = getVal(thisCard, "type")
-        supertypes = fixJson( getVal(thisCard, "supertypes") )
-        types = fixJson( getVal(thisCard, "types") )
-        subtypes = fixJson( getVal(thisCard, "subtypes") )
+        supertypes = getVal(thisCard, "supertypes")
+        types = getVal(thisCard, "types")
+        subtypes = getVal(thisCard, "subtypes")
         rarity = getVal(thisCard, "rarity")
         text = getVal(thisCard, "text")
         flavor = getVal(thisCard, "flavor")
@@ -90,37 +69,38 @@ def json_to_db(json_file_opened, database_connection):
         reserved = getVal(thisCard, "reserved")
         releaseDate = getVal(thisCard, "releaseDate")
         starter = getVal(thisCard, "starter")
-        rulings = ( getVal(thisCard, "rulings") )
-        foreignNames = fixJson_foreign( getVal(thisCard, "foreignNames") )
-        printings = fixJson( getVal(thisCard, "printings") )
+        rulings = getVal(thisCard, "rulings")
+        foreignNames = getVal(thisCard, "foreignNames")
+        printings = getVal(thisCard, "printings")
         originalText = getVal(thisCard, "originalText")
         originalType = getVal(thisCard, "originalType")
-        legalities = fixJson( getVal(thisCard, "legalities") )
+        legalities = getVal(thisCard, "legalities")
         source = getVal(thisCard, "source")
         mciNumber = getVal(thisCard, "mciNumber")

         thisCard_data = [thisCard_id, layout, name, names, manaCost, cmc, colors, colorIdentity, thisCard_type, supertypes, types, subtypes, rarity, text, flavor, artist, number, power, toughness, loyalty, multiverseid, variations, imageName, watermark, border, timeshifted, hand, life, reserved, releaseDate, starter, rulings, foreignNames, printings, originalText, originalType, legalities, source, setName, thisSet, setReleaseDate, mciNumber]

         # Insert thisCard into the database
         c.execute('insert into cards values (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)', thisCard_data)
     database_connection.commit()
     c.close()

 def main():
     i = sys.argv[1] # Should create new DB
-    d = os.path.expanduser(sys.argv[2]) # File location for database
+    db_path = os.path.expanduser(sys.argv[2]) # File location for database

     if (i == '1'):
-        if os.path.isfile(d):
-            os.remove(d)
-        d = sqlite3.connect(d)
-        create_db(d)
+        if os.path.isfile(db_path):
+            os.remove(db_path)
+        db_path = sqlite3.connect(db_path)
+        create_db(db_path)
     else:
-        d = sqlite3.connect(d)
+        db_path = sqlite3.connect(db_path)

-    xml = os.path.expanduser(sys.argv[3]) # File location for input file
-    xml = json.load(open(xml, 'r'))
+    json_path = os.path.expanduser(sys.argv[3]) # File location for input file
+    json_path = json.load(open(json_path, 'r'))

-    json_to_db(xml, d)
+    json_to_db(json_path, db_path)

 if __name__ == '__main__':
     main()
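Taken together, the json_to_sql.py changes drop the string-patching layer (fixJson, fixJson_foreign, the curly-quote replacements) in favour of storing every list or dict field as JSON text via json.dumps. A minimal sketch of that convention, using an in-memory database with made-up column and card data rather than anything from the repo:

import json
import sqlite3

def get_val(data, field):
    # Mirrors the new getVal: absent fields become None (NULL in SQLite),
    # present fields are stored as JSON text.
    value = data.get(field)
    if value:
        return json.dumps(value)

conn = sqlite3.connect(":memory:")
conn.execute("create table cards (name, colors)")  # illustrative two-column table

card = {"name": "Lightning Bolt", "colors": ["Red"]}  # illustrative card data
conn.execute("insert into cards values (?, ?)",
             [get_val(card, "name"), get_val(card, "colors")])

stored = conn.execute("select colors from cards").fetchone()[0]
assert json.loads(stored) == ["Red"]  # round-trips without any quote fixing

Because the stored text is real JSON, the export side can simply json.loads it instead of repairing quotes line by line.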
sql_to_json.py (160 lines changed)
@@ -5,19 +5,18 @@ import os
 import fileinput
 import sys

+def id(x):
+    return x
+
+DECODERS = { 'setName': id, 'setCode': id, 'setReleaseDate': id }
+
+def dict_from_row(row):
+    return {k: DECODERS.get(k, json.loads)(v) for k, v in zip(row.keys(), row) if v is not None}
+
 def set_dictionary(row):
     return dict(zip(row.keys(), row))

-def remove_empty_keys(d):
-    for k in list(d):
-        if not d[k]:
-            try:
-                if d[k] == 0:
-                    continue
-                del d[k]
-            except:
-                print("BOOBOO")
-    return d

 def db_to_json(database_connection):
     database_connection.row_factory = sqlite3.Row # Enable keys for the rows
@@ -28,16 +27,16 @@ def db_to_json(database_connection):
     returnData = []
     rows = cursor.fetchall()
     for setCode in rows:
-        setCode = remove_empty_keys(dict_from_row(setCode))
+        setCode = set_dictionary(setCode)
         cursor.execute("SELECT * FROM cards WHERE setCode = '%s'" % setCode["setCode"])
         card_rows = cursor.fetchall()

         setName = None
         setReleaseDate = None
         for row in card_rows:
-            row = remove_empty_keys(dict_from_row(row))
+            row = dict_from_row(row)
             returnData.append(row)
-            if not setName and not setReleaseDate:
+            if not setName or not setReleaseDate:
                 setName = row["setName"]
                 setReleaseDate = row["setReleaseDate"]

@@ -50,138 +49,13 @@ def db_to_json(database_connection):
     return mainDict

 def main():
-    d = os.path.expanduser(sys.argv[1]) # File location for database
-    d = sqlite3.connect(d)
+    db_path = sqlite3.connect(os.path.expanduser(sys.argv[1])) # File location for database
+    file_path = os.path.expanduser(sys.argv[2]) # File location for output

-    xml = "/tmp/Output.tmp.json"
-    xml2 = os.path.expanduser(sys.argv[2]) # File location for output
+    dictionary = db_to_json(db_path)

-    json_code = json.dumps(db_to_json(d), sort_keys=True, indent=2)
-
-    writeFile = open(xml, 'w')
-    writeFile.write(json_code)
-    writeFile.close()
-
-    int_strings = ('cmc":', 'loyalty":', 'multiverseid":', 'hand":', 'life":')
-    bool_strings = ('reserved":', 'starter":', 'timeshifted":')
-    skip_strings = ('setCode":', 'setName":', 'setReleaseDate":')
-    bonus_comma_strings = ('variations":', 'watermark":')
-    fix_rarity = False
-    # Additional hacks to now cleanup the file, needs to be redone / hopefully not needed
-    with open(xml) as f:
-        with open(xml2, 'w') as f2:
-            for line in f.readlines():
-                # These still need proper parsing
-                if '"rulings":' in line: continue
-
-                if any(s in line for s in skip_strings): continue
-                elif any(s in line for s in int_strings):
-                    line = str_to_int(line)
-
-                if replace_and_write_these_keys(f2, line, "colorIdentity"): continue
-                if replace_and_write_these_keys(f2, line, "colors"): continue
-                if replace_and_write_these_keys(f2, line, "printings"): continue
-                if replace_and_write_these_keys(f2, line, "supertypes"): continue
-                if replace_and_write_these_keys(f2, line, "subtypes"): continue
-                if replace_and_write_these_keys(f2, line, "legalities"): continue
-                if replace_and_write_these_keys(f2, line, "types"): continue
-                if replace_and_write_these_keys(f2, line, "names"): continue
-
-                if replace_and_write_these_keys(f2, line, "rulings"): continue
-                if replace_and_write_these_keys(f2, line, "foreignNames"): continue
-
-                if any(s in line for s in bool_strings):
-                    line = str_to_bool(line)
-
-                if 'token card' in line:
-                    fix_rarity = True
-                elif fix_rarity and 'rarity":' in line:
-                    while line.strip()[-1:] != '"':
-                        line = line[:-1]
-                    fix_rarity = False
-
-                if any(s in line for s in bonus_comma_strings):
-                    f2.write(",")
-
-                if replace_and_write_these_keys(f2, line, "variations"): continue
-
-                f2.write(line)
-            f2.close()
-    cleanup_json(xml2)
-    os.remove(xml)
-
-def str_to_int(line):
-    try:
-        line_to_int = line.index('": "')
-        line_after_int = line.index('",')
-    except:
-        return line
-
-    try:
-        line = line[:line_to_int] + '": ' + line[line_to_int + 4:line_after_int] + line[line_after_int + 1:]
-    except:
-        print(line_to_int, line_after_int, line)
-
-    return line
-
-def str_to_bool(line):
-    try:
-        line_to_int = line.index('": "')
-        line_after_int = line.index('",')
-    except:
-        return line
-
-    try:
-        line = line[:line_to_int] + '": ' + line[line_to_int + 4:line_after_int].lower() + line[line_after_int + 1:]
-    except:
-        print(line_to_int, line_after_int, line[line_to_int + 4:line_after_int], line)
-
-    return line
-
-def cleanup_json(file_path):
-    jsonFile = open(file_path, "r")
-    data = json.load(jsonFile)
-    jsonFile.close()
-
-    jsonFile = open(file_path, "w+")
-    jsonFile.write(json.dumps(data, indent=4, sort_keys=True))
-    jsonFile.close()
-
-def replace_and_write_these_keys(file_opened, line, key_val):
-    retVal = str_to_json(line, key_val)
-    if retVal:
-        if key_val != "variations":
-            file_opened.write(retVal)
-        else:
-            while retVal.strip()[-1:] != "]":
-                retVal = retVal[:-1]
-            file_opened.write(retVal)
-        return True
-
-# Yes this is a mess, but it works for now
-def str_to_json(line, key_val):
-    if '"' + key_val + '":' in line:
-        line_index = line.index('"[')
-
-        if '377105' in line: # This card is just a mess
-            line = line.replace("Schlafender Drache\\\"", "Schlafender Drache_v_v_")
-            line = line[:line_index] + line[line_index:].replace('\\"', '"')[1:]
-            line = line.replace("_v_v_", "\\\"")
-            line = line.replace('Kongming, "Sleeping Dragon"', 'Kongming, \\"Sleeping Dragon\\"')
-        else:
-            line = line[:line_index] + line[line_index:].replace('\\"', '"')[1:]
-
-        while line.strip()[-1:] != "]":
-            line = line[:-1]
-
-        try: line = line[:line_index] + json.dumps(json.loads(line[line_index:]), indent=2)
-        except:
-            line = line
-
-        if key_val != "types":
-            line += ","
-        line += "\n"
-        return line
+    with open(file_path, 'w') as json_f:
+        json.dump(dictionary, json_f, sort_keys=True, indent=4)

 if __name__ == '__main__':
     main()
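The matching change on the export side is dict_from_row plus the DECODERS map: every column is decoded with json.loads except the plain-text set columns, and NULL columns are dropped by the `if v is not None` filter. A self-contained sketch of that pattern (illustrative table and data; the pass-through helper is called identity here instead of the diff's id, purely to avoid shadowing the Python builtin):

import json
import sqlite3

def identity(x):
    return x

DECODERS = {'setName': identity, 'setCode': identity, 'setReleaseDate': identity}

def dict_from_row(row):
    # Column names come from sqlite3.Row; anything not listed in DECODERS
    # is assumed to be JSON text and decoded, NULLs are skipped.
    return {k: DECODERS.get(k, json.loads)(v) for k, v in zip(row.keys(), row) if v is not None}

conn = sqlite3.connect(":memory:")
conn.row_factory = sqlite3.Row
conn.execute("create table cards (name, colors, setCode)")  # illustrative schema
conn.execute("insert into cards values (?, ?, ?)",
             [json.dumps("Shock"), json.dumps(["Red"]), "LEA"])  # illustrative row

row = conn.execute("select * from cards").fetchone()
print(dict_from_row(row))  # {'name': 'Shock', 'colors': ['Red'], 'setCode': 'LEA'}

That is what lets main() shrink to a db_to_json call plus a single json.dump, with the str_to_int/str_to_bool/str_to_json text surgery deleted.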
testing_mac.sh (9 lines changed, Normal file → Executable file)
@@ -1 +1,8 @@
-./json_to_sql.py 1 ~/Desktop/Real.db ~/Desktop/AllSets-x.json; ./sql_to_json.py ~/Desktop/Real.db ~/Desktop/MyOutput.json; ./json_to_sql.py 1 ~/Desktop/Mine.db ~/Desktop/MyOutput.json; ./sql_to_json.py ~/Desktop/Mine.db ~/Desktop/MySecondOutput.json; diff ~/Desktop/MyOutput.json ~/Desktop/MySecondOutput.json > ~/Desktop/Diff.txt; md5sum ~/Desktop/MyOutput.json ~/Desktop/MySecondOutput.json
+#!/usr/bin/env bash
+
+./json_to_sql.py 1 ~/Desktop/Real.db ~/Desktop/AllSets-x.json;
+./sql_to_json.py ~/Desktop/Real.db ~/Desktop/MyOutput.json;
+./json_to_sql.py 1 ~/Desktop/Mine.db ~/Desktop/MyOutput.json;
+./sql_to_json.py ~/Desktop/Mine.db ~/Desktop/MySecondOutput.json;
+diff ~/Desktop/MyOutput.json ~/Desktop/MySecondOutput.json > ~/Desktop/Diff.txt;
+md5sum ~/Desktop/MyOutput.json ~/Desktop/MySecondOutput.json
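The rewritten testing_mac.sh is a losslessness check: it converts the source JSON into a database, exports it back to JSON, pushes that output through the pipeline a second time, and then diffs and checksums the two exports, which should be identical. The same comparison could be sketched in Python (hypothetical file names matching the script, not repo code):

import hashlib

def md5_of(path):
    with open(path, "rb") as f:
        return hashlib.md5(f.read()).hexdigest()

first = md5_of("MyOutput.json")         # output of the first round trip
second = md5_of("MySecondOutput.json")  # output of the second round trip
print("stable round trip" if first == second else "outputs differ")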