
Format strings, CLI interface, Single request to ffdecks.com basic API instead of per-card requests

Jörn-Michael Miehe committed 2018-11-02 23:13:17 +01:00
parent 08bd5b60b4
commit 77c0e9d3fb
6 changed files with 142 additions and 160 deletions
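The headline change: instead of one ffdecks.com request per card (the old card.py/cardLoader path), the tool now pulls the whole card database with a single request to the basic API and filters it locally by serial-number prefix. A minimal standalone sketch of that flow, using the endpoint and field names that appear in the new main.py below; treat them as assumptions about the ffdecks.com API:

# Sketch of the single-request import flow introduced by this commit.
# FURL and the "cards"/"serial_number" fields are taken from the new
# main.py in this diff; the live API's payload may of course differ.
import json
import requests

FURL = "https://ffdecks.com/api/cards/basic"  # FFDecks API URL

def fetch_opus_cards(opusid="7"):
    # one HTTP round trip, then a local filter by serial number prefix
    ffdecks = json.loads(requests.get(FURL).content.decode("utf-8"))
    return [c for c in ffdecks["cards"] if c["serial_number"].startswith(opusid)]

if __name__ == "__main__":
    print(len(fetch_opus_cards("7")), "cards match Opus 7")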


@@ -7,3 +7,4 @@ __pycache__
 Dockerfile
 README.md
+ffdecks.json


@@ -3,26 +3,34 @@
 Card import tool for [Final Fantasy TCG Complete](https://steamcommunity.com/sharedfiles/filedetails/?id=889160751) mod for the [Tabletop Simulator](http://berserk-games.com/tabletop-simulator/)
-## Quick Start
-If needed, customize `main.py` variables:
-- `opusid`: The Card opus you want to import
-- `num_threads`: Lower this for really weak systems
-- `opus_size`: 500 here means the script will correctly import anything containing *up to* 500 cards
-### using your system's `python3`
+## Usage
+    usage: main.py [-h] [-n COUNT] [OpusID]
+    Imports FFTCG cards for TT-Sim.
+    positional arguments:
+      OpusID                the Opus to import
+    optional arguments:
+      -h, --help            show this help message and exit
+      -n COUNT, --num_threads COUNT
+                            maximum number of concurrent requests
+### Run using your system's `python3`
 1. Make sure `PIL` and `requests` python3 libraries (or equivalent) are installed.
-1. Run `main.py` in project root directory.
-### using a `docker` container
-1. Build the container using `docker build -t ldericher/fftcgtool .` in project root directory.
-1. Run `docker run --rm -it -v "$(pwd)/out:/app/out" ldericher/fftcgtool` in project root directory.
-Output files will go to subdirectory `./out`.
+1. Run `./main.py` from project root directory.
+### Run using a `docker` container
+1. Make sure you have a working installation of `docker` software.
+1. Update your local image: `docker pull ldericher/fftcgtool`.
+1. Run `docker run --rm -it -v "$(pwd)/out:/app/out" ldericher/fftcgtool` in any directory.
+Output files will go to subdirectory `./out`. CLI arguments are supported, e.g. `docker run --rm -it -v "$(pwd)/out:/app/out" ldericher/fftcgtool -n 2 5` (imports Opus 5 using 2 threads).
 ## Future work
-- Add command line interface for `main.py` variables
 - Multiple opus import

card.py

@@ -2,55 +2,44 @@ import requests
 from PIL import Image
 from io import BytesIO
-import json
-# Base-URL of ffdecks card pages
-FFDECKURL = "https://ffdecks.com/api/cards?alternates=1&serial_number={}"
-# Card back image by Aurik
-BACKURL = "http://cloud-3.steamusercontent.com/ugc/948455238665576576/85063172B8C340602E8D6C783A457122F53F7843/"
 # Card front by Square API
 FACEURL = "https://fftcg.square-enix-games.com/theme/tcg/images/cards/full/{}_eg.jpg"
+# Card back image by Aurik
+BACKURL = "http://cloud-3.steamusercontent.com/ugc/948455238665576576/85063172B8C340602E8D6C783A457122F53F7843/"
 class Card:
+    def __init__(self, data):
+        # check if this is a card back
+        if data == 0:
+            self._serial = "0-000"
+            self._name = "[cardback]"
+            self._rarity = "X"
+            self._element = "None"
+            self._description = "None"
+            self._iurl = BACKURL
+        # else import from data
+        else:
+            self._serial = data["serial_number"]
+            self._name = data["name"]
+            self._rarity = data["rarity"][0]
+            self._element = data["element"]
+            self._description = "\n\n".join(data["abilities"])
+            self._iurl = FACEURL.format(self.get_id()) # official url
+            #self._iurl = data["image"] # ffdecks url
     # 'Shinra' (Wind, 6-048C)
     def __str__(self):
         return "'{}' ({}, {})".format(self._name, self._element, self.get_id())
-    # 6-048
+    # 6-048C
     def get_id(self):
-        return "{}-{:03}".format(self._opus, self._cardid)
+        rarity = self._rarity if self._rarity in ["C", "R", "H", "L", "S"] else ""
+        return "{}{}".format(self._serial, rarity)
-    # find card
-    def load(self, opus, cardid):
-        self._opus = opus
-        self._cardid = cardid
-        # check if this is a card back
-        if opus == 0:
-            self._rarity = ""
-            self._name = "[cardback]"
-            self._element = "None"
-            self._iurl = BACKURL
-            return True
-        try:
-            # fetch card page from ffdecks API
-            result = requests.get( FFDECKURL.format(self.get_id()) )
-            res_obj = json.loads( result.content.decode("utf-8") )
-            cname = res_obj["name"].strip()
-            # success?
-            if cname:
-                self._name = cname
-                self._iurl = res_obj["image"]
-                self._element = res_obj["element"]
-                self._description = "\n\n".join(res_obj["abilities"])
-                return True
-        except:
-            # Something went wrong
-            return False
     # return in dictionary format
     def get_dict(self):
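With load() gone, a Card is now built directly from one entry of the pre-fetched card list, or from the sentinel 0 for the card back. A small usage sketch against the new constructor; the field names match __init__ above, while the sample values are illustrative (taken from the '# Shinra' comment in the diff):

# Hypothetical usage of the new constructor-based Card class.
from card import Card

sample = {
    "serial_number": "6-048",
    "name": "Shinra",
    "rarity": "Common",              # only the first letter is kept
    "element": "Wind",
    "abilities": ["<ability text>"], # joined into the description
}

card = Card(sample)   # face image URL is built from FACEURL and the id
back = Card(0)        # sentinel 0 yields the shared card back (BACKURL)
print(card)           # 'Shinra' (Wind, 6-048C)
print(card.get_id())  # 6-048C: serial number plus single-letter rarity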

ffdecks.json

File diff suppressed because one or more lines are too long

main.py

@@ -1,59 +1,90 @@
-#!/usr/bin/env python3
-# base config
-opusid = 7 # opus prefix
-num_threads = 8 # maximum concurrent requests
-opus_size = 500 # maximum card count per opus
+#!/usr/bin/python3
+import argparse
+import json
+import logging
+import os
+import requests
+from opus import Opus
 # constants
-grid = 7, 10 # default in TTsim: 7 rows, 10 columns
-reso = 429, 600 # default in TTsim: 480x670 pixels per card
+GRID = 7, 10 # default in TTsim: 7 rows, 10 columns
+RESO = 429, 600 # default in TTsim: 480x670 pixels per card
+FURL = "https://ffdecks.com/api/cards/basic" # FFDecks API URL
-# set up logging
-import logging
-logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(threadName)s %(message)s')
-logger = logging.getLogger(__name__)
-# output directory
-import os
-if not os.path.exists("out"):
-    os.mkdir("out")
-os.chdir("out")
-# load an Opus
-from opus import Opus
-myOpus = Opus()
-myOpus.load(opusid, opus_size, num_threads)
-# compose custom deck images
-faceurls = []
-for i, image in enumerate(myOpus.get_images(grid, reso, num_threads)):
-    filename = "opus_%d_%d.jpg" % (opusid, i)
-    image.save(filename)
-    # ask for upload
-    iurl = input("Upload '%s' and paste URL: " % (filename))
-    if not iurl:
-        # add local file (maybe upload to steam cloud in cloud manager)
-        logging.warn("Using local file for '%s'." % (filename))
-        iurl = "file://" + os.path.abspath(filename)
-    faceurls.append(iurl)
-# Build json for element decks
-elementaldecks = [
-    ["Fire"],
-    ["Water"],
-    ["Lightning"],
-    ["Ice"],
-    ["Wind"],
-    ["Earth"],
-    ["Light", "Dark"]
-]
-for i, elements in enumerate(elementaldecks):
-    json_filename = "opus_%d_%s.json" % (opusid, "_".join(elements))
-    with open(json_filename, "w") as json_file:
-        cardfilter = lambda card: card._element in elements
-        json_data = myOpus.get_json("/".join(elements), grid, cardfilter, faceurls)
-        json_file.write(json_data)
-logging.info("Done. Put the generated JSON files in your 'Saved Objects' Folder.")
-logging.info("Thanks for using fftcgtool!")
+def main():
+    # Setup CLI
+    parser = argparse.ArgumentParser(
+        description='Imports FFTCG cards for TT-Sim.')
+    parser.add_argument(
+        'opusid',
+        default="7",
+        metavar="OpusID",
+        nargs="?",
+        help='the Opus to import')
+    parser.add_argument(
+        '-n', '--num_threads',
+        type=int,
+        default=20,
+        metavar="COUNT",
+        help='maximum number of concurrent requests')
+    args = parser.parse_args()
+    # Setup logging
+    logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(threadName)s %(message)s')
+    logger = logging.getLogger(__name__)
+    # Fetch and parse card database from ffdecks API
+    ffdecks_raw = requests.get(FURL)
+    ffdecks = json.loads(ffdecks_raw.content.decode("utf-8"))
+    # Load an Opus
+    opus_data = (card_data for card_data in ffdecks["cards"] if card_data["serial_number"].startswith(args.opusid))
+    myOpus = Opus(opus_data)
+    # output directory
+    if not os.path.exists("out"):
+        os.mkdir("out")
+    os.chdir("out")
+    # compose custom deck images
+    faceurls = []
+    for i, image in enumerate(myOpus.get_images(GRID, RESO, args.num_threads)):
+        filename = "opus_{}_{}.jpg".format(args.opusid, i)
+        image.save(filename)
+        # ask for upload
+        iurl = input("Upload '{}' and paste URL: ".format(filename))
+        if not iurl:
+            # add local file (maybe upload to steam cloud in cloud manager)
+            logging.warn("Using local file for '{}'.".format(filename))
+            iurl = "file://" + os.path.abspath(filename)
+        faceurls.append(iurl)
+    # Build json for element decks
+    elementaldecks = [
+        ["Fire"],
+        ["Water"],
+        ["Lightning"],
+        ["Ice"],
+        ["Wind"],
+        ["Earth"],
+        ["Light", "Dark"]
+    ]
+    for i, elements in enumerate(elementaldecks):
+        json_filename = "opus_{}_{}.json".format(args.opusid, "_".join(elements))
+        with open(json_filename, "w") as json_file:
+            cardfilter = lambda card: card._element in elements
+            json_data = myOpus.get_json(args.opusid, "/".join(elements), GRID, cardfilter, faceurls)
+            json_file.write(json_data)
+    # Bye
+    logging.info("Done. Put the generated JSON files in your 'Saved Objects' Folder.")
+    logging.info("Thanks for using fftcgtool!")
+if __name__ == "__main__":
+    main()
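The argparse block replaces the old hand-edited variables and is what produces the usage text now quoted in the README. A quick, self-contained way to check how arguments map to values, re-creating the same parser definition as main() and feeding it a sample argv that mirrors the README's `-n 2 5` docker example:

# Re-creates the parser from main() above and parses a sample argument list.
import argparse

parser = argparse.ArgumentParser(description='Imports FFTCG cards for TT-Sim.')
parser.add_argument('opusid', default="7", metavar="OpusID", nargs="?",
                    help='the Opus to import')
parser.add_argument('-n', '--num_threads', type=int, default=20,
                    metavar="COUNT", help='maximum number of concurrent requests')

args = parser.parse_args(["-n", "2", "5"])
print(args.opusid, args.num_threads)  # -> 5 2 (Opus 5, two request threads)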

opus.py

@@ -3,38 +3,10 @@ import threading
 import logging
 import json
-from card import Card
+from card import Card, BACKURL
 from PIL import Image
-# multithreaded card loading
-class cardLoader(threading.Thread):
-    def __init__(self, opusid, queue, cards):
-        threading.Thread.__init__(self)
-        self.__opusid = opusid
-        self.__queue = queue
-        self.__cards = cards
-    def run(self):
-        logger = logging.getLogger(__name__)
-        while not self.__queue.empty():
-            # take next card number
-            cardid = self.__queue.get()
-            # try to load that card
-            card = Card()
-            if card.load(self.__opusid, cardid):
-                # success!
-                self.__cards.append(card)
-                logger.info("found %s" % (card))
-            else:
-                # seems like the end of this opus!
-                logger.info("exiting")
-                break
-        if self.__queue.empty():
-            logger.warn("empty queue, maxsize might be too small!")
 # multithreaded card image loading
 class imageLoader(threading.Thread):
     def __init__(self, queue, composite, composite_lock, grid, resolution):
@@ -56,13 +28,13 @@ class imageLoader(threading.Thread):
             i, card = self.__queue.get()
             # fetch card image
-            logger.info("get image for card %s" % (card))
+            logger.info("get image for card {}".format(card))
             im = card.get_image(self.__resolution)
             # paste image in correct position
             self.__lock.acquire()
             x, y = (i % c) * w, (i // c) * h
-            logger.info("paste image %s at P%d(%d, %d)" % (im.mode, i, x, y))
+            logger.info("paste image {} at P{}({}, {})".format(im.mode, i, x, y))
             self.__composite.paste(im, (x, y, x+w, y+h))
             self.__lock.release()
@@ -71,26 +43,12 @@ class imageLoader(threading.Thread):
 class Opus:
-    def load(self, opusid, maxsize=500, threadnum=16):
-        self._opusid = opusid
+    def __init__(self, data):
         self._cards = []
-        # enqueue all card ids
-        idQueue = queue.Queue()
-        for cardid in range(1, maxsize + 1):
-            idQueue.put(cardid)
-        # start multithreading, wait for finish
-        threads = [cardLoader(opusid, idQueue, self._cards) for _ in range(threadnum)]
-        for thread in threads:
-            thread.start()
-        for thread in threads:
-            thread.join()
-        # sort every element alphabetically
-        self._cards.sort(key=lambda x: x._cardid)
-        self._cards.sort(key=lambda x: x._name)
-        self._cards.sort(key=lambda x: x._element)
+        for card_data in data:
+            card = Card(card_data)
+            self._cards.append(card)
     def __get_sheets(self, grid):
         # cards per sheet
@@ -118,13 +76,12 @@ class Opus:
             cardQueue.put((i, card))
         # add hidden face
-        hidden = Card()
-        hidden.load(0, 0)
+        hidden = Card(0)
         cardQueue.put((r * c - 1, hidden))
         # create a new card sheet
         sheet = Image.new("RGB", (c*w, r*h))
-        logger.info("New image: %dx%d" % sheet.size)
+        logger.info("New image: {}x{}".format(*sheet.size))
         # beware concurrent paste
         sheet_lock = threading.Lock()
@@ -137,18 +94,13 @@ class Opus:
         # sheet image is generated, return now
         yield sheet
-    def get_json(self, deckname, grid, cardfilter, faceurls):
+    def get_json(self, opusid, deckname, grid, cardfilter, faceurls):
         # shorthands
         r, c = grid # rows and columns per sheet
-        # get BackURL
-        back = Card()
-        back.load(0, 0)
-        backurl = back._iurl
         jsondict = { "ObjectStates": [ {
             "Name": "Deck",
-            "Nickname": "Opus %d %s" % (self._opusid, deckname),
+            "Nickname": "Opus {} {}".format(opusid, deckname),
             "Description": "",
             "Transform": {
@@ -181,7 +133,7 @@ class Opus:
         # recurring sheet dictionary
         sheetdict = { str(sheetnum): {
             "FaceURL": faceurl,
-            "BackURL": backurl,
+            "BackURL": BACKURL,
             "NumWidth": c,
             "NumHeight": r
         } }
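For reference, the sheet layout that imageLoader still uses is plain grid arithmetic: card i lands in column i % c and row i // c, and the last slot of each sheet is reserved for the hidden card back (Card(0)). A tiny sketch of that arithmetic with the constants from main.py:

# Grid/paste arithmetic as used in imageLoader.run and __get_sheets above.
GRID = 7, 10     # rows, columns per sheet (see main.py)
RESO = 429, 600  # pixels per card

r, c = GRID
w, h = RESO
print("sheet size:", (c * w, r * h))  # full composite: 4290 x 4200 px

for i in (0, 1, c, r * c - 1):        # first card, second card, start of row 2, hidden back
    x, y = (i % c) * w, (i // c) * h  # top-left corner of card i on the sheet
    print("card", i, "-> paste box", (x, y, x + w, y + h))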