
initial commit

Jörn-Michael Miehe, 2018-11-02 11:28:49 +01:00
commit 38bb6b33e3
4 changed files with 359 additions and 0 deletions

.gitignore vendored Normal file (+2)

@@ -0,0 +1,2 @@
/out/
/__pycache__/

card.py Normal file (+94)

@@ -0,0 +1,94 @@
import requests
from PIL import Image
from io import BytesIO
from lxml import etree

# Base URL of fftcgmognet card pages
TCGMOGURL = "http://www.fftcgmognet.com/card/"

# Card back image by Aurik
BACKURL = "http://cloud-3.steamusercontent.com/ugc/948455238665576576/85063172B8C340602E8D6C783A457122F53F7843/"

# Possible rarity suffixes
RARITIES = ["C", "R", "H", "L", "S"]

# If fftcgmognet alters its page design, these XPath expressions *might* go stale
XP_IMAGEURL = 'string(//div[@class="col-xs-12 col-sm-5 text-center mog-cardpage-image"]//img/@src)'
XP_CARDNAME = 'string(//div[@class="col-xs-12 col-sm-7 box mog-cardpage-props"]/div[@class="row"][1]/div[2])'
XP_ELEMENT = 'string(//div[@class="col-xs-12 col-sm-7 box mog-cardpage-props"]/div[@class="row"][3]/div[2])'
XP_DESCRIPT = 'string(//div[@class="col-xs-12 col-sm-7 box mog-cardpage-props"]/div[@class="row"][7]/div[2])'


class Card:
    # 'Shinra' (Wind, 6-048C)
    def __str__(self):
        return "'{}' ({}, {})".format(self._name, self._element, self.get_id())

    # 6-048C
    def get_id(self):
        return "{}-{:03}{}".format(self._opus, self._cardid, self._rarity)

    # find card
    def load(self, opus, cardid):
        self._opus = opus
        self._cardid = cardid

        # check if this is a card back
        if opus == 0:
            self._rarity = ""
            self._name = "[cardback]"
            self._element = "None"
            self._description = ""  # card backs have no description
            self._iurl = BACKURL
            return True

        # rarity was not given (but is needed for the mognet URL!)
        for rarity in RARITIES:
            # assume some rarity
            self._rarity = rarity

            # try to fetch card name
            html = requests.get(TCGMOGURL + self.get_id())
            doc = etree.HTML(html.content)
            cname = doc.xpath(XP_CARDNAME).strip()

            # succeed or retry with next rarity tier
            if cname:
                self._name = cname
                self._iurl = doc.xpath(XP_IMAGEURL).strip()
                self._element = doc.xpath(XP_ELEMENT).strip()
                self._description = doc.xpath(XP_DESCRIPT).strip()
                return True

        # no fitting rarity found
        return False

    # return in dictionary format (Tabletop Simulator card object)
    def get_dict(self):
        return {
            "Nickname": self._name,
            "Description": self._description,
            "Name": "Card",
            "Transform": {
                "scaleX": 2.17822933,
                "scaleY": 1.0,
                "scaleZ": 2.17822933
            },
            "Locked": False,
            "Grid": True,
            "Snap": True,
            "Autoraise": True,
            "Sticky": True,
            "Tooltip": True,
            "GridProjection": False,
            "SidewaysCard": False,
            "Hands": True
        }

    # download and resize card image
    def get_image(self, resolution):
        response = requests.get(self._iurl)
        im = Image.open(BytesIO(response.content))
        im = im.convert("RGB")
        im = im.resize(resolution, Image.BICUBIC)
        return im
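
For reference, a minimal sketch of exercising Card on its own (assuming network access to fftcgmognet; opus 6, card 48 is just the example from the comments above, and the output filename is made up for illustration):

from card import Card

card = Card()
# load() tries the rarity suffixes C, R, H, L, S until the mognet page yields a card name
if card.load(6, 48):
    print(card)  # e.g. 'Shinra' (Wind, 6-048C)
    card.get_image((480, 670)).save("preview.jpg")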

main.py Executable file (+57)

@@ -0,0 +1,57 @@
#!/usr/bin/env python3

# base config
opusid = 6

# constants
grid = 7, 10     # default in TTsim: 7 rows, 10 columns
reso = 480, 670  # default in TTsim: 480x670 pixels per card

# set up logging
import logging
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(threadName)s %(message)s')
logger = logging.getLogger(__name__)

# output directory
import os
if not os.path.exists("out"):
    os.mkdir("out")
os.chdir("out")

# load an Opus (note: maxsize=2, threadnum=2 here only try the first two card ids)
from opus import Opus
myOpus = Opus()
myOpus.load(opusid, 2, 2)

# compose custom deck images
faceurls = []
for i, image in enumerate(myOpus.get_images(grid, reso)):
    filename = "opus_%d_%d.jpg" % (opusid, i)
    image.save(filename)

    # ask for upload
    iurl = input("Upload '%s' and paste URL: " % (filename))
    if not iurl:
        # fall back to a local file (could be uploaded to the Steam Cloud via the cloud manager later)
        logging.warning("Using local file for '%s'." % (filename))
        iurl = "file://" + os.path.abspath(filename)

    faceurls.append(iurl)

# build JSON for elemental decks
elementaldecks = [
    ["Fire"],
    ["Water"],
    ["Lightning"],
    ["Ice"],
    ["Wind"],
    ["Earth"],
    ["Light", "Dark"]
]

for i, elements in enumerate(elementaldecks):
    json_filename = "opus_%d_%s.json" % (opusid, "_".join(elements))
    with open(json_filename, "w") as json_file:
        cardfilter = lambda card: card._element in elements
        json_data = myOpus.get_json("/".join(elements), grid, cardfilter, faceurls)
        json_file.write(json_data)

logging.info("Done. Put the generated JSON files in your 'Saved Objects' folder.")
logging.info("Thanks for using fftcgtool!")

opus.py Normal file (+206)

@@ -0,0 +1,206 @@
import queue
import threading
import logging
import json

from card import Card
from PIL import Image


# multithreaded card loading
class cardLoader(threading.Thread):
    def __init__(self, opusid, queue, cards):
        threading.Thread.__init__(self)
        self.__opusid = opusid
        self.__queue = queue
        self.__cards = cards

    def run(self):
        logger = logging.getLogger(__name__)
        while not self.__queue.empty():
            # take next card number
            cardid = self.__queue.get()

            # try to load that card
            card = Card()
            if card.load(self.__opusid, cardid):
                # success!
                self.__cards.append(card)
                logger.info("found %s" % (card))
            else:
                # seems like the end of this opus!
                logger.info("exiting")
                break

        if self.__queue.empty():
            logger.warning("empty queue, maxsize might be too small!")


# multithreaded card image loading
class imageLoader(threading.Thread):
    def __init__(self, queue, composite, composite_lock, grid, resolution):
        threading.Thread.__init__(self)
        self.__queue = queue
        self.__composite = composite
        self.__lock = composite_lock
        self.__grid = grid
        self.__resolution = resolution

    def run(self):
        logger = logging.getLogger(__name__)

        # shorthands
        r, c = self.__grid        # rows and columns per sheet
        w, h = self.__resolution  # width, height per card

        while not self.__queue.empty():
            # take next card
            i, card = self.__queue.get()

            # fetch card image
            logger.info("get image for card %s" % (card))
            im = card.get_image(self.__resolution)

            # paste image in correct position
            self.__lock.acquire()
            x, y = (i % c) * w, (i // c) * h
            logger.info("paste image %s at P%d(%d, %d)" % (im.mode, i, x, y))
            self.__composite.paste(im, (x, y, x+w, y+h))
            self.__lock.release()

            # image is processed
            self.__queue.task_done()


class Opus:
    def load(self, opusid, maxsize=500, threadnum=16):
        self._opusid = opusid
        self._cards = []

        # enqueue all card ids
        idQueue = queue.Queue()
        for cardid in range(1, maxsize + 1):
            idQueue.put(cardid)

        # start multithreading, wait for finish
        threads = [cardLoader(opusid, idQueue, self._cards) for _ in range(threadnum)]
        for thread in threads:
            thread.start()
        for thread in threads:
            thread.join()

        # sort by element, then name, then card id (stable sorts applied in reverse order)
        self._cards.sort(key=lambda x: x._cardid)
        self._cards.sort(key=lambda x: x._name)
        self._cards.sort(key=lambda x: x._element)

    def __get_sheets(self, grid):
        # cards per sheet (the last grid slot is reserved for the hidden card back)
        count = grid[0]*grid[1] - 1

        # flat copy
        cards = self._cards

        # while there are cards
        while cards:
            # get a chunk
            yield cards[:count]
            # remove that chunk
            cards = cards[count:]

    def get_images(self, grid, resolution, threadnum=16):
        logger = logging.getLogger(__name__)

        # shorthands
        r, c = grid        # rows and columns per sheet
        w, h = resolution  # width, height per card

        for sheet in self.__get_sheets(grid):
            # enqueue a sheet of cards
            cardQueue = queue.Queue()
            for i, card in enumerate(sheet):
                cardQueue.put((i, card))

            # add hidden face in the last slot
            hidden = Card()
            hidden.load(0, 0)
            cardQueue.put((r * c - 1, hidden))

            # create a new card sheet
            sheet = Image.new("RGB", (c*w, r*h))
            logger.info("New image: %dx%d" % sheet.size)

            # beware concurrent writes
            sheet_lock = threading.Lock()

            # start multithreading, wait for finish
            for _ in range(threadnum):
                imageLoader(cardQueue, sheet, sheet_lock, grid, resolution).start()
            cardQueue.join()

            # sheet image is generated, return now
            yield sheet

    def get_json(self, deckname, grid, cardfilter, faceurls):
        # shorthands
        r, c = grid  # rows and columns per sheet

        # get BackURL
        back = Card()
        back.load(0, 0)
        backurl = back._iurl

        jsondict = { "ObjectStates": [ {
            "Name": "Deck",
            "Nickname": "Opus %d %s" % (self._opusid, deckname),
            "Description": "",

            "Transform": {
                "scaleX": 2.17822933,
                "scaleY": 1.0,
                "scaleZ": 2.17822933
            },

            "Locked": False,
            "Grid": True,
            "Snap": True,
            "Autoraise": True,
            "Sticky": True,
            "Tooltip": True,
            "GridProjection": False,
            "Hands": False,
            "SidewaysCard": False,

            "DeckIDs": [],
            "CustomDeck": {},
            "ContainedObjects": []
        } ] }

        for sheetnum, sheet in enumerate(self.__get_sheets(grid)):
            # get current face
            faceurl = faceurls[sheetnum]

            # first sheet is "CustomDeck 1"
            sheetnum = sheetnum + 1

            # recurring sheet dictionary
            sheetdict = { str(sheetnum): {
                "FaceURL": faceurl,
                "BackURL": backurl,
                "NumWidth": c,
                "NumHeight": r
            } }

            for cardnum, card in enumerate(sheet):
                if not cardfilter(card):
                    continue

                # cardid 123 is on "CustomDeck 1", 3rd row, 4th column
                cardid = sheetnum * 100 + (cardnum // c) * 10 + (cardnum % c) * 1
                jsondict["ObjectStates"][0]["DeckIDs"].append(cardid)

                # add card object to ContainedObjects
                carddict = card.get_dict()
                carddict["CardID"] = cardid
                carddict["CustomDeck"] = sheetdict
                jsondict["ObjectStates"][0]["ContainedObjects"].append(carddict)

            # add sheet to CustomDecks
            jsondict["ObjectStates"][0]["CustomDeck"].update(sheetdict)

        return json.dumps(jsondict, indent=2)
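
For reference, the CardID scheme in get_json packs sheet number and grid position into one integer: on the first sheet (sheetnum 1), the card with cardnum 23 sits in row 23 // 10 = 2 and column 23 % 10 = 3, so its CardID is 1 * 100 + 2 * 10 + 3 = 123, i.e. "CustomDeck 1", 3rd row, 4th column, exactly as the inline comment says. This encoding assumes at most 10 columns and 10 rows per sheet, which the default 7x10 grid satisfies.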