order and api and more

This commit is contained in:
2025-04-17 00:09:16 -04:00
parent 593e8960b7
commit 21408af48c
31 changed files with 1924 additions and 542 deletions

View File

@@ -1,15 +1,11 @@
from app.services.base_service import BaseService
from app.services.card_service import CardService
from app.services.order_service import OrderService
from app.services.service_manager import ServiceManager
from app.services.file_processing_service import FileProcessingService
from app.services.inventory_service import InventoryService
from app.services.service_registry import ServiceRegistry
__all__ = [
'BaseService',
'CardService',
'OrderService',
'ServiceManager',
'FileProcessingService',
'InventoryService',
'ServiceRegistry'
'InventoryService'
]

View File

@@ -4,6 +4,11 @@ import os
from pathlib import Path
from jinja2 import Environment, FileSystemLoader
from weasyprint import HTML
import logging
import asyncio
from concurrent.futures import ThreadPoolExecutor
logger = logging.getLogger(__name__)
class AddressLabelService:
def __init__(self):
@@ -16,8 +21,9 @@ class AddressLabelService:
self.return_address_path = "file://" + os.path.abspath("app/data/assets/images/ccrcardsaddress.png")
self.output_dir = "app/data/cache/tcgplayer/address_labels/"
os.makedirs(self.output_dir, exist_ok=True)
self.executor = ThreadPoolExecutor()
def generate_labels_from_csv(self, csv_path: str, label_type: Literal["dk1201", "dk1241"]) -> List[str]:
async def generate_labels_from_csv(self, csv_path: str, label_type: Literal["dk1201", "dk1241"]) -> List[str]:
"""Generate address labels from a CSV file and save them as PDFs.
Args:
@@ -29,17 +35,30 @@ class AddressLabelService:
"""
generated_files = []
with open(csv_path, 'r') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
# Generate label for each row
pdf_path = self._generate_single_label(row, label_type)
if pdf_path:
generated_files.append(str(pdf_path))
# Read CSV file in a thread pool
loop = asyncio.get_event_loop()
rows = await loop.run_in_executor(self.executor, self._read_csv, csv_path)
for row in rows:
# if value of Value Of Products is greater than 50, skip
if row.get('Value Of Products') and float(row['Value Of Products']) > 50:
logger.info(f"Skipping order {row.get('Order #')} because value of products is greater than 50")
continue
# Generate label for each row
pdf_path = await self._generate_single_label(row, label_type)
if pdf_path:
generated_files.append(str(pdf_path))
return generated_files
def _generate_single_label(self, row: Dict[str, str], label_type: Literal["dk1201", "dk1241"]) -> Optional[str]:
def _read_csv(self, csv_path: str) -> List[Dict[str, str]]:
"""Read CSV file and return list of rows."""
with open(csv_path, 'r') as csvfile:
reader = csv.DictReader(csvfile)
return list(reader)
async def _generate_single_label(self, row: Dict[str, str], label_type: Literal["dk1201", "dk1241"]) -> Optional[str]:
"""Generate a single address label PDF.
Args:
@@ -67,11 +86,15 @@ class AddressLabelService:
# Render HTML
html_content = self.templates[label_type].render(**template_data)
# Generate PDF
# Generate PDF in a thread pool
loop = asyncio.get_event_loop()
pdf_path = self.output_dir + f"{row['Order #']}_{label_type}.pdf"
HTML(string=html_content).write_pdf(str(pdf_path))
await loop.run_in_executor(
self.executor,
lambda: HTML(string=html_content).write_pdf(str(pdf_path))
)
return pdf_path
except Exception as e:
print(f"Error generating label for order {row.get('Order #', 'unknown')}: {str(e)}")
logger.error(f"Error generating label for order {row.get('Order #', 'unknown')}: {str(e)}")
return None

View File

@@ -1,15 +1,12 @@
from typing import Type, TypeVar, Generic, List, Optional, Any
from sqlalchemy.orm import Session
from app.db.database import Base
from app.services.service_registry import ServiceRegistry
T = TypeVar('T')
class BaseService(Generic[T]):
def __init__(self, model: Type[T]):
self.model = model
# Register the service instance
ServiceRegistry.register(self.__class__.__name__, self)
def get(self, db: Session, id: int) -> Optional[T]:
return db.query(self.model).filter(self.model.id == id).first()
@@ -39,7 +36,4 @@ class BaseService(Generic[T]):
db.delete(obj)
db.commit()
return True
return False
def get_service(self, service_name: str) -> Any:
return ServiceRegistry.get(service_name)
return False

View File

@@ -1,7 +1,6 @@
from typing import Any, Dict, Optional, Union
import aiohttp
import logging
from app.services.service_registry import ServiceRegistry
import json
logger = logging.getLogger(__name__)
@@ -11,8 +10,6 @@ class BaseExternalService:
self.base_url = base_url
self.api_key = api_key
self.session = None
# Register the service instance
ServiceRegistry.register(self.__class__.__name__, self)
async def _get_session(self) -> aiohttp.ClientSession:
if self.session is None or self.session.closed:
@@ -72,5 +69,8 @@ class BaseExternalService:
raise
async def close(self):
"""Close the aiohttp session if it exists"""
if self.session and not self.session.closed:
await self.session.close()
await self.session.close()
self.session = None
logger.info(f"Closed session for {self.__class__.__name__}")

View File

@@ -10,6 +10,7 @@ from sqlalchemy.orm import Session
from datetime import datetime
from app.models.mtgjson_card import MTGJSONCard
from app.models.mtgjson_sku import MTGJSONSKU
from app.db.database import get_db, transaction
class MTGJSONService:
def __init__(self, cache_dir: str = "app/data/cache/mtgjson", batch_size: int = 1000):
@@ -84,72 +85,67 @@ class MTGJSONService:
for key, value in data["data"].items():
yield {"type": "item", "data": {key: value}}
async def _process_batch(self, db: Session, items: list, model_class, commit: bool = True) -> int:
async def _process_batch(self, db: Session, items: list, model_class) -> int:
"""Process a batch of items and add them to the database"""
processed = 0
for item in items:
if model_class == MTGJSONCard:
# Check if card already exists
existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.card_id == item["card_id"]).first()
if existing_card:
continue
new_item = MTGJSONCard(
card_id=item["card_id"],
name=item["name"],
set_code=item["set_code"],
uuid=item["uuid"],
abu_id=item.get("abu_id"),
card_kingdom_etched_id=item.get("card_kingdom_etched_id"),
card_kingdom_foil_id=item.get("card_kingdom_foil_id"),
card_kingdom_id=item.get("card_kingdom_id"),
cardsphere_id=item.get("cardsphere_id"),
cardsphere_foil_id=item.get("cardsphere_foil_id"),
cardtrader_id=item.get("cardtrader_id"),
csi_id=item.get("csi_id"),
mcm_id=item.get("mcm_id"),
mcm_meta_id=item.get("mcm_meta_id"),
miniaturemarket_id=item.get("miniaturemarket_id"),
mtg_arena_id=item.get("mtg_arena_id"),
mtgjson_foil_version_id=item.get("mtgjson_foil_version_id"),
mtgjson_non_foil_version_id=item.get("mtgjson_non_foil_version_id"),
mtgjson_v4_id=item.get("mtgjson_v4_id"),
mtgo_foil_id=item.get("mtgo_foil_id"),
mtgo_id=item.get("mtgo_id"),
multiverse_id=item.get("multiverse_id"),
scg_id=item.get("scg_id"),
scryfall_id=item.get("scryfall_id"),
scryfall_card_back_id=item.get("scryfall_card_back_id"),
scryfall_oracle_id=item.get("scryfall_oracle_id"),
scryfall_illustration_id=item.get("scryfall_illustration_id"),
tcgplayer_product_id=item.get("tcgplayer_product_id"),
tcgplayer_etched_product_id=item.get("tcgplayer_etched_product_id"),
tnt_id=item.get("tnt_id")
)
else: # MTGJSONSKU
# Check if SKU already exists
existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == item["sku_id"]).first()
if existing_sku:
continue
new_item = MTGJSONSKU(
sku_id=str(item["sku_id"]),
product_id=str(item["product_id"]),
condition=item["condition"],
finish=item["finish"],
language=item["language"],
printing=item["printing"],
card_id=item["card_id"]
)
db.add(new_item)
processed += 1
if commit:
try:
db.commit()
except Exception as e:
db.rollback()
raise e
with transaction(db):
for item in items:
if model_class == MTGJSONCard:
# Check if card already exists
existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.card_id == item["card_id"]).first()
if existing_card:
continue
new_item = MTGJSONCard(
card_id=item["card_id"],
name=item["name"],
set_code=item["set_code"],
uuid=item["uuid"],
abu_id=item.get("abu_id"),
card_kingdom_etched_id=item.get("card_kingdom_etched_id"),
card_kingdom_foil_id=item.get("card_kingdom_foil_id"),
card_kingdom_id=item.get("card_kingdom_id"),
cardsphere_id=item.get("cardsphere_id"),
cardsphere_foil_id=item.get("cardsphere_foil_id"),
cardtrader_id=item.get("cardtrader_id"),
csi_id=item.get("csi_id"),
mcm_id=item.get("mcm_id"),
mcm_meta_id=item.get("mcm_meta_id"),
miniaturemarket_id=item.get("miniaturemarket_id"),
mtg_arena_id=item.get("mtg_arena_id"),
mtgjson_foil_version_id=item.get("mtgjson_foil_version_id"),
mtgjson_non_foil_version_id=item.get("mtgjson_non_foil_version_id"),
mtgjson_v4_id=item.get("mtgjson_v4_id"),
mtgo_foil_id=item.get("mtgo_foil_id"),
mtgo_id=item.get("mtgo_id"),
multiverse_id=item.get("multiverse_id"),
scg_id=item.get("scg_id"),
scryfall_id=item.get("scryfall_id"),
scryfall_card_back_id=item.get("scryfall_card_back_id"),
scryfall_oracle_id=item.get("scryfall_oracle_id"),
scryfall_illustration_id=item.get("scryfall_illustration_id"),
tcgplayer_product_id=item.get("tcgplayer_product_id"),
tcgplayer_etched_product_id=item.get("tcgplayer_etched_product_id"),
tnt_id=item.get("tnt_id")
)
else: # MTGJSONSKU
# Check if SKU already exists
existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == item["sku_id"]).first()
if existing_sku:
continue
new_item = MTGJSONSKU(
sku_id=str(item["sku_id"]),
product_id=str(item["product_id"]),
condition=item["condition"],
finish=item["finish"],
language=item["language"],
printing=item["printing"],
card_id=item["card_id"]
)
db.add(new_item)
processed += 1
return processed
async def download_and_process_identifiers(self, db: Session) -> Dict[str, int]:

View File

@@ -6,6 +6,7 @@ from app.services.external_api.base_external_service import BaseExternalService
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_product import TCGPlayerProduct
from app.models.tcgplayer_category import TCGPlayerCategory
from app.db.database import get_db, transaction
from sqlalchemy.orm import Session
import py7zr
import os
@@ -100,42 +101,41 @@ class TCGCSVService(BaseExternalService):
groups = response.get("results", [])
synced_groups = []
with transaction(db):
for group_data in groups:
# Convert string dates to datetime objects
published_on = datetime.fromisoformat(group_data["publishedOn"].replace("Z", "+00:00")) if group_data.get("publishedOn") else None
modified_on = datetime.fromisoformat(group_data["modifiedOn"].replace("Z", "+00:00")) if group_data.get("modifiedOn") else None
for group_data in groups:
# Convert string dates to datetime objects
published_on = datetime.fromisoformat(group_data["publishedOn"].replace("Z", "+00:00")) if group_data.get("publishedOn") else None
modified_on = datetime.fromisoformat(group_data["modifiedOn"].replace("Z", "+00:00")) if group_data.get("modifiedOn") else None
# Check if group already exists
existing_group = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == group_data["groupId"]).first()
if existing_group:
# Update existing group
for key, value in {
"name": group_data["name"],
"abbreviation": group_data.get("abbreviation"),
"is_supplemental": group_data.get("isSupplemental", False),
"published_on": published_on,
"modified_on": modified_on,
"category_id": group_data.get("categoryId")
}.items():
setattr(existing_group, key, value)
synced_groups.append(existing_group)
else:
# Create new group
new_group = TCGPlayerGroup(
group_id=group_data["groupId"],
name=group_data["name"],
abbreviation=group_data.get("abbreviation"),
is_supplemental=group_data.get("isSupplemental", False),
published_on=published_on,
modified_on=modified_on,
category_id=group_data.get("categoryId")
)
db.add(new_group)
synced_groups.append(new_group)
# Check if group already exists
existing_group = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == group_data["groupId"]).first()
if existing_group:
# Update existing group
for key, value in {
"name": group_data["name"],
"abbreviation": group_data.get("abbreviation"),
"is_supplemental": group_data.get("isSupplemental", False),
"published_on": published_on,
"modified_on": modified_on,
"category_id": group_data.get("categoryId")
}.items():
setattr(existing_group, key, value)
synced_groups.append(existing_group)
else:
# Create new group
new_group = TCGPlayerGroup(
group_id=group_data["groupId"],
name=group_data["name"],
abbreviation=group_data.get("abbreviation"),
is_supplemental=group_data.get("isSupplemental", False),
published_on=published_on,
modified_on=modified_on,
category_id=group_data.get("categoryId")
)
db.add(new_group)
synced_groups.append(new_group)
db.commit()
return synced_groups
async def sync_products_to_db(self, db: Session, game_id: int, group_id: int) -> List[TCGPlayerProduct]:
@@ -178,29 +178,29 @@ class TCGCSVService(BaseExternalService):
synced_products.append(existing_product)
else:
# Create new product
new_product = TCGPlayerProduct(
product_id=int(product_data["productId"]),
name=product_data["name"],
clean_name=product_data.get("cleanName"),
image_url=product_data.get("imageUrl"),
category_id=int(product_data["categoryId"]),
group_id=int(product_data["groupId"]),
url=product_data.get("url"),
modified_on=modified_on,
image_count=int(product_data.get("imageCount", 0)),
ext_rarity=product_data.get("extRarity"),
ext_number=product_data.get("extNumber"),
low_price=parse_price(product_data.get("lowPrice")),
mid_price=parse_price(product_data.get("midPrice")),
high_price=parse_price(product_data.get("highPrice")),
market_price=parse_price(product_data.get("marketPrice")),
direct_low_price=parse_price(product_data.get("directLowPrice")),
sub_type_name=product_data.get("subTypeName")
)
db.add(new_product)
synced_products.append(new_product)
with transaction(db):
new_product = TCGPlayerProduct(
product_id=int(product_data["productId"]),
name=product_data["name"],
clean_name=product_data.get("cleanName"),
image_url=product_data.get("imageUrl"),
category_id=int(product_data["categoryId"]),
group_id=int(product_data["groupId"]),
url=product_data.get("url"),
modified_on=modified_on,
image_count=int(product_data.get("imageCount", 0)),
ext_rarity=product_data.get("extRarity"),
ext_number=product_data.get("extNumber"),
low_price=parse_price(product_data.get("lowPrice")),
mid_price=parse_price(product_data.get("midPrice")),
high_price=parse_price(product_data.get("highPrice")),
market_price=parse_price(product_data.get("marketPrice")),
direct_low_price=parse_price(product_data.get("directLowPrice")),
sub_type_name=product_data.get("subTypeName")
)
db.add(new_product)
synced_products.append(new_product)
db.commit()
return synced_products
async def sync_categories_to_db(self, db: Session) -> List[TCGPlayerCategory]:
@@ -212,51 +212,50 @@ class TCGCSVService(BaseExternalService):
categories = response.get("results", [])
synced_categories = []
with transaction(db):
for category_data in categories:
# Convert string dates to datetime objects
modified_on = datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None
for category_data in categories:
# Convert string dates to datetime objects
modified_on = datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None
# Check if category already exists
existing_category = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == category_data["categoryId"]).first()
if existing_category:
# Update existing category
for key, value in {
"name": category_data["name"],
"display_name": category_data.get("displayName"),
"seo_category_name": category_data.get("seoCategoryName"),
"category_description": category_data.get("categoryDescription"),
"category_page_title": category_data.get("categoryPageTitle"),
"sealed_label": category_data.get("sealedLabel"),
"non_sealed_label": category_data.get("nonSealedLabel"),
"condition_guide_url": category_data.get("conditionGuideUrl"),
"is_scannable": category_data.get("isScannable", False),
"popularity": category_data.get("popularity", 0),
"is_direct": category_data.get("isDirect", False),
"modified_on": modified_on
}.items():
setattr(existing_category, key, value)
synced_categories.append(existing_category)
else:
# Create new category
new_category = TCGPlayerCategory(
category_id=category_data["categoryId"],
name=category_data["name"],
display_name=category_data.get("displayName"),
seo_category_name=category_data.get("seoCategoryName"),
category_description=category_data.get("categoryDescription"),
category_page_title=category_data.get("categoryPageTitle"),
sealed_label=category_data.get("sealedLabel"),
non_sealed_label=category_data.get("nonSealedLabel"),
condition_guide_url=category_data.get("conditionGuideUrl"),
is_scannable=category_data.get("isScannable", False),
popularity=category_data.get("popularity", 0),
is_direct=category_data.get("isDirect", False),
modified_on=modified_on
)
db.add(new_category)
synced_categories.append(new_category)
# Check if category already exists
existing_category = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == category_data["categoryId"]).first()
if existing_category:
# Update existing category
for key, value in {
"name": category_data["name"],
"display_name": category_data.get("displayName"),
"seo_category_name": category_data.get("seoCategoryName"),
"category_description": category_data.get("categoryDescription"),
"category_page_title": category_data.get("categoryPageTitle"),
"sealed_label": category_data.get("sealedLabel"),
"non_sealed_label": category_data.get("nonSealedLabel"),
"condition_guide_url": category_data.get("conditionGuideUrl"),
"is_scannable": category_data.get("isScannable", False),
"popularity": category_data.get("popularity", 0),
"is_direct": category_data.get("isDirect", False),
"modified_on": modified_on
}.items():
setattr(existing_category, key, value)
synced_categories.append(existing_category)
else:
# Create new category
new_category = TCGPlayerCategory(
category_id=category_data["categoryId"],
name=category_data["name"],
display_name=category_data.get("displayName"),
seo_category_name=category_data.get("seoCategoryName"),
category_description=category_data.get("categoryDescription"),
category_page_title=category_data.get("categoryPageTitle"),
sealed_label=category_data.get("sealedLabel"),
non_sealed_label=category_data.get("nonSealedLabel"),
condition_guide_url=category_data.get("conditionGuideUrl"),
is_scannable=category_data.get("isScannable", False),
popularity=category_data.get("popularity", 0),
is_direct=category_data.get("isDirect", False),
modified_on=modified_on
)
db.add(new_category)
synced_categories.append(new_category)
db.commit()
return synced_categories

View File

@@ -67,9 +67,6 @@ class BaseTCGPlayerService(BaseExternalService):
auth_required: bool = True,
download_file: bool = False
) -> Union[Dict[str, Any], bytes]:
session = await self._get_session()
url = f"{self.base_url}{endpoint}"
# Get the authentication cookie if required
if auth_required:
cookie = self.credentials.get_cookie()
@@ -83,28 +80,26 @@ class BaseTCGPlayerService(BaseExternalService):
request_headers["Cookie"] = cookie
else:
request_headers = headers or {}
try:
async with session.request(method, url, params=params, headers=request_headers, json=data) as response:
if response.status == 401:
raise RuntimeError("TCGPlayer authentication failed. Cookie may be invalid or expired.")
response.raise_for_status()
if download_file:
return await response.read()
return await response.json()
# Use the parent class's _make_request with our custom headers and binary flag
response = await super()._make_request(
method=method,
endpoint=endpoint,
params=params,
headers=request_headers,
data=data,
binary=download_file
)
if isinstance(response, dict) and response.get('status') == 401:
raise RuntimeError("TCGPlayer authentication failed. Cookie may be invalid or expired.")
return response
except aiohttp.ClientError as e:
logger.error(f"TCGPlayer API request failed: {str(e)}")
raise
except Exception as e:
logger.error(f"Unexpected error during TCGPlayer API request: {str(e)}")
raise
async def _get_session(self) -> aiohttp.ClientSession:
if self.session is None or self.session.closed:
self.session = aiohttp.ClientSession()
return self.session
async def close(self):
if self.session and not self.session.closed:
await self.session.close()
raise

View File

@@ -1,7 +1,26 @@
from typing import Any, Dict, Optional, Union
import logging
from app.services.external_api.tcgplayer.base_tcgplayer_service import BaseTCGPlayerService
from app.schemas.tcgplayer import (
TCGPlayerAPIOrder,
TCGPlayerOrderCreate,
TCGPlayerOrderTransactionCreate,
TCGPlayerOrderProductCreate,
TCGPlayerOrderRefundCreate,
TCGPlayerAPIOrderSummary,
TCGPlayerAPIOrderSearchResponse
)
from app.models.tcgplayer_order import (
TCGPlayerOrder,
TCGPlayerOrderTransaction,
TCGPlayerOrderProduct,
TCGPlayerOrderRefund
)
from sqlalchemy.orm import Session
from app.db.database import transaction
import os
import csv
import io
logger = logging.getLogger(__name__)
@@ -20,12 +39,20 @@ class OrderManagementService(BaseTCGPlayerService):
self.shipping_endpoint = f"/shipping/export{self.API_VERSION}"
async def get_orders(self, open_only: bool = False):
async def get_orders(self, search_range: str = "LastThreeMonths", open_only: bool = False, filter_out: list[str] = [], filter_in: list[str] = []) -> list[TCGPlayerAPIOrderSummary]:
"""
search range options:
LastWeek
LastMonth
LastThreeMonths
LastFourMonths
LastTwoYears
"""
search_from = 0
orders = []
while True:
payload = {
"searchRange": "LastThreeMonths",
"searchRange": search_range,
"filters": {
"sellerKey": self.SELLER_KEY
},
@@ -37,17 +64,27 @@ class OrderManagementService(BaseTCGPlayerService):
"size": 25
}
if open_only:
payload["filters"]["orderStatus"] = ["Processing","ReadyToShip","Received","Pulling","ReadyForPickup"]
payload["filters"]["orderStatuses"] = ["Processing","ReadyToShip","Received","Pulling","ReadyForPickup"]
payload["filters"]["fulfillmentTypes"] = ["Normal"]
logger.info(f"Getting orders from {search_from} to {search_from + 25}")
response = await self._make_request("POST", self.order_search_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True)
logger.info(f"Got {len(response.get('orders'))} orders")
if len(response.get("orders")) == 0:
break
search_from += 25
orders.extend(response.get("orders"))
if filter_out:
orders = [order for order in orders if order.get("orderNumber") not in filter_out]
if filter_in:
orders = [order for order in orders if order.get("orderNumber") in filter_in]
return orders
async def get_order_ids(self, search_range: str = "LastThreeMonths", open_only: bool = False, filter_out: Optional[list[str]] = None, filter_in: Optional[list[str]] = None) -> list[str]:
    """Return only the order numbers for orders matching the search.

    Args:
        search_range: Same options as get_orders() (LastWeek, LastMonth,
            LastThreeMonths, LastFourMonths, LastTwoYears).
        open_only: Restrict to open/unfulfilled order statuses.
        filter_out: Order numbers to exclude from the result.
        filter_in: If given, only these order numbers are kept.

    Returns:
        List of order-number strings (may contain None if the API omits
        the orderNumber field, mirroring get_orders()).
    """
    # Fix: the original used mutable list defaults ([]), which are shared
    # across calls in Python; use None sentinels and normalize here.
    orders = await self.get_orders(
        search_range,
        open_only,
        filter_out if filter_out is not None else [],
        filter_in if filter_in is not None else [],
    )
    return [order.get("orderNumber") for order in orders]
async def get_order(self, order_id: str):
response = await self._make_request("GET", f"{self.ORDER_MANAGEMENT_BASE_URL}/{order_id}{self.API_VERSION}")
response = await self._make_request("GET", f"/{order_id}{self.API_VERSION}")
return response
async def get_packing_slip(self, order_ids: list[str]):
@@ -59,6 +96,7 @@ class OrderManagementService(BaseTCGPlayerService):
}
response = await self._make_request("POST", self.packing_slip_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
return response
async def get_pull_sheet(self, order_ids: list[str]):
payload = {
"orderNumbers": order_ids,
@@ -75,10 +113,283 @@ class OrderManagementService(BaseTCGPlayerService):
response = await self._make_request("POST", self.shipping_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
return response
async def save_file(self, file_data: bytes, file_name: str) -> str:
async def save_file(self, file_data: Union[bytes, list[dict]], file_name: str) -> str:
if not os.path.exists("app/data/cache/tcgplayer/orders"):
os.makedirs("app/data/cache/tcgplayer/orders")
file_path = f"app/data/cache/tcgplayer/orders/{file_name}"
if isinstance(file_data, list):
# Convert list of dictionaries to CSV bytes
output = io.StringIO()
writer = csv.DictWriter(output, fieldnames=file_data[0].keys())
writer.writeheader()
writer.writerows(file_data)
file_data = output.getvalue().encode('utf-8')
with open(file_path, "wb") as f:
f.write(file_data)
return file_path
return file_path
async def save_order_to_db(self, order: dict, db: Session):
    """Upsert a TCGPlayer order (plus its transaction, products, refunds) into the DB.

    Args:
        order: Raw order payload as returned by the TCGPlayer order API.
        db: Active SQLAlchemy session.

    Returns:
        The persisted TCGPlayerOrder ORM object (existing row if the order
        was already up to date, otherwise the updated/created row).

    Raises:
        Re-raises any exception from validation or persistence after logging.
    """
    # Parse API response using our API schema
    api_order = TCGPlayerAPIOrder.model_validate(order)

    # Check if order exists
    existing_order = db.query(TCGPlayerOrder).filter(TCGPlayerOrder.order_number == api_order.orderNumber).first()

    # Create database models
    # (Pydantic "Create" schemas map the API's camelCase fields onto the
    # snake_case DB columns before any ORM objects are built.)
    db_order = TCGPlayerOrderCreate(
        order_number=api_order.orderNumber,
        order_created_at=api_order.createdAt,
        status=api_order.status,
        channel=api_order.orderChannel,
        fulfillment=api_order.orderFulfillment,
        seller_name=api_order.sellerName,
        buyer_name=api_order.buyerName,
        payment_type=api_order.paymentType,
        pickup_status=api_order.pickupStatus,
        shipping_type=api_order.shippingType,
        estimated_delivery_date=api_order.estimatedDeliveryDate,
        recipient_name=api_order.shippingAddress.recipientName,
        address_line_1=api_order.shippingAddress.addressOne,
        address_line_2=api_order.shippingAddress.addressTwo,
        city=api_order.shippingAddress.city,
        state=api_order.shippingAddress.territory,
        zip_code=api_order.shippingAddress.postalCode,
        country=api_order.shippingAddress.country,
        tracking_numbers=[t.trackingNumber for t in api_order.trackingNumbers]
    )
    # Create transaction
    db_transaction = TCGPlayerOrderTransactionCreate(
        order_number=api_order.orderNumber,
        product_amount=api_order.transaction.productAmount,
        shipping_amount=api_order.transaction.shippingAmount,
        gross_amount=api_order.transaction.grossAmount,
        fee_amount=api_order.transaction.feeAmount,
        net_amount=api_order.transaction.netAmount,
        direct_fee_amount=api_order.transaction.directFeeAmount,
        taxes=[{"code": t.code, "amount": t.amount} for t in api_order.transaction.taxes]
    )
    # Create products
    db_products = [
        TCGPlayerOrderProductCreate(
            order_number=api_order.orderNumber,
            product_name=p.name,
            unit_price=p.unitPrice,
            extended_price=p.extendedPrice,
            quantity=p.quantity,
            url=p.url,
            product_id=p.productId,
            sku_id=p.skuId
        )
        for p in api_order.products
    ]
    # Create refunds if they exist
    db_refunds = []
    if api_order.refunds:
        db_refunds = [
            TCGPlayerOrderRefundCreate(
                order_number=api_order.orderNumber,
                refund_created_at=r.createdAt,
                type=r.type,
                amount=r.amount,
                description=r.description,
                origin=r.origin,
                shipping_amount=r.shippingAmount,
                products=r.products
            )
            for r in api_order.refunds
        ]

    with transaction(db):
        try:
            if existing_order:
                # Check if order needs to be updated
                if not self._order_needs_update(existing_order, api_order, db):
                    logger.info(f"Order {api_order.orderNumber} is up to date, skipping update")
                    return existing_order

                logger.info(f"Updating existing order {api_order.orderNumber}")
                # Update existing order in place rather than delete/recreate
                for key, value in db_order.model_dump().items():
                    setattr(existing_order, key, value)
                db_order_model = existing_order

                # Update transaction
                existing_transaction = db.query(TCGPlayerOrderTransaction).filter(
                    TCGPlayerOrderTransaction.order_number == api_order.orderNumber
                ).first()
                if existing_transaction:
                    for key, value in db_transaction.model_dump().items():
                        setattr(existing_transaction, key, value)
                else:
                    db_transaction_model = TCGPlayerOrderTransaction(**db_transaction.model_dump())
                    db.add(db_transaction_model)

                # Delete existing products and refunds
                # (they are re-inserted below from the fresh API data)
                db.query(TCGPlayerOrderProduct).filter(
                    TCGPlayerOrderProduct.order_number == api_order.orderNumber
                ).delete()
                db.query(TCGPlayerOrderRefund).filter(
                    TCGPlayerOrderRefund.order_number == api_order.orderNumber
                ).delete()
            else:
                logger.info(f"Creating new order {api_order.orderNumber}")
                # Create new order
                db_order_model = TCGPlayerOrder(**db_order.model_dump())
                db.add(db_order_model)
                db.flush()  # Get the order ID

                # Create transaction
                db_transaction_model = TCGPlayerOrderTransaction(**db_transaction.model_dump())
                db.add(db_transaction_model)

            # Create products
            for product in db_products:
                db_product_model = TCGPlayerOrderProduct(**product.model_dump())
                db.add(db_product_model)

            # Create refunds
            for refund in db_refunds:
                db_refund_model = TCGPlayerOrderRefund(**refund.model_dump())
                db.add(db_refund_model)

            # NOTE(review): explicit commit inside the transaction() context
            # manager — confirm transaction() tolerates an inner commit.
            db.commit()
            return db_order_model
        except Exception as e:
            logger.error(f"Error saving/updating order {api_order.orderNumber} to database: {str(e)}")
            raise
def _order_needs_update(self, existing_order: TCGPlayerOrder, new_order: TCGPlayerAPIOrder, db: Session) -> bool:
    """
    Compare existing order data with new order data to determine if an update is needed.
    Returns True if the order needs to be updated, False otherwise.

    Compares, in order: basic order columns, the transaction row (including
    taxes), the product rows, and the refund rows. Returns True at the first
    difference found; False only if every section matches.
    """
    # Compare basic order fields
    # Each pair is (DB column name, API attribute path); dotted paths are
    # nested attributes on the API model.
    order_fields_to_compare = [
        ('status', 'status'),
        ('channel', 'orderChannel'),
        ('fulfillment', 'orderFulfillment'),
        ('seller_name', 'sellerName'),
        ('buyer_name', 'buyerName'),
        ('payment_type', 'paymentType'),
        ('pickup_status', 'pickupStatus'),
        ('shipping_type', 'shippingType'),
        ('recipient_name', 'shippingAddress.recipientName'),
        ('address_line_1', 'shippingAddress.addressOne'),
        ('address_line_2', 'shippingAddress.addressTwo'),
        ('city', 'shippingAddress.city'),
        ('state', 'shippingAddress.territory'),
        ('zip_code', 'shippingAddress.postalCode'),
        ('country', 'shippingAddress.country'),
        ('tracking_numbers', 'trackingNumbers')
    ]

    for db_field, api_field in order_fields_to_compare:
        existing_value = getattr(existing_order, db_field)

        # Handle nested fields
        if '.' in api_field:
            parts = api_field.split('.')
            new_value = new_order
            for part in parts:
                new_value = getattr(new_value, part)
        else:
            new_value = getattr(new_order, api_field)

        # Handle special cases for tracking numbers
        # (order-insensitive set comparison; API side holds objects with a
        # trackingNumber attribute, DB side holds plain strings)
        if db_field == 'tracking_numbers':
            if set(existing_value or []) != set([t.trackingNumber for t in new_value or []]):
                return True
            continue

        if existing_value != new_value:
            return True

    # Compare transaction data
    existing_transaction = db.query(TCGPlayerOrderTransaction).filter(
        TCGPlayerOrderTransaction.order_number == existing_order.order_number
    ).first()
    if existing_transaction:
        transaction_fields_to_compare = [
            ('product_amount', 'productAmount'),
            ('shipping_amount', 'shippingAmount'),
            ('gross_amount', 'grossAmount'),
            ('fee_amount', 'feeAmount'),
            ('net_amount', 'netAmount'),
            ('direct_fee_amount', 'directFeeAmount')
        ]
        for db_field, api_field in transaction_fields_to_compare:
            if getattr(existing_transaction, db_field) != getattr(new_order.transaction, api_field):
                return True

        # Compare taxes
        # Sorted by tax code so positional zip() lines up matching entries;
        # DB stores dicts, API side stores objects with attributes.
        existing_taxes = sorted(existing_transaction.taxes, key=lambda x: x['code'])
        new_taxes = sorted(new_order.transaction.taxes, key=lambda x: x.code)
        if len(existing_taxes) != len(new_taxes):
            return True
        for existing_tax, new_tax in zip(existing_taxes, new_taxes):
            if existing_tax['code'] != new_tax.code or existing_tax['amount'] != new_tax.amount:
                return True

    # Compare products
    existing_products = db.query(TCGPlayerOrderProduct).filter(
        TCGPlayerOrderProduct.order_number == existing_order.order_number
    ).all()
    if len(existing_products) != len(new_order.products):
        return True

    # Sort products by product_id for comparison
    existing_products_sorted = sorted(existing_products, key=lambda x: x.product_id)
    new_products_sorted = sorted(new_order.products, key=lambda x: x.productId)
    for existing_product, new_product in zip(existing_products_sorted, new_products_sorted):
        product_fields_to_compare = [
            ('product_name', 'name'),
            ('unit_price', 'unitPrice'),
            ('extended_price', 'extendedPrice'),
            ('quantity', 'quantity'),
            ('url', 'url'),
            ('product_id', 'productId'),
            ('sku_id', 'skuId')
        ]
        for db_field, api_field in product_fields_to_compare:
            if getattr(existing_product, db_field) != getattr(new_product, api_field):
                return True

    # Compare refunds
    existing_refunds = db.query(TCGPlayerOrderRefund).filter(
        TCGPlayerOrderRefund.order_number == existing_order.order_number
    ).all()
    # API refunds may be None; treat as empty list for the length check
    if len(existing_refunds) != len(new_order.refunds or []):
        return True

    # Sort refunds by created_at for comparison
    existing_refunds_sorted = sorted(existing_refunds, key=lambda x: x.refund_created_at)
    new_refunds_sorted = sorted(new_order.refunds or [], key=lambda x: x.createdAt)
    for existing_refund, new_refund in zip(existing_refunds_sorted, new_refunds_sorted):
        refund_fields_to_compare = [
            ('type', 'type'),
            ('amount', 'amount'),
            ('description', 'description'),
            ('origin', 'origin'),
            ('shipping_amount', 'shippingAmount'),
            ('products', 'products')
        ]
        for db_field, api_field in refund_fields_to_compare:
            if getattr(existing_refund, db_field) != getattr(new_refund, api_field):
                return True

    return False

View File

@@ -27,6 +27,13 @@ class LabelPrinterService:
self._session = None
self._lock = asyncio.Lock()
async def cleanup(self):
"""Clean up resources, particularly the aiohttp session."""
if self._session:
await self._session.close()
self._session = None
logger.info("Label printer service session closed")
@asynccontextmanager
async def _get_session(self):
"""Context manager for aiohttp session."""
@@ -64,8 +71,16 @@ class LabelPrinterService:
elif response.status == 404:
logger.error(f"Printer status endpoint not found at {self.status_url}")
return False
elif response.status == 500:
data = await response.json()
error_msg = data.get('message', 'Unknown printer error')
logger.error(f"Printer error: {error_msg}")
raise Exception(f"Printer error: {error_msg}")
except aiohttp.ClientError as e:
logger.warning(f"Error checking printer status: {e}")
if "Cannot connect to host" in str(e):
logger.error("Printer reciever is not available")
raise Exception("Printer reciever is not available")
except Exception as e:
logger.error(f"Unexpected error in _wait_for_printer_ready: {e}")
return False
@@ -109,6 +124,11 @@ class LabelPrinterService:
elif response.status == 429:
logger.error("Printer is busy")
return False
elif response.status == 500:
data = await response.json()
error_msg = data.get('message', 'Unknown printer error')
logger.error(f"Printer error: {error_msg}")
raise Exception(f"Printer error: {error_msg}")
else:
data = await response.json()
logger.error(f"Print request failed with status {response.status}: {data.get('message')}")
@@ -121,13 +141,13 @@ class LabelPrinterService:
logger.error(f"Unexpected error in _send_print_request: {e}")
return False
async def print_file(self, file_path: Union[str, Path], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip"]] = None) -> bool:
async def print_file(self, file_path: Union[str, Path], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label"]] = None) -> bool:
"""Print a PDF or PNG file to the label printer.
Args:
file_path: Path to the PDF or PNG file
label_size: Size of label to use ("dk1201" or "dk1241")
label_type: Type of label to use ("address_label" or "packing_slip")
label_type: Type of label to use ("address_label" or "packing_slip" or "set_label")
Returns:
bool: True if print was successful, False otherwise
@@ -177,14 +197,16 @@ class LabelPrinterService:
resized_image = resized_image.resize((1660, 1164), Image.Resampling.LANCZOS)
else:
resized_image = resized_image.resize((1164, 1660), Image.Resampling.LANCZOS)
elif original_label_size == "dk1201" and label_type == "address_label":
elif original_label_size == "dk1201":
resized_image = resized_image.resize((991, 306), Image.Resampling.LANCZOS)
# if file path contains address_label, rotate image 90 degrees
if label_type == "address_label":
if label_type == "address_label" or label_type == "set_label":
rotate = "90"
cut = False
else:
rotate = "0"
cut = True
# Convert to label format
qlr = BrotherQLRaster("QL-1100")
@@ -205,7 +227,7 @@ class LabelPrinterService:
dpi_600=False,
#hq=True,
hq=False,
cut=True
cut=cut
)
# Cache the converted binary data

View File

@@ -1,58 +0,0 @@
from sqlalchemy.orm import Session
from app.services.base_service import BaseService
from app.models.order import Order, OrderCard
from app.models.card import Card
class OrderService(BaseService):
    """CRUD helpers for Order rows plus order<->card association management.

    Inherits generic persistence helpers from BaseService, bound to Order.
    """

    def __init__(self):
        # Bind the generic BaseService helpers to the Order model.
        super().__init__(Order)

    def create_order_with_cards(self, db: Session, order_data: dict, card_ids: list[int]) -> Order:
        """
        Create a new order with associated cards.
        Args:
            db: Database session
            order_data: Dictionary containing order details
            card_ids: List of card IDs to associate with the order
        Returns:
            The created Order object
        """
        # Create the order
        order = Order(**order_data)
        db.add(order)
        db.flush()  # Get the order ID
        # Associate cards with the order
        for card_id in card_ids:
            card = db.query(Card).filter(Card.id == card_id).first()
            if not card:
                # NOTE(review): raising here leaves the order and any earlier
                # OrderCard rows pending in the session; the caller is assumed
                # to roll back on ValueError — confirm.
                raise ValueError(f"Card with ID {card_id} not found")
            order_card = OrderCard(order_id=order.id, card_id=card_id)
            db.add(order_card)
        db.commit()
        db.refresh(order)
        return order

    def get_orders_with_cards(self, db: Session, skip: int = 0, limit: int = 10) -> list[Order]:
        """
        Get orders with their associated cards.
        Args:
            db: Database session
            skip: Number of records to skip
            limit: Maximum number of records to return
        Returns:
            List of Order objects with their associated cards
        """
        orders = db.query(Order).offset(skip).limit(limit).all()
        # Eager load the cards for each order
        # NOTE(review): one query per order (N+1); a joinedload/selectinload
        # would batch this if page sizes grow.
        for order in orders:
            order.cards = db.query(Card).join(OrderCard).filter(OrderCard.order_id == order.id).all()
        return orders

View File

@@ -5,6 +5,7 @@ from pathlib import Path
from jinja2 import Environment, FileSystemLoader
from weasyprint import HTML
import logging
import asyncio
logger = logging.getLogger(__name__)
@@ -16,7 +17,7 @@ class PullSheetService:
self.output_dir = Path("app/data/cache/tcgplayer/pull_sheets")
self.output_dir.mkdir(parents=True, exist_ok=True)
def generate_pull_sheet_pdf(self, csv_path: str) -> str:
async def generate_pull_sheet_pdf(self, csv_path: str) -> str:
"""Generate a PDF pull sheet from a CSV file.
Args:
@@ -27,7 +28,7 @@ class PullSheetService:
"""
try:
# Read and process CSV data
items = self._read_and_process_csv(csv_path)
items = await self._read_and_process_csv(csv_path)
# Prepare template data
template_data = {
@@ -38,9 +39,12 @@ class PullSheetService:
# Render HTML
html_content = self.template.render(**template_data)
# Generate PDF
# Generate PDF in a separate thread to avoid blocking
pdf_path = self.output_dir / f"pull_sheet_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf"
HTML(string=html_content).write_pdf(str(pdf_path))
await asyncio.get_event_loop().run_in_executor(
None,
lambda: HTML(string=html_content).write_pdf(str(pdf_path))
)
return str(pdf_path)
@@ -48,7 +52,7 @@ class PullSheetService:
logger.error(f"Error generating pull sheet PDF: {str(e)}")
raise
def _read_and_process_csv(self, csv_path: str) -> List[Dict]:
async def _read_and_process_csv(self, csv_path: str) -> List[Dict]:
"""Read and process CSV data using pandas.
Args:
@@ -57,8 +61,11 @@ class PullSheetService:
Returns:
List of processed items
"""
# Read CSV into pandas DataFrame
df = pd.read_csv(csv_path)
# Read CSV into pandas DataFrame in a separate thread to avoid blocking
df = await asyncio.get_event_loop().run_in_executor(
None,
lambda: pd.read_csv(csv_path)
)
# Filter out the "Orders Contained in Pull Sheet" row
df = df[df['Product Line'] != 'Orders Contained in Pull Sheet:']
@@ -67,7 +74,7 @@ class PullSheetService:
df['Set Release Date'] = pd.to_datetime(df['Set Release Date'], format='%m/%d/%Y %H:%M:%S')
# Sort by Set Release Date (descending) and then Product Name (ascending)
df = df.sort_values(['Set Release Date', 'Product Name'], ascending=[False, True])
df = df.sort_values(['Set Release Date', 'Set', 'Product Name'], ascending=[False, True, True])
# Convert to list of dictionaries
items = []
@@ -77,7 +84,8 @@ class PullSheetService:
'condition': row['Condition'],
'quantity': str(int(row['Quantity'])), # Convert to string for template
'set': row['Set'],
'rarity': row['Rarity']
'rarity': row['Rarity'],
'card_number': str(int(row['Number'])) if 'Number' in row else ''
})
return items

View File

@@ -2,7 +2,7 @@ from typing import Callable, Dict, Any
from apscheduler.schedulers.asyncio import AsyncIOScheduler
from apscheduler.triggers.interval import IntervalTrigger
import logging
from app.services.service_registry import ServiceRegistry
from apscheduler.schedulers.base import SchedulerNotRunningError
logger = logging.getLogger(__name__)
@@ -10,7 +10,7 @@ class BaseScheduler:
def __init__(self):
self.scheduler = AsyncIOScheduler()
self.jobs: Dict[str, Any] = {}
ServiceRegistry.register(self.__class__.__name__, self)
self._is_running = False
async def schedule_task(
self,
@@ -46,16 +46,20 @@ class BaseScheduler:
def start(self) -> None:
"""Start the scheduler"""
self.scheduler.start()
logger.info("Scheduler started")
if not self._is_running:
self.scheduler.start()
self._is_running = True
logger.info("Scheduler started")
async def shutdown(self) -> None:
"""Shutdown the scheduler"""
try:
self.scheduler.shutdown()
logger.info("Scheduler stopped")
except AttributeError as e:
if "'NoneType' object has no attribute 'call_soon_threadsafe'" in str(e):
logger.warning("Event loop already closed, skipping scheduler shutdown")
else:
if self._is_running:
try:
self.scheduler.shutdown()
self._is_running = False
logger.info("Scheduler stopped")
except SchedulerNotRunningError:
logger.warning("Scheduler was already stopped")
except Exception as e:
logger.error(f"Error shutting down scheduler: {str(e)}")
raise

View File

@@ -1,7 +1,4 @@
from sqlalchemy.orm import Session
from app.db.database import SessionLocal, transaction
from app.services.external_api.tcgplayer.tcgplayer_inventory_service import TCGPlayerInventoryService
from app.services.file_processing_service import FileProcessingService
from app.db.database import get_db, transaction
from app.services.scheduler.base_scheduler import BaseScheduler
import logging
@@ -9,9 +6,16 @@ logger = logging.getLogger(__name__)
class SchedulerService:
def __init__(self):
self.tcgplayer_service = TCGPlayerInventoryService()
self.file_processor = FileProcessingService()
self.scheduler = BaseScheduler()
# Service manager will be set during initialization
self._service_manager = None
@property
def service_manager(self):
if self._service_manager is None:
from app.services.service_manager import ServiceManager
self._service_manager = ServiceManager()
return self._service_manager
async def process_tcgplayer_export(self, export_type: str = "live", use_cache: bool = False):
"""
@@ -20,16 +24,20 @@ class SchedulerService:
Args:
export_type: Type of export to process (staged, live, or pricing)
"""
db = SessionLocal()
db = get_db()
try:
logger.info(f"Starting scheduled TCGPlayer export processing for {export_type}")
# Get services
tcgplayer_service = self.service_manager.get_service('tcgplayer_inventory')
file_processor = self.service_manager.get_service('file_processing')
# Download the file
file_bytes = await self.tcgplayer_service.get_tcgplayer_export(export_type)
file_bytes = await tcgplayer_service.get_tcgplayer_export(export_type)
# Process the file and load into database
with transaction(db):
stats = await self.file_processor.process_tcgplayer_export(db, export_type=export_type, file_bytes=file_bytes, use_cache=use_cache)
stats = await file_processor.process_tcgplayer_export(db, export_type=export_type, file_bytes=file_bytes, use_cache=use_cache)
logger.info(f"Completed TCGPlayer export processing: {stats}")
return stats
@@ -37,9 +45,53 @@ class SchedulerService:
except Exception as e:
logger.error(f"Error processing TCGPlayer export: {str(e)}")
raise
finally:
db.close()
async def update_open_orders_hourly(self):
"""
Hourly update of orders from TCGPlayer API to database
"""
db = get_db()
try:
logger.info("Starting hourly order update")
# Get order management service
order_management = self.service_manager.get_service('order_management')
# get all open orders from last 7 days
orders = await order_management.get_order_ids(open_only=True, search_range="LastWeek")
for order_id in orders:
order = await order_management.get_order(order_id)
with transaction(db):
await order_management.save_order_to_db(order, db)
logger.info("Completed hourly order update")
except Exception as e:
logger.error(f"Error updating open orders: {str(e)}")
raise
async def update_all_orders_daily(self):
"""
Daily update of all orders from TCGPlayer API to database
"""
db = get_db()
try:
logger.info("Starting daily order update")
# Get order management service
order_management = self.service_manager.get_service('order_management')
# get all order ids from last 3 months
orders = await order_management.get_order_ids(open_only=False, search_range="LastThreeMonths")
for order_id in orders:
order = await order_management.get_order(order_id)
with transaction(db):
await order_management.save_order_to_db(order, db)
logger.info("Completed daily order update")
except Exception as e:
logger.error(f"Error updating all orders: {str(e)}")
raise
async def start_scheduled_tasks(self):
"""Start all scheduled tasks"""
# Schedule TCGPlayer export processing to run daily at 2 AM
@@ -49,6 +101,27 @@ class SchedulerService:
interval_seconds=24 * 60 * 60, # 24 hours
export_type="live"
)
# Schedule open orders update to run hourly at 00 minutes
await self.scheduler.schedule_task(
task_name="update_open_orders_hourly",
func=self.update_open_orders_hourly,
interval_seconds=60 * 60, # 1 hour
)
# Schedule all orders update to run daily at 1 AM
await self.scheduler.schedule_task(
task_name="update_all_orders_daily",
func=self.update_all_orders_daily,
interval_seconds=24 * 60 * 60, # 24 hours
)
self.scheduler.start()
logger.info("All scheduled tasks started")
logger.info("All scheduled tasks started")
async def close(self):
"""Close all services used by the scheduler"""
try:
await self.scheduler.shutdown()
logger.info("Scheduler services closed")
except Exception as e:
logger.error(f"Error closing scheduler services: {str(e)}")
raise

View File

@@ -0,0 +1,84 @@
from typing import Dict, Any, Type
import logging
import importlib
logger = logging.getLogger(__name__)
class ServiceManager:
    """Lazily-importing singleton container for application services.

    Each service instance ends up stored under two keys: its short name
    (e.g. ``'label_printer'``) and, via ``register_service``, its class name —
    so ``get_service`` accepts either.
    """

    _instance = None
    _initialized = False

    def __new__(cls):
        # Classic singleton: every ServiceManager() yields the same object.
        if cls._instance is None:
            cls._instance = super(ServiceManager, cls).__new__(cls)
        return cls._instance

    def __init__(self):
        # __init__ runs on every construction; build state only once.
        if not self._initialized:
            self.services: Dict[str, Any] = {}
            # Dotted paths are resolved lazily in _import_service, so merely
            # constructing the manager imports no heavy service modules.
            self._service_classes = {
                'order_management': 'app.services.external_api.tcgplayer.order_management_service.OrderManagementService',
                'tcgplayer_inventory': 'app.services.external_api.tcgplayer.tcgplayer_inventory_service.TCGPlayerInventoryService',
                'label_printer': 'app.services.label_printer_service.LabelPrinterService',
                'regular_printer': 'app.services.regular_printer_service.RegularPrinterService',
                'address_label': 'app.services.address_label_service.AddressLabelService',
                'pull_sheet': 'app.services.pull_sheet_service.PullSheetService',
                'set_label': 'app.services.set_label_service.SetLabelService',
                'data_initialization': 'app.services.data_initialization.DataInitializationService',
                'scheduler': 'app.services.scheduler.scheduler_service.SchedulerService'
            }
            # Constructor kwargs for services that need configuration.
            self._service_configs = {
                'label_printer': {'printer_api_url': "http://192.168.1.110:8000"},
                'regular_printer': {'printer_name': "MFCL2750DW-3"}
            }
            self._initialized = True

    def _import_service(self, module_path: str) -> Type:
        """Dynamically import and return the class at dotted *module_path*."""
        module_name, class_name = module_path.rsplit('.', 1)
        module = importlib.import_module(module_name)
        return getattr(module, class_name)

    def register_service(self, service: Any) -> None:
        """Register a service instance under its class name."""
        service_name = service.__class__.__name__
        self.services[service_name] = service
        logger.info(f"Service {service_name} registered")

    async def initialize_services(self):
        """Instantiate every configured service.

        Raises:
            Exception: re-raised after logging if any service fails to
                import or construct.
        """
        try:
            for name, class_path in self._service_classes.items():
                service_class = self._import_service(class_path)
                config = self._service_configs.get(name, {})
                self.services[name] = service_class(**config)
                self.register_service(self.services[name])
            logger.info("All services initialized successfully")
        except Exception as e:
            logger.error(f"Failed to initialize services: {str(e)}")
            raise

    async def cleanup_services(self):
        """Clean up all services in reverse order of initialization.

        Every instance appears in ``services`` under two keys (short name and
        class name), so identities are tracked to ensure close()/cleanup()
        runs only once per instance instead of twice.
        """
        try:
            cleaned_ids: set = set()
            for name, service in reversed(list(self.services.items())):
                if id(service) in cleaned_ids:
                    continue  # same instance already cleaned under its other key
                cleaned_ids.add(id(service))
                if hasattr(service, 'close'):
                    await service.close()
                elif hasattr(service, 'cleanup'):
                    await service.cleanup()
                logger.info(f"Service {name} cleaned up")
            self.services.clear()
            logger.info("All services cleaned up successfully")
        except Exception as e:
            logger.error(f"Failed to clean up services: {str(e)}")
            raise

    def get_service(self, name: str) -> Any:
        """Return a registered service by short name or class name.

        Raises:
            ValueError: if no such service was initialized/registered.
        """
        if name not in self.services:
            raise ValueError(f"Service {name} not found")
        return self.services[name]

View File

@@ -1,18 +0,0 @@
from typing import Dict, Any
class ServiceRegistry:
    """Process-wide name -> service-instance mapping.

    All state lives on the class itself, so every caller shares one registry.
    """

    _services: Dict[str, Any] = {}

    @classmethod
    def register(cls, name: str, service: Any) -> None:
        """Store *service* under *name*, replacing any previous entry."""
        cls._services[name] = service

    @classmethod
    def get(cls, name: str) -> Any:
        """Return the service registered under *name*.

        Raises:
            ValueError: if nothing was registered under *name*.
        """
        try:
            return cls._services[name]
        except KeyError:
            raise ValueError(f"Service {name} not found in registry") from None

    @classmethod
    def clear(cls) -> None:
        """Drop every registered service."""
        cls._services.clear()

View File

@@ -0,0 +1,262 @@
import argparse
import asyncio
import base64
import logging
import os
from datetime import datetime
from pathlib import Path
from sqlalchemy.orm import Session
import aiohttp
import jinja2
from weasyprint import HTML
from app.services.base_service import BaseService
from app.models.tcgplayer_group import TCGPlayerGroup
# Module-level logger and shared Jinja2 environment for label templates.
log = logging.getLogger(__name__)
ENV = jinja2.Environment(
    loader=jinja2.FileSystemLoader("app/data/assets/templates"))
# Scryfall set types we are interested in.
SET_TYPES = (
    "core",
    "expansion",
    "starter",  # Portal, P3k, welcome decks
    "masters",
    "commander",
    "planechase",
    "draft_innovation",  # Battlebond, Conspiracy
    "duel_deck",  # Duel Deck Elves,
    "premium_deck",  # Premium Deck Series: Slivers, Premium Deck Series: Graveborn
    "from_the_vault",  # Make sure to adjust the MINIMUM_SET_SIZE if you want these
    "archenemy",
    "box",
    "funny",  # Unglued, Unhinged, Ponies: TG, etc.
    # "memorabilia",  # Commander's Arsenal, Celebration Cards, World Champ Decks
    # "spellbook",
    # These are relatively large groups of sets
    # You almost certainly don't want these
    # "token",
    # "promo",
)

# Only include sets at least this size
# For reference, the smallest proper expansion is Arabian Nights with 78 cards
MINIMUM_SET_SIZE = 50

# Set codes you might want to ignore
IGNORED_SETS = (
    "cmb1",  # Mystery Booster Playtest Cards
    "amh1",  # Modern Horizon Art Series
    "cmb2",  # Mystery Booster Playtest Cards Part Deux
)

# Used to rename very long set names so they fit on a label.
RENAME_SETS = {
    "Adventures in the Forgotten Realms": "Forgotten Realms",
    "Adventures in the Forgotten Realms Minigames": "Forgotten Realms Minigames",
    "Angels: They're Just Like Us but Cooler and with Wings": "Angels: Just Like Us",
    "Archenemy: Nicol Bolas Schemes": "Archenemy: Bolas Schemes",
    "Chronicles Foreign Black Border": "Chronicles FBB",
    "Commander Anthology Volume II": "Commander Anthology II",
    "Commander Legends: Battle for Baldur's Gate": "CMDR Legends: Baldur's Gate",
    "Dominaria United Commander": "Dominaria United [C]",
    "Duel Decks: Elves vs. Goblins": "DD: Elves vs. Goblins",
    "Duel Decks: Jace vs. Chandra": "DD: Jace vs. Chandra",
    "Duel Decks: Divine vs. Demonic": "DD: Divine vs. Demonic",
    "Duel Decks: Garruk vs. Liliana": "DD: Garruk vs. Liliana",
    "Duel Decks: Phyrexia vs. the Coalition": "DD: Phyrexia vs. Coalition",
    "Duel Decks: Elspeth vs. Tezzeret": "DD: Elspeth vs. Tezzeret",
    "Duel Decks: Knights vs. Dragons": "DD: Knights vs. Dragons",
    "Duel Decks: Ajani vs. Nicol Bolas": "DD: Ajani vs. Nicol Bolas",
    "Duel Decks: Heroes vs. Monsters": "DD: Heroes vs. Monsters",
    "Duel Decks: Speed vs. Cunning": "DD: Speed vs. Cunning",
    "Duel Decks Anthology: Elves vs. Goblins": "DDA: Elves vs. Goblins",
    "Duel Decks Anthology: Jace vs. Chandra": "DDA: Jace vs. Chandra",
    "Duel Decks Anthology: Divine vs. Demonic": "DDA: Divine vs. Demonic",
    "Duel Decks Anthology: Garruk vs. Liliana": "DDA: Garruk vs. Liliana",
    "Duel Decks: Elspeth vs. Kiora": "DD: Elspeth vs. Kiora",
    "Duel Decks: Zendikar vs. Eldrazi": "DD: Zendikar vs. Eldrazi",
    "Duel Decks: Blessed vs. Cursed": "DD: Blessed vs. Cursed",
    "Duel Decks: Nissa vs. Ob Nixilis": "DD: Nissa vs. Ob Nixilis",
    "Duel Decks: Merfolk vs. Goblins": "DD: Merfolk vs. Goblins",
    "Duel Decks: Elves vs. Inventors": "DD: Elves vs. Inventors",
    # Fixed: value previously read "DD: Mirrodin vs.New Phyrexia" (missing space).
    "Duel Decks: Mirrodin Pure vs. New Phyrexia": "DD: Mirrodin vs. New Phyrexia",
    # NOTE(review): identity mapping — kept for behavior parity; confirm intent.
    "Duel Decks: Izzet vs. Golgari": "Duel Decks: Izzet vs. Golgari",
    "Fourth Edition Foreign Black Border": "Fourth Edition FBB",
    "Global Series Jiang Yanggu & Mu Yanling": "Jiang Yanggu & Mu Yanling",
    "Innistrad: Crimson Vow Minigames": "Crimson Vow Minigames",
    "Introductory Two-Player Set": "Intro Two-Player Set",
    "March of the Machine: The Aftermath": "MotM: The Aftermath",
    "March of the Machine Commander": "March of the Machine [C]",
    "Murders at Karlov Manor Commander": "Murders at Karlov Manor [C]",
    "Mystery Booster Playtest Cards": "Mystery Booster Playtest",
    "Mystery Booster Playtest Cards 2019": "MB Playtest Cards 2019",
    "Mystery Booster Playtest Cards 2021": "MB Playtest Cards 2021",
    "Mystery Booster Retail Edition Foils": "Mystery Booster Retail Foils",
    "Outlaws of Thunder Junction Commander": "Outlaws of Thunder Junction [C]",
    "Phyrexia: All Will Be One Commander": "Phyrexia: All Will Be One [C]",
    "Planechase Anthology Planes": "Planechase Anth. Planes",
    "Premium Deck Series: Slivers": "Premium Deck Slivers",
    "Premium Deck Series: Graveborn": "Premium Deck Graveborn",
    "Premium Deck Series: Fire and Lightning": "PD: Fire & Lightning",
    "Shadows over Innistrad Remastered": "SOI Remastered",
    "Strixhaven: School of Mages Minigames": "Strixhaven Minigames",
    "Tales of Middle-earth Commander": "Tales of Middle-earth [C]",
    "The Brothers' War Retro Artifacts": "Brothers' War Retro",
    "The Brothers' War Commander": "Brothers' War Commander",
    "The Lord of the Rings: Tales of Middle-earth": "LOTR: Tales of Middle-earth",
    "The Lost Caverns of Ixalan Commander": "The Lost Caverns of Ixalan [C]",
    "Warhammer 40,000 Commander": "Warhammer 40K [C]",
    "Wilds of Eldraine Commander": "Wilds of Eldraine [C]",
    "World Championship Decks 1997": "World Championship 1997",
    "World Championship Decks 1998": "World Championship 1998",
    "World Championship Decks 1999": "World Championship 1999",
    "World Championship Decks 2000": "World Championship 2000",
    "World Championship Decks 2001": "World Championship 2001",
    "World Championship Decks 2002": "World Championship 2002",
    "World Championship Decks 2003": "World Championship 2003",
    "World Championship Decks 2004": "World Championship 2004",
}
class SetLabelService(BaseService):
    """Generates printable PDF labels for MTG sets from Scryfall data.

    Generated PDFs are cached in ``output_dir``: an existing file for a set
    code is reused rather than re-rendered.
    """

    DEFAULT_OUTPUT_DIR = "app/data/cache/set_labels"
    os.makedirs(DEFAULT_OUTPUT_DIR, exist_ok=True)

    def __init__(self, output_dir=DEFAULT_OUTPUT_DIR):
        super().__init__(None)  # BaseService doesn't need a model for this service
        # Filters; generate_labels() relaxes them when explicit sets are given.
        self.set_codes = []
        self.ignored_sets = IGNORED_SETS
        self.set_types = SET_TYPES
        self.minimum_set_size = MINIMUM_SET_SIZE
        self.output_dir = Path(output_dir)
        self.output_dir.mkdir(parents=True, exist_ok=True)

    async def get_set_data(self, session):
        """Fetch Scryfall's set list, filtered by this service's settings.

        Args:
            session: Open aiohttp ClientSession.
        Returns:
            List of Scryfall set dicts, in reverse of the API's order.
        """
        log.info("Getting set data and icons from Scryfall")
        async with session.get("https://api.scryfall.com/sets") as resp:
            resp.raise_for_status()
            data = (await resp.json())["data"]

        set_data = []
        for exp in data:
            if exp["code"] in self.ignored_sets:
                continue
            elif exp["card_count"] < self.minimum_set_size:
                continue
            elif self.set_types and exp["set_type"] not in self.set_types:
                continue
            elif self.set_codes and exp["code"].lower() not in self.set_codes:
                continue
            else:
                set_data.append(exp)

        if self.set_codes:
            # Warn about requested codes Scryfall doesn't know.
            known_sets = set([exp["code"] for exp in data])
            specified_sets = set([code.lower() for code in self.set_codes])
            unknown_sets = specified_sets.difference(known_sets)
            for set_code in unknown_sets:
                log.warning("Unknown set '%s'", set_code)

        set_data.reverse()
        return set_data

    async def get_set_icon(self, session, icon_url):
        """Fetch a set icon and return it base64-encoded, or None on failure."""
        try:
            async with session.get(icon_url) as resp:
                if resp.status == 200:
                    icon_data = await resp.read()
                    return base64.b64encode(icon_data).decode('utf-8')
        except Exception as e:
            # Best-effort: a missing icon should not abort label generation.
            log.warning(f"Failed to fetch icon from {icon_url}: {e}")
        return None

    async def generate_label(self, session, set_data):
        """Render one set label PDF, reusing a cached file when present.

        Returns:
            Path of the (possibly pre-existing) PDF file.
        """
        output_file = self.output_dir / f"{set_data['code']}.pdf"

        # Check if file already exists
        if output_file.exists():
            log.info(f"Label already exists for {set_data['name']} ({set_data['code']})")
            return output_file

        name = RENAME_SETS.get(set_data["name"], set_data["name"])
        icon_b64 = await self.get_set_icon(session, set_data["icon_svg_uri"])

        template = ENV.get_template("set_label.html")
        html_content = template.render(
            name=name,
            code=set_data["code"],
            date=datetime.strptime(set_data["released_at"], "%Y-%m-%d").date(),
            icon_b64=icon_b64,
        )

        # WeasyPrint rendering is synchronous and heavy; run it in the default
        # executor so the event loop isn't blocked (matches the pattern used
        # by the other PDF-generating services in this app).
        await asyncio.get_running_loop().run_in_executor(
            None,
            lambda: HTML(string=html_content).write_pdf(output_file)
        )

        log.info(f"Generated label for {name} ({set_data['code']})")
        return output_file

    async def generate_labels(self, sets=None):
        """Generate labels for all matching sets (or only the given codes).

        When explicit set codes are supplied, the size/type/ignore filters
        are disabled so any requested set is produced.
        """
        if sets:
            self.ignored_sets = ()
            self.minimum_set_size = 0
            self.set_types = ()
            self.set_codes = [exp.lower() for exp in sets]

        async with aiohttp.ClientSession() as session:
            set_data = await self.get_set_data(session)
            tasks = [self.generate_label(session, exp) for exp in set_data]
            return await asyncio.gather(*tasks)

    async def get_available_sets(self, db: Session):
        """
        Get a list of available MTG sets that can be used for label generation.
        Returns:
            List of set codes and their names
        """
        try:
            # Get all sets from the database
            sets = db.query(TCGPlayerGroup).filter(
                TCGPlayerGroup.category_id == 1,
                TCGPlayerGroup.abbreviation.isnot(None),
                TCGPlayerGroup.abbreviation != ""
            ).all()

            if not sets:
                log.warning("No sets found in database")
                return []

            # Renamed loop variable: the original shadowed the builtin `set`.
            return [{"code": group.abbreviation, "name": group.name} for group in sets]
        except Exception as e:
            log.error(f"Error getting available sets: {str(e)}")
            raise
def main():
    """Command-line entry point: generate set labels for the requested sets."""
    logging.basicConfig(format='[%(levelname)s] %(message)s', level=logging.INFO)

    parser = argparse.ArgumentParser(description="Generate MTG labels")
    parser.add_argument(
        "--output-dir",
        default=SetLabelService.DEFAULT_OUTPUT_DIR,
        help="Output labels to this directory",
    )
    parser.add_argument(
        "sets",
        nargs="*",
        metavar="SET",
        help=(
            "Only output sets with the specified set code (eg. MH1, NEO). "
            "This can be used multiple times."
        ),
    )
    cli_args = parser.parse_args()

    asyncio.run(SetLabelService(cli_args.output_dir).generate_labels(cli_args.sets))


if __name__ == "__main__":
    main()