Add file caching (FileService) and front-end integration; refactor services to persist downloads and generated files through FileService
This commit is contained in:
@@ -2,10 +2,12 @@ from app.services.base_service import BaseService
|
||||
from app.services.service_manager import ServiceManager
|
||||
from app.services.file_processing_service import FileProcessingService
|
||||
from app.services.inventory_service import InventoryService
|
||||
from app.services.file_service import FileService
|
||||
|
||||
__all__ = [
|
||||
'BaseService',
|
||||
'ServiceManager',
|
||||
'FileProcessingService',
|
||||
'InventoryService'
|
||||
'InventoryService',
|
||||
'FileService'
|
||||
]
|
@@ -7,11 +7,15 @@ from weasyprint import HTML
|
||||
import logging
|
||||
import asyncio
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from sqlalchemy.orm import Session
|
||||
from app.schemas.file import FileInDB
|
||||
from app.services.base_service import BaseService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class AddressLabelService:
|
||||
class AddressLabelService(BaseService):
|
||||
def __init__(self):
|
||||
super().__init__(None) # BaseService doesn't need a model for this service
|
||||
self.template_dir = Path("app/data/assets/templates")
|
||||
self.env = Environment(loader=FileSystemLoader(str(self.template_dir)))
|
||||
self.templates = {
|
||||
@@ -19,19 +23,39 @@ class AddressLabelService:
|
||||
"dk1201": self.env.get_template("address_label_dk1201.html")
|
||||
}
|
||||
self.return_address_path = "file://" + os.path.abspath("app/data/assets/images/ccrcardsaddress.png")
|
||||
self.output_dir = "app/data/cache/tcgplayer/address_labels/"
|
||||
os.makedirs(self.output_dir, exist_ok=True)
|
||||
self.executor = ThreadPoolExecutor()
|
||||
|
||||
async def generate_labels_from_csv(self, csv_path: str, label_type: Literal["dk1201", "dk1241"]) -> List[str]:
|
||||
|
||||
async def get_or_create_address_labels(self, db: Session, order_ids: list[str], label_type: Literal["dk1201", "dk1241"]) -> List[FileInDB]:
|
||||
"""Get or create address labels for the specified orders.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
order_ids: List of TCGPlayer order numbers
|
||||
label_type: Type of label to generate ("dk1201" or "dk1241")
|
||||
Returns:
|
||||
List of FileInDB objects for generated PDF files
|
||||
"""
|
||||
# check if address labels exist for the order ids
|
||||
file_service = self.get_service('file')
|
||||
# honestly i just dont feel like caching the address labels bc its hard
|
||||
shipping_csv = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "shipping_csv", "text/csv")
|
||||
if shipping_csv:
|
||||
return await self.generate_labels_from_csv(db, shipping_csv.path, label_type)
|
||||
else:
|
||||
order_management = self.get_service('order_management')
|
||||
shipping_csv = await order_management.get_shipping_csv(db, order_ids)
|
||||
return await self.generate_labels_from_csv(db, shipping_csv.path, label_type)
|
||||
|
||||
async def generate_labels_from_csv(self, db: Session, csv_path: str, label_type: Literal["dk1201", "dk1241"]) -> List[FileInDB]:
|
||||
"""Generate address labels from a CSV file and save them as PDFs.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
csv_path: Path to the CSV file containing address data
|
||||
label_type: Type of label to generate ("6x4" or "dk1201")
|
||||
|
||||
Returns:
|
||||
List of paths to generated PDF files
|
||||
List of FileInDB objects for generated PDF files
|
||||
"""
|
||||
generated_files = []
|
||||
|
||||
@@ -46,9 +70,9 @@ class AddressLabelService:
|
||||
continue
|
||||
|
||||
# Generate label for each row
|
||||
pdf_path = await self._generate_single_label(row, label_type)
|
||||
if pdf_path:
|
||||
generated_files.append(str(pdf_path))
|
||||
file_record = await self._generate_single_label(db, row, label_type)
|
||||
if file_record:
|
||||
generated_files.append(file_record)
|
||||
|
||||
return generated_files
|
||||
|
||||
@@ -58,15 +82,16 @@ class AddressLabelService:
|
||||
reader = csv.DictReader(csvfile)
|
||||
return list(reader)
|
||||
|
||||
async def _generate_single_label(self, row: Dict[str, str], label_type: Literal["dk1201", "dk1241"]) -> Optional[str]:
|
||||
async def _generate_single_label(self, db: Session, row: Dict[str, str], label_type: Literal["dk1201", "dk1241"]) -> Optional[FileInDB]:
|
||||
"""Generate a single address label PDF.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
row: Dictionary containing address data
|
||||
label_type: Type of label to generate ("6x4" or "dk1201")
|
||||
|
||||
Returns:
|
||||
Path to the generated PDF file or None if generation failed
|
||||
FileInDB object for the generated PDF file or None if generation failed
|
||||
"""
|
||||
try:
|
||||
# Prepare template data
|
||||
@@ -88,12 +113,30 @@ class AddressLabelService:
|
||||
|
||||
# Generate PDF in a thread pool
|
||||
loop = asyncio.get_event_loop()
|
||||
pdf_path = self.output_dir + f"{row['Order #']}_{label_type}.pdf"
|
||||
await loop.run_in_executor(
|
||||
pdf_content = await loop.run_in_executor(
|
||||
self.executor,
|
||||
lambda: HTML(string=html_content).write_pdf(str(pdf_path))
|
||||
lambda: HTML(string=html_content).write_pdf()
|
||||
)
|
||||
return pdf_path
|
||||
|
||||
# Prepare metadata
|
||||
metadata = {
|
||||
"order_number": row.get('Order #'),
|
||||
"label_type": label_type
|
||||
}
|
||||
|
||||
# Save using FileService
|
||||
filename = f"{row['Order #']}_{label_type}.pdf"
|
||||
file_record = await self.file_service.save_file(
|
||||
db=db,
|
||||
file_data=pdf_content,
|
||||
filename=filename,
|
||||
subdir="address_labels",
|
||||
file_type="address_label",
|
||||
content_type="application/pdf",
|
||||
metadata=metadata
|
||||
)
|
||||
|
||||
return file_record
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error generating label for order {row.get('Order #', 'unknown')}: {str(e)}")
|
||||
|
@@ -1,12 +1,15 @@
|
||||
from typing import Type, TypeVar, Generic, List, Optional, Any
|
||||
from sqlalchemy.orm import Session
|
||||
from app.db.database import Base
|
||||
from app.services.service_manager import ServiceManager
|
||||
|
||||
T = TypeVar('T')
|
||||
|
||||
class BaseService(Generic[T]):
|
||||
def __init__(self, model: Type[T]):
    """Store the model class and set up lazy sibling-service resolution.

    Args:
        model: SQLAlchemy model class this service operates on. May be
            None for services that do not map to a single table.
    """
    self.model = model
    # Shared registry used to resolve other services on demand.
    self.service_manager = ServiceManager()
    # Cache of services already resolved via get_service().
    self._services = {}
|
||||
|
||||
def get(self, db: Session, id: int) -> Optional[T]:
    """Fetch a single record by primary key, or None if absent."""
    base_query = db.query(self.model)
    by_id = base_query.filter(self.model.id == id)
    return by_id.first()
|
||||
@@ -36,4 +39,15 @@ class BaseService(Generic[T]):
|
||||
db.delete(obj)
|
||||
db.commit()
|
||||
return True
|
||||
return False
|
||||
return False
|
||||
|
||||
def get_service(self, name: str) -> Any:
    """Resolve a service by name, caching the instance after first lookup."""
    try:
        # Fast path: already resolved once before.
        return self._services[name]
    except KeyError:
        service = self.service_manager.get_service(name)
        self._services[name] = service
        return service
|
||||
|
||||
@property
def file_service(self):
    """Convenience accessor for the shared file service.

    Equivalent to ``self.get_service('file')``; the instance is resolved
    lazily and cached by ``get_service``.
    """
    return self.get_service('file')
|
@@ -2,6 +2,11 @@ from typing import Any, Dict, Optional, Union
|
||||
import aiohttp
|
||||
import logging
|
||||
import json
|
||||
import csv
|
||||
import io
|
||||
from app.services.service_manager import ServiceManager
|
||||
from app.schemas.file import FileInDB
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -10,6 +15,8 @@ class BaseExternalService:
|
||||
self.base_url = base_url
|
||||
self.api_key = api_key
|
||||
self.session = None
|
||||
self.service_manager = ServiceManager()
|
||||
self._services = {}
|
||||
|
||||
async def _get_session(self) -> aiohttp.ClientSession:
|
||||
if self.session is None or self.session.closed:
|
||||
@@ -73,4 +80,36 @@ class BaseExternalService:
|
||||
if self.session and not self.session.closed:
|
||||
await self.session.close()
|
||||
self.session = None
|
||||
logger.info(f"Closed session for {self.__class__.__name__}")
|
||||
logger.info(f"Closed session for {self.__class__.__name__}")
|
||||
|
||||
def get_service(self, name: str) -> Any:
    """Get a service by name with lazy loading.

    Args:
        name: Registry key of the service to resolve.

    Returns:
        The cached instance if already resolved; otherwise a newly
        resolved instance from the service manager (then cached).
    """
    if name not in self._services:
        self._services[name] = self.service_manager.get_service(name)
    return self._services[name]
|
||||
|
||||
@property
def file_service(self):
    """Convenience accessor for the shared file service.

    Equivalent to ``self.get_service('file')``.
    """
    return self.get_service('file')
|
||||
|
||||
async def save_file(self, db: Session, file_data: Union[bytes, list[dict]], file_name: str, subdir: str, file_type: Optional[str] = None) -> FileInDB:
    """Persist raw bytes or tabular rows via the shared FileService.

    Args:
        db: Database session used to record the file.
        file_data: Raw bytes, or a list of dicts to serialize as CSV
            (headers are taken from the first row's keys).
        file_name: Name to store the file under.
        subdir: Cache subdirectory relative to the file service root.
        file_type: Optional type tag; defaults to 'text/csv' for row data.

    Returns:
        FileInDB record describing the saved file.

    Raises:
        ValueError: If an empty row list is given (no CSV header derivable).
    """
    if isinstance(file_data, list):
        # An empty list has no first row to derive CSV headers from;
        # fail loudly instead of raising an opaque IndexError.
        if not file_data:
            raise ValueError("file_data row list is empty; cannot build CSV")
        # Convert list of dictionaries to CSV bytes
        output = io.StringIO()
        writer = csv.DictWriter(output, fieldnames=file_data[0].keys())
        writer.writeheader()
        writer.writerows(file_data)
        file_data = output.getvalue().encode('utf-8')
        file_type = file_type or 'text/csv'

    # Delegate the actual write and DB record creation to the FileService.
    file_service = self.get_service('file')
    return await file_service.save_file(
        db=db,
        file_data=file_data,
        filename=file_name,
        subdir=subdir,
        file_type=file_type
    )
|
@@ -11,9 +11,12 @@ from datetime import datetime
|
||||
from app.models.mtgjson_card import MTGJSONCard
|
||||
from app.models.mtgjson_sku import MTGJSONSKU
|
||||
from app.db.database import get_db, transaction
|
||||
from app.services.external_api.base_external_service import BaseExternalService
|
||||
from app.schemas.file import FileInDB
|
||||
|
||||
class MTGJSONService:
|
||||
class MTGJSONService(BaseExternalService):
|
||||
def __init__(self, cache_dir: str = "app/data/cache/mtgjson", batch_size: int = 1000):
|
||||
super().__init__(base_url="https://mtgjson.com/api/v5/")
|
||||
self.cache_dir = cache_dir
|
||||
self.identifiers_dir = os.path.join(cache_dir, "identifiers")
|
||||
self.skus_dir = os.path.join(cache_dir, "skus")
|
||||
@@ -38,27 +41,22 @@ class MTGJSONService:
|
||||
"""Print progress message with flush"""
|
||||
print(message, end=end, flush=True)
|
||||
|
||||
async def _download_file(self, url: str, output_path: str) -> None:
|
||||
"""Download a file from the given URL to the specified path using streaming"""
|
||||
async def _download_file(self, db: Session, url: str, filename: str, subdir: str) -> FileInDB:
|
||||
"""Download a file from the given URL and save it using FileService"""
|
||||
print(f"Downloading {url}...")
|
||||
start_time = time.time()
|
||||
total_size = 0
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(url) as response:
|
||||
if response.status == 200:
|
||||
total_size = int(response.headers.get('content-length', 0))
|
||||
with open(output_path, 'wb') as f:
|
||||
downloaded = 0
|
||||
async for chunk in response.content.iter_chunked(8192):
|
||||
f.write(chunk)
|
||||
downloaded += len(chunk)
|
||||
if total_size > 0:
|
||||
percent = (downloaded / total_size) * 100
|
||||
elapsed = time.time() - start_time
|
||||
speed = downloaded / elapsed / 1024 / 1024 # MB/s
|
||||
print(f"\rDownloading: {percent:.1f}% ({downloaded/1024/1024:.1f}MB/{total_size/1024/1024:.1f}MB) at {speed:.1f}MB/s", end="")
|
||||
print("\nDownload complete!")
|
||||
file_data = await response.read()
|
||||
return await self.save_file(
|
||||
db=db,
|
||||
file_data=file_data,
|
||||
file_name=filename,
|
||||
subdir=f"mtgjson/{subdir}",
|
||||
file_type=response.headers.get('content-type', 'application/octet-stream')
|
||||
)
|
||||
else:
|
||||
raise Exception(f"Failed to download file from {url}. Status: {response.status}")
|
||||
|
||||
@@ -153,14 +151,16 @@ class MTGJSONService:
|
||||
self._print_progress("Starting MTGJSON identifiers processing...")
|
||||
start_time = time.time()
|
||||
|
||||
zip_path = os.path.join(self.identifiers_dir, "AllIdentifiers.json.zip")
|
||||
await self._download_file(
|
||||
"https://mtgjson.com/api/v5/AllIdentifiers.json.zip",
|
||||
zip_path
|
||||
# Download the file using FileService
|
||||
file_record = await self._download_file(
|
||||
db=db,
|
||||
url="https://mtgjson.com/api/v5/AllIdentifiers.json.zip",
|
||||
filename="AllIdentifiers.json.zip",
|
||||
subdir="identifiers"
|
||||
)
|
||||
|
||||
self._print_progress("Unzipping file...")
|
||||
json_path = await self._unzip_file(zip_path, self.identifiers_dir)
|
||||
|
||||
# Get the file path from the database record
|
||||
zip_path = file_record.path
|
||||
|
||||
cards_processed = 0
|
||||
current_batch = []
|
||||
@@ -169,7 +169,7 @@ class MTGJSONService:
|
||||
|
||||
self._print_progress("Processing cards...")
|
||||
try:
|
||||
for item in self._stream_json_file(json_path):
|
||||
for item in self._stream_json_file(zip_path):
|
||||
if item["type"] == "meta":
|
||||
self._print_progress(f"Processing MTGJSON data version {item['data'].get('version')} from {item['data'].get('date')}")
|
||||
continue
|
||||
@@ -239,14 +239,16 @@ class MTGJSONService:
|
||||
self._print_progress("Starting MTGJSON SKUs processing...")
|
||||
start_time = time.time()
|
||||
|
||||
zip_path = os.path.join(self.skus_dir, "TcgplayerSkus.json.zip")
|
||||
await self._download_file(
|
||||
"https://mtgjson.com/api/v5/TcgplayerSkus.json.zip",
|
||||
zip_path
|
||||
# Download the file using FileService
|
||||
file_record = await self._download_file(
|
||||
db=db,
|
||||
url="https://mtgjson.com/api/v5/TcgplayerSkus.json.zip",
|
||||
filename="TcgplayerSkus.json.zip",
|
||||
subdir="skus"
|
||||
)
|
||||
|
||||
self._print_progress("Unzipping file...")
|
||||
json_path = await self._unzip_file(zip_path, self.skus_dir)
|
||||
|
||||
# Get the file path from the database record
|
||||
zip_path = file_record.path
|
||||
|
||||
skus_processed = 0
|
||||
current_batch = []
|
||||
@@ -255,7 +257,7 @@ class MTGJSONService:
|
||||
|
||||
self._print_progress("Processing SKUs...")
|
||||
try:
|
||||
for item in self._stream_json_file(json_path):
|
||||
for item in self._stream_json_file(zip_path):
|
||||
if item["type"] == "meta":
|
||||
self._print_progress(f"Processing MTGJSON SKUs version {item['data'].get('version')} from {item['data'].get('date')}")
|
||||
continue
|
||||
|
@@ -10,6 +10,7 @@ from app.db.database import get_db, transaction
|
||||
from sqlalchemy.orm import Session
|
||||
import py7zr
|
||||
import os
|
||||
from app.schemas.file import FileInDB
|
||||
|
||||
class TCGCSVService(BaseExternalService):
|
||||
def __init__(self):
|
||||
@@ -37,32 +38,28 @@ class TCGCSVService(BaseExternalService):
|
||||
endpoint = "tcgplayer/categories"
|
||||
return await self._make_request("GET", endpoint)
|
||||
|
||||
async def get_archived_prices_for_date(self, date_str: str):
|
||||
async def get_archived_prices_for_date(self, db: Session, date_str: str) -> str:
|
||||
"""Fetch archived prices from TCGCSV API"""
|
||||
# Check if the date directory already exists
|
||||
extract_path = f"app/data/cache/tcgcsv/prices/{date_str}"
|
||||
if os.path.exists(extract_path):
|
||||
print(f"Prices for date {date_str} already exist, skipping download")
|
||||
return date_str
|
||||
|
||||
# Download the archive file
|
||||
endpoint = f"archive/tcgplayer/prices-{date_str}.ppmd.7z"
|
||||
response = await self._make_request("GET", endpoint, binary=True)
|
||||
|
||||
# Save the archive file
|
||||
archive_path = f"app/data/cache/tcgcsv/prices/zip/prices-{date_str}.ppmd.7z"
|
||||
os.makedirs(os.path.dirname(archive_path), exist_ok=True)
|
||||
with open(archive_path, "wb") as f:
|
||||
f.write(response)
|
||||
|
||||
# Save the archive file using FileService
|
||||
file_record = await self.save_file(
|
||||
db=db,
|
||||
file_data=response,
|
||||
file_name=f"prices-{date_str}.ppmd.7z",
|
||||
subdir=f"tcgcsv/prices/zip",
|
||||
file_type="application/x-7z-compressed"
|
||||
)
|
||||
|
||||
# Extract the 7z file
|
||||
with py7zr.SevenZipFile(archive_path, 'r') as archive:
|
||||
with py7zr.SevenZipFile(file_record.path, 'r') as archive:
|
||||
# Extract to a directory named after the date
|
||||
extract_path = f"app/data/cache/tcgcsv/prices/{date_str}"
|
||||
os.makedirs(extract_path, exist_ok=True)
|
||||
archive.extractall(path=extract_path)
|
||||
|
||||
# The extracted files will be in a directory structure like:
|
||||
# {date_str}/{game_id}/{group_id}/prices
|
||||
return date_str
|
||||
|
||||
async def get_archived_prices_for_date_range(self, start_date: str, end_date: str):
|
||||
|
@@ -1,4 +1,4 @@
|
||||
from typing import Any, Dict, Optional, Union
|
||||
from typing import Any, Dict, Optional, Union, Literal
|
||||
import logging
|
||||
from app.services.external_api.tcgplayer.base_tcgplayer_service import BaseTCGPlayerService
|
||||
from app.schemas.tcgplayer import (
|
||||
@@ -21,7 +21,8 @@ from app.db.database import transaction
|
||||
import os
|
||||
import csv
|
||||
import io
|
||||
|
||||
from app.schemas.file import FileInDB
|
||||
from datetime import datetime
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class OrderManagementService(BaseTCGPlayerService):
|
||||
@@ -87,7 +88,12 @@ class OrderManagementService(BaseTCGPlayerService):
|
||||
response = await self._make_request("GET", f"/{order_id}{self.API_VERSION}")
|
||||
return response
|
||||
|
||||
async def get_packing_slip(self, order_ids: list[str]):
|
||||
async def get_or_create_packing_slip(self, db: Session, order_ids: list[str]) -> FileInDB:
|
||||
# check if the file already exists
|
||||
file_service = self.get_service('file')
|
||||
file = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "packing_slip", "application/pdf")
|
||||
if file:
|
||||
return file
|
||||
payload = {
|
||||
"sortingType": "byRelease",
|
||||
"format": "default",
|
||||
@@ -95,40 +101,53 @@ class OrderManagementService(BaseTCGPlayerService):
|
||||
"orderNumbers": order_ids
|
||||
}
|
||||
response = await self._make_request("POST", self.packing_slip_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
|
||||
return response
|
||||
return await file_service.save_file(
|
||||
db=db,
|
||||
file_data=response,
|
||||
filename=f"packing_slip_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.pdf",
|
||||
subdir='tcgplayer/packing_slips/pdf',
|
||||
file_type='packing_slip',
|
||||
content_type='application/pdf',
|
||||
metadata={"order_ids": order_ids}
|
||||
)
|
||||
|
||||
async def get_pull_sheet(self, order_ids: list[str]):
|
||||
async def get_pull_sheet(self, db: Session, order_ids: list[str]) -> FileInDB:
|
||||
payload = {
|
||||
"orderNumbers": order_ids,
|
||||
"timezoneOffset": -4
|
||||
}
|
||||
response = await self._make_request("POST", self.pull_sheet_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
|
||||
return response
|
||||
# get file service
|
||||
file_service = self.get_service('file')
|
||||
# save file
|
||||
return await file_service.save_file(
|
||||
db=db,
|
||||
file_data=response,
|
||||
filename=f"pull_sheet_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.csv",
|
||||
subdir='tcgplayer/pull_sheets/csv',
|
||||
file_type='pull_sheet',
|
||||
content_type='text/csv',
|
||||
metadata={"order_ids": order_ids}
|
||||
)
|
||||
|
||||
async def get_shipping_csv(self, order_ids: list[str]):
|
||||
async def get_shipping_csv(self, db: Session, order_ids: list[str]) -> FileInDB:
|
||||
payload = {
|
||||
"orderNumbers": order_ids,
|
||||
"timezoneOffset": -4
|
||||
}
|
||||
response = await self._make_request("POST", self.shipping_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
|
||||
return response
|
||||
|
||||
async def save_file(self, file_data: Union[bytes, list[dict]], file_name: str) -> str:
|
||||
if not os.path.exists("app/data/cache/tcgplayer/orders"):
|
||||
os.makedirs("app/data/cache/tcgplayer/orders")
|
||||
file_path = f"app/data/cache/tcgplayer/orders/{file_name}"
|
||||
|
||||
if isinstance(file_data, list):
|
||||
# Convert list of dictionaries to CSV bytes
|
||||
output = io.StringIO()
|
||||
writer = csv.DictWriter(output, fieldnames=file_data[0].keys())
|
||||
writer.writeheader()
|
||||
writer.writerows(file_data)
|
||||
file_data = output.getvalue().encode('utf-8')
|
||||
|
||||
with open(file_path, "wb") as f:
|
||||
f.write(file_data)
|
||||
return file_path
|
||||
# get file service
|
||||
file_service = self.get_service('file')
|
||||
# save file
|
||||
return await file_service.save_file(
|
||||
db=db,
|
||||
file_data=response,
|
||||
filename=f"shipping_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.csv",
|
||||
subdir='tcgplayer/shipping/csv',
|
||||
file_type='shipping_csv',
|
||||
content_type='text/csv',
|
||||
metadata={"order_ids": order_ids}
|
||||
)
|
||||
|
||||
async def save_order_to_db(self, order: dict, db: Session):
|
||||
# Parse API response using our API schema
|
||||
|
@@ -1,21 +1,33 @@
|
||||
from typing import Dict, List, Optional
|
||||
from app.services.external_api.tcgplayer.base_tcgplayer_service import BaseTCGPlayerService
|
||||
from sqlalchemy.orm import Session
|
||||
from app.schemas.file import FileInDB
|
||||
|
||||
class TCGPlayerInventoryService(BaseTCGPlayerService):
|
||||
def __init__(self):
|
||||
super().__init__()
|
||||
|
||||
async def get_tcgplayer_export(self, export_type: str):
|
||||
async def get_tcgplayer_export(self, db: Session, export_type: str) -> FileInDB:
|
||||
"""
|
||||
Get a TCGPlayer Staged Inventory Export, Live Inventory Export, or Pricing Export
|
||||
"""
|
||||
if export_type == "staged":
|
||||
endpoint = self.staged_inventory_endpoint
|
||||
file_type = "text/csv"
|
||||
elif export_type == "live":
|
||||
endpoint = self.live_inventory_endpoint
|
||||
file_type = "text/csv"
|
||||
elif export_type == "pricing":
|
||||
endpoint = self.pricing_export_endpoint
|
||||
file_type = "text/csv"
|
||||
else:
|
||||
raise ValueError(f"Invalid export type: {export_type}, must be 'staged', 'live', or 'pricing'")
|
||||
|
||||
file_bytes = await self._make_request("GET", endpoint, download_file=True)
|
||||
return file_bytes
|
||||
return await self.save_file(
|
||||
db=db,
|
||||
file_data=file_bytes,
|
||||
file_name=f"tcgplayer_{export_type}_export.csv",
|
||||
subdir="tcgplayer/inventory",
|
||||
file_type=file_type
|
||||
)
|
@@ -18,129 +18,4 @@ class FileProcessingService:
|
||||
|
||||
def _get_cache_path(self, filename: str) -> str:
|
||||
return os.path.join(self.cache_dir, filename)
|
||||
|
||||
async def _cache_export(self, file_bytes: bytes, export_type: str):
    """Write export bytes to the local cache as '<export_type>_export.csv'."""
    cache_path = self._get_cache_path(f"{export_type}_export.csv")
    # NOTE(review): synchronous file write inside an async method — assumes
    # export files are small enough not to stall the event loop; confirm.
    with open(cache_path, 'wb') as f:
        f.write(file_bytes)
|
||||
|
||||
async def _load_cached_export(self, export_type: str) -> Optional[bytes]:
|
||||
cache_path = self._get_cache_path(f"{export_type}_export.csv")
|
||||
if os.path.exists(cache_path):
|
||||
with open(cache_path, 'rb') as f:
|
||||
return f.read()
|
||||
return None
|
||||
|
||||
async def process_tcgplayer_export(self, db: Session, file_bytes: bytes, export_type: str = "live", use_cache: bool = False) -> dict:
    """
    Process a TCGPlayer export file and load it into the inventory table.

    Args:
        db: Database session
        file_bytes: The downloaded file content as bytes
        export_type: Type of export (staged, live, pricing)
        use_cache: Whether to use cached export file for development

    Returns:
        dict: Processing statistics
    """
    # Counters returned to the caller; error_messages carries one
    # human-readable note per failed row.
    stats = {
        "total_rows": 0,
        "processed_rows": 0,
        "errors": 0,
        "error_messages": []
    }

    try:
        # For development, use cached file if available
        if use_cache:
            cached_bytes = await self._load_cached_export(export_type)
            if cached_bytes:
                file_bytes = cached_bytes
            else:
                # Nothing cached yet: seed the cache with this download.
                await self._cache_export(file_bytes, export_type)

        # Convert bytes to string and create a file-like object
        file_content = file_bytes.decode('utf-8')
        file_like = io.StringIO(file_content)

        # Read CSV file
        csv_reader = csv.DictReader(file_like)

        # NOTE(review): transaction() appears to commit all row changes as
        # one unit — confirm its semantics in app.db.database.
        with transaction(db):
            for row in csv_reader:
                stats["total_rows"] += 1
                try:
                    # Process each row and create/update inventory item in database
                    inventory_data = self._map_tcgplayer_row_to_inventory(row)
                    tcgplayer_id = inventory_data["tcgplayer_id"]

                    # Check if inventory item already exists
                    existing_item = self.inventory_service.get_by_tcgplayer_id(db, tcgplayer_id)

                    # Find matching TCGPlayer product
                    product_id = int(tcgplayer_id) if tcgplayer_id.isdigit() else None
                    if product_id:
                        tcg_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == product_id).first()
                        if tcg_product:
                            # Update inventory data with product information if available
                            inventory_data.update({
                                "product_name": tcg_product.name,
                                "photo_url": tcg_product.image_url,
                                "rarity": tcg_product.ext_rarity,
                                "number": tcg_product.ext_number
                            })

                    if existing_item:
                        # Update existing item
                        self.inventory_service.update(db, existing_item, inventory_data)
                    else:
                        # Create new item
                        self.inventory_service.create(db, inventory_data)

                    stats["processed_rows"] += 1
                except Exception as e:
                    # Row-level failures are recorded and counted but do not
                    # abort the import loop.
                    stats["errors"] += 1
                    stats["error_messages"].append(f"Error processing row {stats['total_rows']}: {str(e)}")

        return stats

    except Exception as e:
        raise Exception(f"Failed to process TCGPlayer export: {str(e)}")
|
||||
|
||||
def _map_tcgplayer_row_to_inventory(self, row: dict) -> dict:
|
||||
"""
|
||||
Map TCGPlayer export row to inventory model fields.
|
||||
"""
|
||||
def safe_float(value: str) -> float:
|
||||
"""Convert string to float, returning 0.0 for empty strings or invalid values"""
|
||||
try:
|
||||
return float(value) if value else 0.0
|
||||
except ValueError:
|
||||
return 0.0
|
||||
|
||||
def safe_int(value: str) -> int:
|
||||
"""Convert string to int, returning 0 for empty strings or invalid values"""
|
||||
try:
|
||||
return int(value) if value else 0
|
||||
except ValueError:
|
||||
return 0
|
||||
|
||||
return {
|
||||
"tcgplayer_id": row.get("TCGplayer Id", ""),
|
||||
"product_line": row.get("Product Line", ""),
|
||||
"set_name": row.get("Set Name", ""),
|
||||
"product_name": row.get("Product Name", ""),
|
||||
"title": row.get("Title", ""),
|
||||
"number": row.get("Number", ""),
|
||||
"rarity": row.get("Rarity", ""),
|
||||
"condition": row.get("Condition", ""),
|
||||
"tcg_market_price": safe_float(row.get("TCG Market Price", "")),
|
||||
"tcg_direct_low": safe_float(row.get("TCG Direct Low", "")),
|
||||
"tcg_low_price_with_shipping": safe_float(row.get("TCG Low Price With Shipping", "")),
|
||||
"tcg_low_price": safe_float(row.get("TCG Low Price", "")),
|
||||
"total_quantity": safe_int(row.get("Total Quantity", "")),
|
||||
"add_to_quantity": safe_int(row.get("Add to Quantity", "")),
|
||||
"tcg_marketplace_price": safe_float(row.get("TCG Marketplace Price", "")),
|
||||
"photo_url": row.get("Photo URL", "")
|
||||
}
|
||||
|
||||
|
152
app/services/file_service.py
Normal file
152
app/services/file_service.py
Normal file
@@ -0,0 +1,152 @@
|
||||
from typing import Optional, Union, List, Dict
|
||||
import os
|
||||
from pathlib import Path
|
||||
from datetime import datetime
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy import String
|
||||
from app.models.file import File
|
||||
from app.schemas.file import FileBase, FileCreate, FileInDB
|
||||
from app.db.database import transaction
|
||||
import logging
|
||||
import asyncio
|
||||
from weasyprint import HTML
|
||||
import json
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class FileService:
    """Persist files under a cache directory and track them in the database.

    Every saved file gets a ``File`` row so it can later be looked up by
    id, logical type, or metadata.
    """

    def __init__(self, base_cache_dir: str = "app/data/cache"):
        # Root directory for all cached files; created eagerly.
        self.base_cache_dir = Path(base_cache_dir)
        self.base_cache_dir.mkdir(parents=True, exist_ok=True)

    def _get_cache_path(self, subdir: str, filename: str) -> Path:
        """Get the full cache path for a file, creating the subdir if needed."""
        cache_dir = self.base_cache_dir / subdir
        cache_dir.mkdir(parents=True, exist_ok=True)
        return cache_dir / filename

    async def save_file(
        self,
        db: Session,
        file_data: Union[bytes, str],
        filename: str,
        subdir: str,
        file_type: Optional[str] = None,
        content_type: Optional[str] = None,
        metadata: Optional[Dict] = None,
        html_content: Optional[bool] = False
    ) -> FileInDB:
        """Save a file to the cache directory and create a database record.

        Args:
            db: Database session.
            file_data: Bytes or text to write; when ``html_content`` is True,
                an HTML string rendered to PDF via WeasyPrint.
            filename: Name of the file within ``subdir``.
            subdir: Cache subdirectory relative to the base cache dir.
            file_type: Logical type tag (e.g. 'packing_slip').
            content_type: MIME type (e.g. 'application/pdf').
            metadata: JSON-serializable metadata stored with the record.
            html_content: Render ``file_data`` (an HTML string) to PDF.

        Returns:
            FileInDB: Validated schema for the created record.
        """
        try:
            # BUGFIX: get_file_by_metadata compares order_ids against a
            # *sorted* JSON array, so normalize here or lookups never match.
            # A copy is made to avoid mutating the caller's dict.
            if metadata and isinstance(metadata.get("order_ids"), list):
                metadata = {**metadata, "order_ids": sorted(metadata["order_ids"])}

            # Get the full cache path
            cache_path = self._get_cache_path(subdir, filename)

            if html_content and isinstance(file_data, str):
                # WeasyPrint rendering is CPU-bound; run it off the event loop.
                await asyncio.get_event_loop().run_in_executor(
                    None,
                    lambda: HTML(string=file_data).write_pdf(str(cache_path))
                )
            else:
                # Write text or binary data directly.
                if isinstance(file_data, str):
                    mode = 'w'
                    encoding = 'utf-8'
                else:
                    mode = 'wb'
                    encoding = None

                with open(cache_path, mode, encoding=encoding) as f:
                    f.write(file_data)

            # Create database record
            file_record = File(
                name=filename,
                path=str(cache_path),
                file_type=file_type,
                content_type=content_type,
                size=os.path.getsize(cache_path),
                file_metadata=metadata
            )

            db.add(file_record)
            db.commit()
            db.refresh(file_record)

            return FileInDB.model_validate(file_record)

        except Exception as e:
            # Include the filename so failures are attributable in logs.
            logger.error(f"Error saving file {filename}: {str(e)}")
            raise

    async def get_file(self, db: Session, file_id: int) -> Optional[FileInDB]:
        """Get a file record from the database by id, or None if absent."""
        file_record = db.query(File).filter(File.id == file_id).first()
        if file_record:
            return FileInDB.model_validate(file_record)
        return None

    async def get_file_path(self, db: Session, file_id: int) -> Optional[Path]:
        """Get the on-disk path for a file id.

        Returns None when either the record or the underlying file is missing.
        """
        file_record = await self.get_file(db, file_id)
        if file_record and os.path.exists(file_record.path):
            return Path(file_record.path)
        return None

    async def delete_file(self, db: Session, file_id: int) -> bool:
        """Delete a file and its database record.

        Returns:
            True if a record existed and was deleted, False otherwise.
        """
        try:
            file_record = db.query(File).filter(File.id == file_id).first()
            if file_record:
                # Delete the file if it exists
                if os.path.exists(file_record.path):
                    os.remove(file_record.path)
                # Delete the database record
                db.delete(file_record)
                db.commit()
                return True
            return False
        except Exception as e:
            logger.error(f"Error deleting file {file_id}: {str(e)}")
            raise

    async def list_files(
        self,
        db: Session,
        skip: int = 0,
        limit: int = 100,
        file_type: Optional[str] = None
    ) -> List[FileInDB]:
        """List files with optional filtering by logical file type."""
        query = db.query(File)
        if file_type:
            # BUGFIX: the model field is ``file_type`` (as used in save_file
            # and get_file_by_metadata); ``File.type`` does not exist.
            query = query.filter(File.file_type == file_type)
        files = query.offset(skip).limit(limit).all()
        return [FileInDB.model_validate(file) for file in files]

    async def get_file_by_metadata(
        self,
        db: Session,
        metadata_key: str,
        metadata_value: Union[str, List[str]],
        file_type: Optional[str] = None,
        content_type: Optional[str] = None) -> Optional[FileInDB]:
        """Find the first file whose metadata entry matches the given value.

        ``order_ids`` lists are compared order-insensitively: both the query
        value and (via save_file's normalization) the stored value are sorted
        before comparison.
        """
        # Handle array comparison for order_ids
        if metadata_key == "order_ids" and isinstance(metadata_value, list):
            # Sort and convert to JSON string for consistent comparison
            sorted_value = sorted(metadata_value)
            query = db.query(File).filter(
                File.file_metadata[metadata_key].cast(String) == json.dumps(sorted_value)
            )
        else:
            query = db.query(File).filter(File.file_metadata[metadata_key].cast(String) == str(metadata_value))

        if file_type:
            query = query.filter(File.file_type == file_type)
        if content_type:
            query = query.filter(File.content_type == content_type)
        file_record = query.first()
        if file_record:
            return FileInDB.model_validate(file_record)
        else:
            return None
|
@@ -9,6 +9,7 @@ import asyncio
|
||||
import time
|
||||
from PIL import Image
|
||||
from contextlib import asynccontextmanager
|
||||
from app.schemas.file import FileInDB
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
@@ -141,11 +142,11 @@ class LabelPrinterService:
|
||||
logger.error(f"Unexpected error in _send_print_request: {e}")
|
||||
return False
|
||||
|
||||
async def print_file(self, file_path: Union[str, Path], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label"]] = None) -> bool:
|
||||
async def print_file(self, file_path: Union[str, Path, FileInDB], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label"]] = None) -> bool:
|
||||
"""Print a PDF or PNG file to the label printer.
|
||||
|
||||
Args:
|
||||
file_path: Path to the PDF or PNG file
|
||||
file_path: Path to the PDF or PNG file, or a FileInDB object
|
||||
label_size: Size of label to use ("dk1201" or "dk1241")
|
||||
label_type: Type of label to use ("address_label" or "packing_slip" or "set_label")
|
||||
|
||||
@@ -158,6 +159,10 @@ class LabelPrinterService:
|
||||
logger.error("No file path provided")
|
||||
return False
|
||||
|
||||
# Handle FileInDB objects
|
||||
if isinstance(file_path, FileInDB):
|
||||
file_path = file_path.path
|
||||
|
||||
file_path = Path(file_path)
|
||||
if not file_path.exists():
|
||||
logger.error(f"File not found: {file_path}")
|
||||
|
@@ -6,29 +6,49 @@ from jinja2 import Environment, FileSystemLoader
|
||||
from weasyprint import HTML
|
||||
import logging
|
||||
import asyncio
|
||||
from app.schemas.file import FileInDB
|
||||
from app.services.base_service import BaseService
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class PullSheetService:
|
||||
class PullSheetService(BaseService):
|
||||
def __init__(self):
|
||||
super().__init__(None)
|
||||
self.template_dir = Path("app/data/assets/templates")
|
||||
self.env = Environment(loader=FileSystemLoader(str(self.template_dir)))
|
||||
self.template = self.env.get_template("pull_sheet.html")
|
||||
self.output_dir = Path("app/data/cache/tcgplayer/pull_sheets")
|
||||
self.output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
async def generate_pull_sheet_pdf(self, csv_path: str) -> str:
|
||||
|
||||
async def get_or_create_rendered_pull_sheet(self, db: Session, order_ids: list[str]) -> FileInDB:
|
||||
# get file service
|
||||
file_service = self.get_service('file')
|
||||
# check if rendered pull sheet exists
|
||||
rendered_pull_sheet = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "rendered_pull_sheet", "application/pdf")
|
||||
if rendered_pull_sheet:
|
||||
return rendered_pull_sheet
|
||||
# check if pull sheet data file exists
|
||||
pull_sheet_data_file = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "pull_sheet", "text/csv")
|
||||
if pull_sheet_data_file:
|
||||
# generate pdf from pull sheet data file
|
||||
return await self.generate_pull_sheet_pdf(db, pull_sheet_data_file)
|
||||
# if no pull sheet data file exists, get it from order management service
|
||||
order_service = self.get_service('order_management')
|
||||
pull_sheet_data_file = await order_service.get_pull_sheet(db, order_ids)
|
||||
return await self.generate_pull_sheet_pdf(db, pull_sheet_data_file)
|
||||
|
||||
async def generate_pull_sheet_pdf(self, db: Session, file: FileInDB) -> FileInDB:
|
||||
"""Generate a PDF pull sheet from a CSV file.
|
||||
|
||||
Args:
|
||||
csv_path: Path to the CSV file containing pull sheet data
|
||||
file: FileInDB object containing the pull sheet data
|
||||
|
||||
Returns:
|
||||
Path to the generated PDF file
|
||||
"""
|
||||
try:
|
||||
# Read and process CSV data
|
||||
items = await self._read_and_process_csv(csv_path)
|
||||
items = await self._read_and_process_csv(file.path)
|
||||
|
||||
# Prepare template data
|
||||
template_data = {
|
||||
@@ -38,16 +58,24 @@ class PullSheetService:
|
||||
|
||||
# Render HTML
|
||||
html_content = self.template.render(**template_data)
|
||||
|
||||
# Generate PDF in a separate thread to avoid blocking
|
||||
pdf_path = self.output_dir / f"pull_sheet_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf"
|
||||
await asyncio.get_event_loop().run_in_executor(
|
||||
None,
|
||||
lambda: HTML(string=html_content).write_pdf(str(pdf_path))
|
||||
|
||||
# Ensure metadata is properly formatted
|
||||
metadata = file.file_metadata.copy() if file.file_metadata else {}
|
||||
if 'order_ids' in metadata:
|
||||
metadata['order_ids'] = sorted(metadata['order_ids'])
|
||||
|
||||
file_service = self.get_service('file')
|
||||
return await file_service.save_file(
|
||||
db=db,
|
||||
file_data=html_content,
|
||||
filename=f"rendered_pull_sheet_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf",
|
||||
subdir="tcgplayer/pull_sheets/rendered",
|
||||
file_type="rendered_pull_sheet",
|
||||
content_type="application/pdf",
|
||||
metadata=metadata,
|
||||
html_content=True # This tells FileService to convert HTML to PDF
|
||||
)
|
||||
|
||||
return str(pdf_path)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error generating pull sheet PDF: {str(e)}")
|
||||
raise
|
||||
@@ -68,7 +96,7 @@ class PullSheetService:
|
||||
)
|
||||
|
||||
# Filter out the "Orders Contained in Pull Sheet" row
|
||||
df = df[df['Product Line'] != 'Orders Contained in Pull Sheet:']
|
||||
df = df[df['Product Line'] != 'Orders Contained in Pull Sheet:'].copy()
|
||||
|
||||
# Convert Set Release Date to datetime
|
||||
df['Set Release Date'] = pd.to_datetime(df['Set Release Date'], format='%m/%d/%Y %H:%M:%S')
|
||||
|
@@ -1,4 +1,4 @@
|
||||
from app.db.database import get_db, transaction
|
||||
from app.db.database import transaction, get_db
|
||||
from app.services.scheduler.base_scheduler import BaseScheduler
|
||||
import logging
|
||||
|
||||
@@ -16,41 +16,12 @@ class SchedulerService:
|
||||
from app.services.service_manager import ServiceManager
|
||||
self._service_manager = ServiceManager()
|
||||
return self._service_manager
|
||||
|
||||
async def process_tcgplayer_export(self, export_type: str = "live", use_cache: bool = False):
|
||||
"""
|
||||
Process TCGPlayer export as a scheduled task.
|
||||
|
||||
Args:
|
||||
export_type: Type of export to process (staged, live, or pricing)
|
||||
"""
|
||||
db = get_db()
|
||||
try:
|
||||
logger.info(f"Starting scheduled TCGPlayer export processing for {export_type}")
|
||||
|
||||
# Get services
|
||||
tcgplayer_service = self.service_manager.get_service('tcgplayer_inventory')
|
||||
file_processor = self.service_manager.get_service('file_processing')
|
||||
|
||||
# Download the file
|
||||
file_bytes = await tcgplayer_service.get_tcgplayer_export(export_type)
|
||||
|
||||
# Process the file and load into database
|
||||
with transaction(db):
|
||||
stats = await file_processor.process_tcgplayer_export(db, export_type=export_type, file_bytes=file_bytes, use_cache=use_cache)
|
||||
|
||||
logger.info(f"Completed TCGPlayer export processing: {stats}")
|
||||
return stats
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing TCGPlayer export: {str(e)}")
|
||||
raise
|
||||
|
||||
async def update_open_orders_hourly(self):
|
||||
"""
|
||||
Hourly update of orders from TCGPlayer API to database
|
||||
"""
|
||||
db = get_db()
|
||||
db = next(get_db())
|
||||
try:
|
||||
logger.info("Starting hourly order update")
|
||||
# Get order management service
|
||||
@@ -68,12 +39,14 @@ class SchedulerService:
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating open orders: {str(e)}")
|
||||
raise
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
async def update_all_orders_daily(self):
|
||||
"""
|
||||
Daily update of all orders from TCGPlayer API to database
|
||||
"""
|
||||
db = get_db()
|
||||
db = next(get_db())
|
||||
try:
|
||||
logger.info("Starting daily order update")
|
||||
# Get order management service
|
||||
@@ -91,16 +64,11 @@ class SchedulerService:
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating all orders: {str(e)}")
|
||||
raise
|
||||
finally:
|
||||
db.close()
|
||||
|
||||
async def start_scheduled_tasks(self):
|
||||
"""Start all scheduled tasks"""
|
||||
# Schedule TCGPlayer export processing to run daily at 2 AM
|
||||
await self.scheduler.schedule_task(
|
||||
task_name="process_tcgplayer_export",
|
||||
func=self.process_tcgplayer_export,
|
||||
interval_seconds=24 * 60 * 60, # 24 hours
|
||||
export_type="live"
|
||||
)
|
||||
# Schedule open orders update to run hourly at 00 minutes
|
||||
await self.scheduler.schedule_task(
|
||||
task_name="update_open_orders_hourly",
|
||||
|
@@ -25,11 +25,13 @@ class ServiceManager:
|
||||
'pull_sheet': 'app.services.pull_sheet_service.PullSheetService',
|
||||
'set_label': 'app.services.set_label_service.SetLabelService',
|
||||
'data_initialization': 'app.services.data_initialization.DataInitializationService',
|
||||
'scheduler': 'app.services.scheduler.scheduler_service.SchedulerService'
|
||||
'scheduler': 'app.services.scheduler.scheduler_service.SchedulerService',
|
||||
'file': 'app.services.file_service.FileService'
|
||||
}
|
||||
self._service_configs = {
|
||||
'label_printer': {'printer_api_url': "http://192.168.1.110:8000"},
|
||||
'regular_printer': {'printer_name': "MFCL2750DW-3"}
|
||||
'regular_printer': {'printer_name': "MFCL2750DW-3"},
|
||||
'file': {'base_cache_dir': "app/data/cache"}
|
||||
}
|
||||
self._initialized = True
|
||||
|
||||
|
@@ -122,8 +122,7 @@ RENAME_SETS = {
|
||||
|
||||
|
||||
class SetLabelService(BaseService):
|
||||
DEFAULT_OUTPUT_DIR = "app/data/cache/set_labels"
|
||||
os.makedirs(DEFAULT_OUTPUT_DIR, exist_ok=True)
|
||||
DEFAULT_OUTPUT_DIR = "set_labels" # Changed to be relative to FileService's base_cache_dir
|
||||
|
||||
def __init__(self, output_dir=DEFAULT_OUTPUT_DIR):
|
||||
super().__init__(None) # BaseService doesn't need a model for this service
|
||||
@@ -131,8 +130,7 @@ class SetLabelService(BaseService):
|
||||
self.ignored_sets = IGNORED_SETS
|
||||
self.set_types = SET_TYPES
|
||||
self.minimum_set_size = MINIMUM_SET_SIZE
|
||||
self.output_dir = Path(output_dir)
|
||||
self.output_dir.mkdir(parents=True, exist_ok=True)
|
||||
self.output_dir = output_dir
|
||||
|
||||
async def get_set_data(self, session):
|
||||
log.info("Getting set data and icons from Scryfall")
|
||||
@@ -173,14 +171,8 @@ class SetLabelService(BaseService):
|
||||
log.warning(f"Failed to fetch icon from {icon_url}: {e}")
|
||||
return None
|
||||
|
||||
async def generate_label(self, session, set_data):
|
||||
output_file = self.output_dir / f"{set_data['code']}.pdf"
|
||||
|
||||
# Check if file already exists
|
||||
if output_file.exists():
|
||||
log.info(f"Label already exists for {set_data['name']} ({set_data['code']})")
|
||||
return output_file
|
||||
|
||||
async def generate_label(self, session, set_data, db: Session):
|
||||
"""Generate a label for a set and save it using FileService"""
|
||||
name = RENAME_SETS.get(set_data["name"], set_data["name"])
|
||||
icon_b64 = await self.get_set_icon(session, set_data["icon_svg_uri"])
|
||||
|
||||
@@ -192,11 +184,32 @@ class SetLabelService(BaseService):
|
||||
icon_b64=icon_b64,
|
||||
)
|
||||
|
||||
HTML(string=html_content).write_pdf(output_file)
|
||||
# Generate PDF content
|
||||
pdf_content = HTML(string=html_content).write_pdf()
|
||||
|
||||
# Save using FileService
|
||||
filename = f"{set_data['code']}.pdf"
|
||||
metadata = {
|
||||
"set_name": name,
|
||||
"set_code": set_data["code"],
|
||||
"release_date": set_data["released_at"],
|
||||
"card_count": set_data["card_count"]
|
||||
}
|
||||
|
||||
file_record = await self.file_service.save_file(
|
||||
db=db,
|
||||
file_data=pdf_content,
|
||||
filename=filename,
|
||||
subdir=self.output_dir,
|
||||
file_type="set_label",
|
||||
metadata=metadata
|
||||
)
|
||||
|
||||
log.info(f"Generated label for {name} ({set_data['code']})")
|
||||
return output_file
|
||||
return file_record
|
||||
|
||||
async def generate_labels(self, sets=None):
|
||||
async def generate_labels(self, db: Session, sets=None):
|
||||
"""Generate labels for sets and return their file records"""
|
||||
if sets:
|
||||
self.ignored_sets = ()
|
||||
self.minimum_set_size = 0
|
||||
@@ -205,7 +218,7 @@ class SetLabelService(BaseService):
|
||||
|
||||
async with aiohttp.ClientSession() as session:
|
||||
set_data = await self.get_set_data(session)
|
||||
tasks = [self.generate_label(session, exp) for exp in set_data]
|
||||
tasks = [self.generate_label(session, exp, db) for exp in set_data]
|
||||
return await asyncio.gather(*tasks)
|
||||
|
||||
async def get_available_sets(self, db: Session):
|
||||
|
Reference in New Issue
Block a user