data init idk other stuff

This commit is contained in:
zman 2025-04-18 15:19:57 -04:00
parent 8f35cedb4a
commit 03b43ce3ab
28 changed files with 3378 additions and 810 deletions


@ -0,0 +1,42 @@
"""tcg product update again
Revision ID: 1746d35187a2
Revises: 9775314e337b
Create Date: 2025-04-17 22:02:35.492726
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '1746d35187a2'
down_revision: Union[str, None] = '9775314e337b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('tcgplayer_products', sa.Column('ext_subtype', sa.String(), nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_oracle_text', sa.String(), nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_flavor_text', sa.String(), nullable=True))
op.drop_column('tcgplayer_products', 'ext_mana_cost')
op.drop_column('tcgplayer_products', 'ext_loyalty')
op.drop_column('tcgplayer_products', 'ext_mana_value')
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('tcgplayer_products', sa.Column('ext_mana_value', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_loyalty', sa.VARCHAR(), autoincrement=False, nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_mana_cost', sa.VARCHAR(), autoincrement=False, nullable=True))
op.drop_column('tcgplayer_products', 'ext_flavor_text')
op.drop_column('tcgplayer_products', 'ext_oracle_text')
op.drop_column('tcgplayer_products', 'ext_subtype')
# ### end Alembic commands ###
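For context, this and the other revisions in this commit are applied through Alembic's normal upgrade/downgrade flow; a minimal sketch using the programmatic API, assuming an alembic.ini at the project root:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")   # assumed location of the project's Alembic config
command.upgrade(cfg, "head")  # runs upgrade() for every pending revision, including 1746d35187a2
command.downgrade(cfg, "-1")  # runs downgrade() for the most recent revision only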


@ -0,0 +1,32 @@
"""tcg prices again
Revision ID: 2fcce9c8883a
Revises: b45c43900b56
Create Date: 2025-04-17 22:48:53.378544
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '2fcce9c8883a'
down_revision: Union[str, None] = 'b45c43900b56'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###


@ -0,0 +1,51 @@
"""tcg prices again 2
Revision ID: 493b2cb724d0
Revises: 2fcce9c8883a
Create Date: 2025-04-17 23:05:11.919652
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '493b2cb724d0'
down_revision: Union[str, None] = '2fcce9c8883a'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_tcgplayer_prices_date', table_name='tcgplayer_prices')
op.drop_index('ix_tcgplayer_prices_id', table_name='tcgplayer_prices')
op.drop_index('ix_tcgplayer_prices_product_id', table_name='tcgplayer_prices')
op.drop_table('tcgplayer_prices')
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tcgplayer_prices',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('product_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('mid_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('high_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('market_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('direct_low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('sub_type_name', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='tcgplayer_prices_pkey')
)
op.create_index('ix_tcgplayer_prices_product_id', 'tcgplayer_prices', ['product_id'], unique=False)
op.create_index('ix_tcgplayer_prices_id', 'tcgplayer_prices', ['id'], unique=False)
op.create_index('ix_tcgplayer_prices_date', 'tcgplayer_prices', ['date'], unique=False)
# ### end Alembic commands ###


@ -0,0 +1,53 @@
"""fuck foreign keys for real dog
Revision ID: 54cd251d13a3
Revises: e34bfa37db00
Create Date: 2025-04-17 23:10:59.010644
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '54cd251d13a3'
down_revision: Union[str, None] = 'e34bfa37db00'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index('ix_tcgplayer_price_history_date', table_name='tcgplayer_price_history')
op.drop_index('ix_tcgplayer_price_history_id', table_name='tcgplayer_price_history')
op.drop_index('ix_tcgplayer_price_history_product_id', table_name='tcgplayer_price_history')
op.drop_table('tcgplayer_price_history')
op.drop_constraint('tcgplayer_products_group_id_fkey', 'tcgplayer_products', type_='foreignkey')
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_foreign_key('tcgplayer_products_group_id_fkey', 'tcgplayer_products', 'tcgplayer_groups', ['group_id'], ['group_id'])
op.create_table('tcgplayer_price_history',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('product_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
sa.Column('low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('mid_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('high_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('market_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('direct_low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
sa.Column('sub_type_name', sa.VARCHAR(), autoincrement=False, nullable=True),
sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
sa.PrimaryKeyConstraint('id', name='tcgplayer_price_history_pkey')
)
op.create_index('ix_tcgplayer_price_history_product_id', 'tcgplayer_price_history', ['product_id'], unique=False)
op.create_index('ix_tcgplayer_price_history_id', 'tcgplayer_price_history', ['id'], unique=False)
op.create_index('ix_tcgplayer_price_history_date', 'tcgplayer_price_history', ['date'], unique=False)
# ### end Alembic commands ###


@ -0,0 +1,32 @@
"""fuck foreign keys for real dog
Revision ID: 7f309a891094
Revises: 54cd251d13a3
Create Date: 2025-04-17 23:11:55.027126
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '7f309a891094'
down_revision: Union[str, None] = '54cd251d13a3'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###


@ -0,0 +1,40 @@
"""tcg product update
Revision ID: 9775314e337b
Revises: 479003fbead7
Create Date: 2025-04-17 21:58:17.637210
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '9775314e337b'
down_revision: Union[str, None] = '479003fbead7'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('tcgplayer_products', sa.Column('ext_power', sa.String(), nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_toughness', sa.String(), nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_loyalty', sa.String(), nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_mana_cost', sa.String(), nullable=True))
op.add_column('tcgplayer_products', sa.Column('ext_mana_value', sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('tcgplayer_products', 'ext_mana_value')
op.drop_column('tcgplayer_products', 'ext_mana_cost')
op.drop_column('tcgplayer_products', 'ext_loyalty')
op.drop_column('tcgplayer_products', 'ext_toughness')
op.drop_column('tcgplayer_products', 'ext_power')
# ### end Alembic commands ###


@ -0,0 +1,51 @@
"""recreate tcgplayer price history
Revision ID: 9fb73424598c
Revises: 7f309a891094
Create Date: 2025-04-17 23:13:55.027126
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic.
revision: str = '9fb73424598c'
down_revision: Union[str, None] = '7f309a891094'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('tcgplayer_price_history',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=True),
sa.Column('date', sa.DateTime(), nullable=True),
sa.Column('low_price', sa.Float(), nullable=True),
sa.Column('mid_price', sa.Float(), nullable=True),
sa.Column('high_price', sa.Float(), nullable=True),
sa.Column('market_price', sa.Float(), nullable=True),
sa.Column('direct_low_price', sa.Float(), nullable=True),
sa.Column('sub_type_name', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_price_history_id'), 'tcgplayer_price_history', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_price_history_product_id'), 'tcgplayer_price_history', ['product_id'], unique=False)
op.create_index(op.f('ix_tcgplayer_price_history_date'), 'tcgplayer_price_history', ['date'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tcgplayer_price_history_date'), table_name='tcgplayer_price_history')
op.drop_index(op.f('ix_tcgplayer_price_history_product_id'), table_name='tcgplayer_price_history')
op.drop_index(op.f('ix_tcgplayer_price_history_id'), table_name='tcgplayer_price_history')
op.drop_table('tcgplayer_price_history')
# ### end Alembic commands ###


@ -0,0 +1,32 @@
"""tcg prices
Revision ID: b45c43900b56
Revises: 1746d35187a2
Create Date: 2025-04-17 22:47:44.405906
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'b45c43900b56'
down_revision: Union[str, None] = '1746d35187a2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###


@ -0,0 +1,32 @@
"""tcg prices again 3
Revision ID: e34bfa37db00
Revises: 493b2cb724d0
Create Date: 2025-04-17 23:05:40.805511
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'e34bfa37db00'
down_revision: Union[str, None] = '493b2cb724d0'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
pass
# ### end Alembic commands ###

app.log: 1793 lines changed (diff suppressed because it is too large)


@ -1,4 +1,4 @@
-from fastapi import FastAPI
+from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
@ -6,15 +6,17 @@ from contextlib import asynccontextmanager
import uvicorn
import logging
import os
+from pathlib import Path
from app.routes import routes
from app.db.database import init_db, SessionLocal
from app.services.service_manager import ServiceManager
-import logging

# Configure logging
-log_file = "app.log"
-if os.path.exists(log_file):
-    os.remove(log_file)  # Remove existing log file to start fresh
+log_file = Path("app.log")
+if log_file.exists():
+    # Archive old log file instead of deleting
+    archive_path = log_file.with_suffix(f'.{log_file.stat().st_mtime}.log')
+    log_file.rename(archive_path)

# Create a formatter
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s')
@ -37,25 +39,45 @@ logger = logging.getLogger(__name__)
logger.info("Application starting up...") logger.info("Application starting up...")
# Initialize service manager # Initialize service manager
service_manager = ServiceManager() service_manager = None
@asynccontextmanager @asynccontextmanager
async def lifespan(app: FastAPI): async def lifespan(app: FastAPI):
global service_manager
service_manager = ServiceManager()
# Startup # Startup
try:
init_db() init_db()
logger.info("Database initialized successfully") logger.info("Database initialized successfully")
# Initialize all services # Initialize all services
await service_manager.initialize_services() await service_manager.initialize_services()
# Get a database session
db = SessionLocal()
try:
data_init_service = service_manager.get_service('data_initialization')
data_init = await data_init_service.initialize_data(db, game_ids=[1, 3], use_cache=False, init_categories=False, init_products=False, init_groups=False, init_archived_prices=True, init_mtgjson=False, archived_prices_start_date="2024-03-05", archived_prices_end_date="2025-04-17")
logger.info(f"Data initialization results: {data_init}")
# Start the scheduler # Start the scheduler
scheduler = service_manager.get_service('scheduler') scheduler = service_manager.get_service('scheduler')
await scheduler.start_scheduled_tasks() await scheduler.start_scheduled_tasks(db)
logger.info("Scheduler started successfully") logger.info("Scheduler started successfully")
yield yield
except Exception as e:
logger.error(f"Error during application startup: {str(e)}")
raise
finally:
db.close()
except Exception as e:
logger.error(f"Critical error during application startup: {str(e)}")
raise
finally:
# Shutdown # Shutdown
if service_manager:
await service_manager.cleanup_services() await service_manager.cleanup_services()
logger.info("All services cleaned up successfully") logger.info("All services cleaned up successfully")
@ -72,16 +94,23 @@ app.mount("/static", StaticFiles(directory="app/static"), name="static")
# Serve index.html at root
@app.get("/")
async def read_root():
-    return FileResponse('app/static/index.html')
+    index_path = Path('app/static/index.html')
+    if not index_path.exists():
+        raise HTTPException(status_code=404, detail="Index file not found")
+    return FileResponse(index_path)

# Serve app.js
@app.get("/app.js")
async def read_app_js():
-    return FileResponse('app/static/app.js')
+    js_path = Path('app/static/app.js')
+    if not js_path.exists():
+        raise HTTPException(status_code=404, detail="App.js file not found")
+    return FileResponse(js_path)

+# Configure CORS with specific origins in production
app.add_middleware(
    CORSMiddleware,
-    allow_origins=["*"],
+    allow_origins=["http://localhost:3000"],  # Update with your frontend URL
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],


@ -1,5 +1,4 @@
-from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, JSON
+from sqlalchemy import Column, Integer, String, Float, DateTime, JSON
-from sqlalchemy.orm import relationship
from datetime import datetime, UTC
from app.db.database import Base


@ -0,0 +1,19 @@
from sqlalchemy import Column, Integer, Float, DateTime, String
from sqlalchemy.sql import func
from app.db.database import Base
class TCGPlayerPriceHistory(Base):
__tablename__ = "tcgplayer_price_history"
id = Column(Integer, primary_key=True, index=True)
product_id = Column(Integer, index=True)
date = Column(DateTime, index=True)
low_price = Column(Float)
mid_price = Column(Float)
high_price = Column(Float)
market_price = Column(Float)
direct_low_price = Column(Float)
sub_type_name = Column(String)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
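Because product_id and date are both indexed, the typical read is the latest snapshot for one product; a minimal query sketch, assuming the SessionLocal factory from app.db.database, an illustrative product id, and a "Normal" sub-type name (also an assumption):

from app.db.database import SessionLocal
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory

db = SessionLocal()
try:
    # Most recent non-foil price row for a hypothetical product id
    latest = (
        db.query(TCGPlayerPriceHistory)
        .filter(
            TCGPlayerPriceHistory.product_id == 12345,
            TCGPlayerPriceHistory.sub_type_name == "Normal",
        )
        .order_by(TCGPlayerPriceHistory.date.desc())
        .first()
    )
    if latest:
        print(latest.date, latest.market_price)
finally:
    db.close()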


@ -1,4 +1,4 @@
-from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey
+from sqlalchemy import Column, Integer, String, Float, DateTime
from sqlalchemy.sql import func
from app.db.database import Base
@ -11,11 +11,13 @@ class TCGPlayerProduct(Base):
    clean_name = Column(String)
    image_url = Column(String)
    category_id = Column(Integer)
-    group_id = Column(Integer, ForeignKey("tcgplayer_groups.group_id"))
+    group_id = Column(Integer)
    url = Column(String)
    modified_on = Column(DateTime)
    image_count = Column(Integer)
    ext_rarity = Column(String)
+    ext_subtype = Column(String)
+    ext_oracle_text = Column(String)
    ext_number = Column(String)
    low_price = Column(Float)
    mid_price = Column(Float)
@ -23,5 +25,9 @@ class TCGPlayerProduct(Base):
    market_price = Column(Float)
    direct_low_price = Column(Float)
    sub_type_name = Column(String)
+    ext_power = Column(String)
+    ext_toughness = Column(String)
+    ext_flavor_text = Column(String)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())


@ -1,12 +1,19 @@
-from fastapi import APIRouter, HTTPException, Depends, Query
+from fastapi import APIRouter, HTTPException, Depends, Query, UploadFile, File
from typing import List
from datetime import datetime
from enum import Enum
from app.schemas.tcgplayer import TCGPlayerAPIOrderSummary, TCGPlayerAPIOrder
-from app.schemas.generate import GenerateRequest, GenerateAddressLabelsRequest, GeneratePackingSlipsRequest, GeneratePullSheetsRequest, GenerateResponse
+from app.schemas.generate import GenerateAddressLabelsRequest, GeneratePackingSlipsRequest, GeneratePullSheetsRequest, GenerateResponse, GenerateReturnLabelsRequest
+from app.schemas.file import FileUpload
from app.services.service_manager import ServiceManager
+from app.services.file_service import FileService
from sqlalchemy.orm import Session
from app.db.database import get_db
+import os
+import tempfile
+import logging

+logger = logging.getLogger(__name__)

class SearchRange(str, Enum):
@ -153,3 +160,66 @@ async def generate_address_labels(
        return {"success": False, "message": "Address labels not found"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to generate address labels: {str(e)}")
@router.post("/generate-return-labels")
async def generate_return_labels(
request: GenerateReturnLabelsRequest,
db: Session = Depends(get_db)
) -> GenerateResponse:
"""
Generate and print return labels for the specified number of labels.
Args:
request: Dictionary containing:
- number_of_labels: Number of return labels to generate
"""
try:
label_printer = service_manager.get_service('label_printer')
success = await label_printer.print_file("app/data/assets/images/ccrcardsaddress.png", label_size="dk1201", label_type="return_label", copies=request.number_of_labels)
return {"success": success, "message": "Return labels generated and printed successfully"}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to generate return labels: {str(e)}")
@router.post("/print-pirate-ship-label")
async def print_pirate_ship_label(
file: UploadFile = File(...),
db: Session = Depends(get_db)
) -> GenerateResponse:
"""
Print a PDF file uploaded via the API.
Args:
file: The PDF file to print
Returns:
Success status of the operation
"""
try:
# Read the file content
content = await file.read()
# Store the file using FileService
file_service = service_manager.get_service('file')
stored_file = await file_service.save_file(
db=db,
file_data=content,
filename=file.filename,
subdir="pirate_ship_labels",
file_type="pdf",
content_type=file.content_type,
metadata={"filename": file.filename}
)
try:
# Use the label printer service to print the file
label_printer = service_manager.get_service('label_printer')
success = await label_printer.print_file(stored_file, label_size="dk1241", label_type="pirate_ship_label")
return {"success": success, "message": "Pirate Ship label printed successfully"}
except Exception as e:
# If printing fails, we'll keep the file in storage for potential retry
logger.error(f"Failed to print file: {str(e)}")
raise e
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to print Pirate Ship label: {str(e)}")


@ -1,6 +1,7 @@
from typing import List, Optional
from datetime import datetime
from pydantic import BaseModel
+from fastapi import UploadFile

# Base schema with common attributes
class FileBase(BaseModel):
@ -38,3 +39,7 @@ class FileList(BaseModel):
    total: int
    page: int
    limit: int
# Schema for file upload
class FileUpload(BaseModel):
file: UploadFile


@ -27,3 +27,6 @@ class GeneratePullSheetsRequest(GenerateRequest):
class GenerateResponse(BaseModel):
    message: str
    success: bool
class GenerateReturnLabelsRequest(BaseModel):
number_of_labels: int
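For reference, a call to the /generate-return-labels endpoint above only needs the label count; an illustrative request body matching this schema:

{"number_of_labels": 3}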


@ -3,11 +3,33 @@ from app.services.service_manager import ServiceManager
from app.services.file_processing_service import FileProcessingService
from app.services.inventory_service import InventoryService
from app.services.file_service import FileService
+from app.services.data_initialization import DataInitializationService
+from app.services.external_api.tcgcsv.tcgcsv_service import TCGCSVService
+from app.services.external_api.mtgjson.mtgjson_service import MTGJSONService
+from app.services.label_printer_service import LabelPrinterService
+from app.services.regular_printer_service import RegularPrinterService
+from app.services.address_label_service import AddressLabelService
+from app.services.pull_sheet_service import PullSheetService
+from app.services.set_label_service import SetLabelService
+from app.services.scheduler.scheduler_service import SchedulerService
+from app.services.external_api.tcgplayer.order_management_service import OrderManagementService
+from app.services.external_api.tcgplayer.tcgplayer_inventory_service import TCGPlayerInventoryService

__all__ = [
    'BaseService',
    'ServiceManager',
    'FileProcessingService',
    'InventoryService',
-    'FileService'
+    'FileService',
+    'DataInitializationService',
+    'TCGCSVService',
+    'MTGJSONService',
+    'LabelPrinterService',
+    'RegularPrinterService',
+    'AddressLabelService',
+    'PullSheetService',
+    'SetLabelService',
+    'SchedulerService',
+    'OrderManagementService',
+    'TCGPlayerInventoryService'
]


@ -1,121 +1,97 @@
import os
import json
from datetime import datetime, timedelta
-from typing import Optional, List, Dict, Any
+from typing import Optional, List, Dict, Any, Union, Generator, Callable
from sqlalchemy.orm import Session
-from app.services.external_api.tcgcsv.tcgcsv_service import TCGCSVService
-from app.services.external_api.mtgjson.mtgjson_service import MTGJSONService
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_product import TCGPlayerProduct
from app.models.tcgplayer_category import TCGPlayerCategory
+from app.services.base_service import BaseService
+from app.schemas.file import FileInDB
+from app.db.database import transaction
+import logging
+from app.models.tcgplayer_price_history import TCGPlayerPriceHistory
+from sqlalchemy import and_, bindparam, update, insert
+import py7zr
+import shutil

+logger = logging.getLogger(__name__)

-class DataInitializationService:
-    def __init__(self, cache_dir: str = "app/data/cache/tcgcsv"):
-        self.cache_dir = cache_dir
-        self.categories_dir = os.path.join(cache_dir, "categories")
-        self.groups_dir = os.path.join(cache_dir, "groups")
-        self.products_dir = os.path.join(cache_dir, "products")
-        self.tcgcsv_service = TCGCSVService()
-        self.mtgjson_service = MTGJSONService()
-        # Create all necessary directories
-        os.makedirs(cache_dir, exist_ok=True)
-        os.makedirs(self.categories_dir, exist_ok=True)
-        os.makedirs(self.groups_dir, exist_ok=True)
-        os.makedirs(self.products_dir, exist_ok=True)
-
-    def _get_cache_path(self, filename: str, subdir: str) -> str:
-        """Get the full path for a cached file in the specified subdirectory"""
-        return os.path.join(self.cache_dir, subdir, filename)
+class DataInitializationService(BaseService):
+    def __init__(self):
+        super().__init__(None)
async def _cache_categories(self, categories_data: dict): async def _cache_data(
"""Cache categories data to a JSON file"""
cache_path = self._get_cache_path("categories.json", "categories")
with open(cache_path, 'w') as f:
json.dump(categories_data, f, indent=2)
async def _cache_groups(self, game_ids: List[int], groups_data: dict):
for game_id in game_ids:
cache_path = self._get_cache_path(f"groups_{game_id}.json", "groups")
with open(cache_path, 'w') as f:
json.dump(groups_data, f, default=str)
async def _cache_products(self, game_ids: List[int], group_id: int, products_data: list):
for game_id in game_ids:
cache_path = self._get_cache_path(f"products_{game_id}_{group_id}.json", "products")
with open(cache_path, 'w') as f:
json.dump(products_data, f, default=str)
async def _load_cached_categories(self) -> Optional[dict]:
cache_path = self._get_cache_path("categories.json", "categories")
if os.path.exists(cache_path):
with open(cache_path, 'r') as f:
return json.load(f)
return None
async def _load_cached_groups(self, game_ids: List[int]) -> Optional[dict]:
# Try to load cached data for any of the game IDs
for game_id in game_ids:
cache_path = self._get_cache_path(f"groups_{game_id}.json", "groups")
if os.path.exists(cache_path):
with open(cache_path, 'r') as f:
return json.load(f)
return None
async def _load_cached_products(self, game_ids: List[int], group_id: int) -> Optional[list]:
# Try to load cached data for any of the game IDs
for game_id in game_ids:
cache_path = self._get_cache_path(f"products_{game_id}_{group_id}.json", "products")
if os.path.exists(cache_path):
with open(cache_path, 'r') as f:
return json.load(f)
return None
async def initialize_data(
self, self,
db: Session, db: Session,
game_ids: List[int], data: Union[dict, list],
use_cache: bool = True, filename: str,
init_categories: bool = True, subdir: str,
init_groups: bool = True, default_str: bool = False,
init_products: bool = True, file_type: str = "json",
init_archived_prices: bool = False, content_type: str = "application/json",
archived_prices_start_date: Optional[str] = None, metadata: Optional[Dict] = None
archived_prices_end_date: Optional[str] = None, ) -> FileInDB:
init_mtgjson: bool = True """Generic function to cache data to a JSON file"""
) -> Dict[str, Any]: file_data = json.dumps(data, default=str if default_str else None, indent=2)
"""Initialize TCGPlayer data with configurable steps""" return await self.file_service.save_file(
print("Initializing TCGPlayer data...") db,
results = { file_data,
"categories": 0, filename,
"groups": {}, subdir,
"products": {}, file_type=file_type,
"archived_prices": False, content_type=content_type,
"mtgjson": {} metadata=metadata
} )
if init_categories: async def _load_cached_data(
print("\nInitializing categories...") self,
categories_data = None db: Session,
if use_cache: filename: str
categories_data = await self._load_cached_categories() ) -> Optional[Dict[str, Any]]:
"""Generic function to load cached data from a JSON file with 7-day expiration"""
file_record = await self.file_service.get_file_by_filename(db, filename)
if file_record:
# Check if cache is expired (7 days)
cache_age = datetime.now() - file_record.created_at
if cache_age.days < 7:
with open(file_record.path, 'r') as f:
return json.load(f)
else:
logger.info(f"Cache expired for {filename}, age: {cache_age.days} days")
# Delete the expired cache file
await self.file_service.delete_file(db, file_record.id)
return None
if not categories_data: async def sync_categories(self, db: Session, categories_data: dict):
print("Fetching categories from API...") """Sync categories data to the database using streaming for large datasets"""
categories_data = await self.tcgcsv_service.get_categories()
if use_cache:
await self._cache_categories(categories_data)
if not categories_data.get("success"):
raise Exception(f"Failed to fetch categories: {categories_data.get('errors')}")
# Sync categories to database
categories = categories_data.get("results", []) categories = categories_data.get("results", [])
synced_categories = [] batch_size = 1000 # Process in batches of 1000
for category_data in categories: total_categories = len(categories)
with transaction(db):
for i in range(0, total_categories, batch_size):
batch = categories[i:i + batch_size]
for category_data in batch:
existing_category = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == category_data["categoryId"]).first() existing_category = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == category_data["categoryId"]).first()
if existing_category: if existing_category:
synced_categories.append(existing_category) # Update existing category
for key, value in {
"name": category_data["name"],
"display_name": category_data.get("displayName"),
"seo_category_name": category_data.get("seoCategoryName"),
"category_description": category_data.get("categoryDescription"),
"category_page_title": category_data.get("categoryPageTitle"),
"sealed_label": category_data.get("sealedLabel"),
"non_sealed_label": category_data.get("nonSealedLabel"),
"condition_guide_url": category_data.get("conditionGuideUrl"),
"is_scannable": category_data.get("isScannable", False),
"popularity": category_data.get("popularity", 0),
"is_direct": category_data.get("isDirect", False),
"modified_on": datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None
}.items():
setattr(existing_category, key, value)
else: else:
new_category = TCGPlayerCategory( new_category = TCGPlayerCategory(
category_id=category_data["categoryId"], category_id=category_data["categoryId"],
@ -133,39 +109,63 @@ class DataInitializationService:
modified_on=datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None modified_on=datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None
) )
db.add(new_category) db.add(new_category)
synced_categories.append(new_category)
# Commit after each batch
db.commit() db.commit()
results["categories"] = len(synced_categories) logger.info(f"Processed {min(i + batch_size, total_categories)}/{total_categories} categories")
print(f"Synced {len(synced_categories)} categories")
# Process each game ID separately async def init_categories(self, db: Session, use_cache: bool = True) -> bool:
for game_id in game_ids: """Initialize categories data"""
print(f"\nProcessing game ID: {game_id}") logger.info("Starting categories initialization")
results["groups"][game_id] = 0
results["products"][game_id] = {}
if init_groups:
print(f"Initializing groups for game ID {game_id}...")
groups_data = None
if use_cache: if use_cache:
groups_data = await self._load_cached_groups([game_id]) categories_data = await self._load_cached_data(db, "categories.json")
if categories_data:
await self.sync_categories(db, categories_data)
logger.info("Categories initialized from cache")
return True
else:
logger.warning("No cached categories data found")
return False
else:
tcgcsv_service = self.get_service('tcgcsv')
categories_data = await tcgcsv_service.get_categories()
if not groups_data: # Save the categories data
print(f"Fetching groups for game ID {game_id} from API...") await self._cache_data(
groups_data = await self.tcgcsv_service.get_groups([game_id]) db,
if use_cache: categories_data,
await self._cache_groups([game_id], groups_data) "categories.json",
"tcgcsv/categories",
file_type="json",
content_type="application/json"
)
if not groups_data.get("success"): await self.sync_categories(db, categories_data)
raise Exception(f"Failed to fetch groups for game ID {game_id}: {groups_data.get('errors')}") logger.info("Categories initialized from API")
return True
# Sync groups to database async def sync_groups(self, db: Session, groups_data: dict):
"""Sync groups data to the database using streaming for large datasets"""
groups = groups_data.get("results", []) groups = groups_data.get("results", [])
synced_groups = [] batch_size = 1000 # Process in batches of 1000
for group_data in groups: total_groups = len(groups)
with transaction(db):
for i in range(0, total_groups, batch_size):
batch = groups[i:i + batch_size]
for group_data in batch:
existing_group = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == group_data["groupId"]).first() existing_group = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == group_data["groupId"]).first()
if existing_group: if existing_group:
synced_groups.append(existing_group) # Update existing group
for key, value in {
"name": group_data["name"],
"abbreviation": group_data.get("abbreviation"),
"is_supplemental": group_data.get("isSupplemental", False),
"published_on": datetime.fromisoformat(group_data["publishedOn"].replace("Z", "+00:00")) if group_data.get("publishedOn") else None,
"modified_on": datetime.fromisoformat(group_data["modifiedOn"].replace("Z", "+00:00")) if group_data.get("modifiedOn") else None,
"category_id": group_data.get("categoryId")
}.items():
setattr(existing_group, key, value)
else: else:
new_group = TCGPlayerGroup( new_group = TCGPlayerGroup(
group_id=group_data["groupId"], group_id=group_data["groupId"],
@ -177,88 +177,561 @@ class DataInitializationService:
category_id=group_data.get("categoryId") category_id=group_data.get("categoryId")
) )
db.add(new_group) db.add(new_group)
synced_groups.append(new_group)
# Commit after each batch
db.commit() db.commit()
results["groups"][game_id] = len(synced_groups) logger.info(f"Processed {min(i + batch_size, total_groups)}/{total_groups} groups")
print(f"Synced {len(synced_groups)} groups for game ID {game_id}")
if init_products: async def init_groups(self, db: Session, use_cache: bool = True, game_ids: List[int] = None) -> bool:
# Handle products for each group in this game ID """Initialize groups data"""
for group in synced_groups: logger.info(f"Starting groups initialization for game IDs: {game_ids}")
print(f"Initializing products for group {group.name} (game ID {game_id})...") tcgcsv_service = self.get_service('tcgcsv')
products_data = None for game_id in game_ids:
if use_cache: if use_cache:
products_data = await self._load_cached_products([game_id], group.group_id) groups_data = await self._load_cached_data(db, f"groups_{game_id}.json")
if groups_data:
await self.sync_groups(db, groups_data)
logger.info(f"Groups initialized from cache for game ID {game_id}")
else:
logger.warning(f"No cached groups data found for game ID {game_id}")
return False
else:
groups_data = await tcgcsv_service.get_groups(game_id)
if not products_data: # Save the groups data
print(f"Fetching products for group {group.name} (game ID {game_id}) from API...") await self._cache_data(
products_data = await self.tcgcsv_service.get_products_and_prices([game_id], group.group_id) db,
if use_cache: groups_data,
await self._cache_products([game_id], group.group_id, products_data) f"groups_{game_id}.json",
"tcgcsv/groups",
file_type="json",
content_type="application/json"
)
# Sync products to database await self.sync_groups(db, groups_data)
synced_products = [] logger.info(f"Groups initialized from API for game ID {game_id}")
for product_data in products_data: return True
existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == int(product_data["productId"])).first()
async def sync_products(self, db: Session, products_data: str):
"""Sync products data to the database using streaming for large datasets"""
import csv
import io
# Parse CSV data
csv_reader = csv.DictReader(io.StringIO(products_data))
products_list = list(csv_reader)
batch_size = 1000 # Process in batches of 1000
total_products = len(products_list)
with transaction(db):
for i in range(0, total_products, batch_size):
batch = products_list[i:i + batch_size]
for product_data in batch:
existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == product_data["productId"]).first()
if existing_product: if existing_product:
synced_products.append(existing_product) # Update existing product
for key, value in {
"name": product_data["name"],
"clean_name": product_data.get("cleanName"),
"image_url": product_data.get("imageUrl"),
"category_id": product_data.get("categoryId"),
"group_id": product_data.get("groupId"),
"url": product_data.get("url"),
"modified_on": datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None,
"image_count": product_data.get("imageCount", 0),
"ext_rarity": product_data.get("extRarity"),
"ext_number": product_data.get("extNumber"),
"low_price": float(product_data.get("lowPrice")) if product_data.get("lowPrice") else None,
"mid_price": float(product_data.get("midPrice")) if product_data.get("midPrice") else None,
"high_price": float(product_data.get("highPrice")) if product_data.get("highPrice") else None,
"market_price": float(product_data.get("marketPrice")) if product_data.get("marketPrice") else None,
"direct_low_price": float(product_data.get("directLowPrice")) if product_data.get("directLowPrice") else None,
"sub_type_name": product_data.get("subTypeName")
}.items():
setattr(existing_product, key, value)
else: else:
new_product = TCGPlayerProduct( new_product = TCGPlayerProduct(
product_id=int(product_data["productId"]), product_id=product_data["productId"],
name=product_data["name"], name=product_data["name"],
clean_name=product_data.get("cleanName"), clean_name=product_data.get("cleanName"),
image_url=product_data.get("imageUrl"), image_url=product_data.get("imageUrl"),
category_id=int(product_data["categoryId"]), category_id=product_data.get("categoryId"),
group_id=int(product_data["groupId"]), group_id=product_data.get("groupId"),
url=product_data.get("url"), url=product_data.get("url"),
modified_on=datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None, modified_on=datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None,
image_count=int(product_data.get("imageCount", 0)), image_count=product_data.get("imageCount", 0),
ext_rarity=product_data.get("extRarity"), ext_rarity=product_data.get("extRarity"),
ext_subtype=product_data.get("extSubtype"),
ext_oracle_text=product_data.get("extOracleText"),
ext_number=product_data.get("extNumber"), ext_number=product_data.get("extNumber"),
low_price=float(product_data.get("lowPrice")) if product_data.get("lowPrice") else None, low_price=float(product_data.get("lowPrice")) if product_data.get("lowPrice") else None,
mid_price=float(product_data.get("midPrice")) if product_data.get("midPrice") else None, mid_price=float(product_data.get("midPrice")) if product_data.get("midPrice") else None,
high_price=float(product_data.get("highPrice")) if product_data.get("highPrice") else None, high_price=float(product_data.get("highPrice")) if product_data.get("highPrice") else None,
market_price=float(product_data.get("marketPrice")) if product_data.get("marketPrice") else None, market_price=float(product_data.get("marketPrice")) if product_data.get("marketPrice") else None,
direct_low_price=float(product_data.get("directLowPrice")) if product_data.get("directLowPrice") else None, direct_low_price=float(product_data.get("directLowPrice")) if product_data.get("directLowPrice") else None,
sub_type_name=product_data.get("subTypeName") sub_type_name=product_data.get("subTypeName"),
ext_power=product_data.get("extPower"),
ext_toughness=product_data.get("extToughness"),
ext_flavor_text=product_data.get("extFlavorText")
) )
db.add(new_product) db.add(new_product)
synced_products.append(new_product)
# Commit after each batch
db.commit() db.commit()
results["products"][game_id][group.group_id] = len(synced_products) logger.info(f"Processed {min(i + batch_size, total_products)}/{total_products} products")
print(f"Synced {len(synced_products)} products for group {group.name} (game ID {game_id})")
if init_archived_prices: async def init_products(self, db: Session, use_cache: bool = True, game_ids: List[int] = None) -> bool:
if not archived_prices_start_date or not archived_prices_end_date: """Initialize products data"""
raise ValueError("Both start_date and end_date are required for archived prices initialization") logger.info(f"Starting products initialization for game IDs: {game_ids}")
tcgcsv_service = self.get_service('tcgcsv')
for game_id in game_ids:
groups = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.category_id == game_id).all()
logger.info(f"Processing {len(groups)} groups for game ID {game_id}")
for group in groups:
if use_cache:
products_data = await self._load_cached_data(db, f"products_{game_id}_{group.group_id}.json")
if products_data:
await self.sync_products(db, products_data)
logger.info(f"Products initialized from cache for group {group.group_id}")
else:
logger.warning(f"No cached products data found for group {group.group_id}")
continue
else:
# Get CSV data from API
csv_data = await tcgcsv_service.get_products_and_prices(game_id, group.group_id)
print(f"\nInitializing archived prices from {archived_prices_start_date} to {archived_prices_end_date}...") # Save the CSV file
await self.tcgcsv_service.get_archived_prices_for_date_range(archived_prices_start_date, archived_prices_end_date) await self.file_service.save_file(
results["archived_prices"] = True db,
print("Archived prices initialization completed") csv_data,
f"products_{game_id}_{group.group_id}.csv",
"tcgcsv/products",
file_type="csv",
content_type="text/csv"
)
if init_mtgjson: # Parse and sync the CSV data
print("\nInitializing MTGJSON data...") await self.sync_products(db, csv_data)
identifiers_result = await self.mtgjson_service.download_and_process_identifiers(db) logger.info(f"Products initialized from API for group {group.group_id}")
skus_result = await self.mtgjson_service.download_and_process_skus(db) return True
results["mtgjson"] = {
"cards_processed": identifiers_result["cards_processed"], async def sync_archived_prices(self, db: Session, archived_prices_data: dict, date: datetime):
"skus_processed": skus_result["skus_processed"] """Sync archived prices data to the database using bulk operations.
Note: Historical prices are never updated, only new records are inserted."""
from sqlalchemy import insert
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory
# Prepare data for bulk operations
price_records = []
for price_data in archived_prices_data.get("results", []):
record = {
"product_id": price_data["productId"],
"date": date,
"sub_type_name": price_data["subTypeName"],
"low_price": price_data.get("lowPrice"),
"mid_price": price_data.get("midPrice"),
"high_price": price_data.get("highPrice"),
"market_price": price_data.get("marketPrice"),
"direct_low_price": price_data.get("directLowPrice")
}
price_records.append(record)
if not price_records:
return
# Get existing records in bulk to avoid duplicates
product_ids = [r["product_id"] for r in price_records]
sub_type_names = [r["sub_type_name"] for r in price_records]
existing_records = db.query(TCGPlayerPriceHistory).filter(
TCGPlayerPriceHistory.product_id.in_(product_ids),
TCGPlayerPriceHistory.date == date,
TCGPlayerPriceHistory.sub_type_name.in_(sub_type_names)
).all()
# Filter out existing records
existing_keys = {(r.product_id, r.date, r.sub_type_name) for r in existing_records}
to_insert = [
record for record in price_records
if (record["product_id"], record["date"], record["sub_type_name"]) not in existing_keys
]
# Perform bulk insert for new records only
if to_insert:
stmt = insert(TCGPlayerPriceHistory)
db.execute(stmt, to_insert)
db.commit()
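The select-then-insert guard above needs no schema changes; for comparison, if a unique constraint existed on (product_id, date, sub_type_name), the deduplication could be pushed into PostgreSQL itself. A sketch under that assumption (no such constraint is created by the migrations in this commit):

from sqlalchemy.dialects.postgresql import insert as pg_insert
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory

def bulk_insert_ignore_existing(db, price_records):
    # ON CONFLICT DO NOTHING relies on a hypothetical unique index over these columns
    stmt = pg_insert(TCGPlayerPriceHistory).on_conflict_do_nothing(
        index_elements=["product_id", "date", "sub_type_name"]
    )
    db.execute(stmt, price_records)
    db.commit()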
async def init_archived_prices(self, db: Session, start_date: datetime, end_date: datetime, use_cache: bool = True, game_ids: List[int] = None) -> bool:
"""Initialize archived prices data"""
logger.info(f"Starting archived prices initialization from {start_date} to {end_date}")
tcgcsv_service = self.get_service('tcgcsv')
processed_dates = await tcgcsv_service.get_tcgcsv_date_range(start_date, end_date)
logger.info(f"Processing {len(processed_dates)} dates")
# Convert game_ids to set for faster lookups
desired_game_ids = set(game_ids) if game_ids else set()
for date in processed_dates:
date_path = f"app/data/cache/tcgcsv/prices/{date}"
# Check if we already have the data for this date
if use_cache and os.path.exists(date_path):
logger.info(f"Using cached price data for {date}")
else:
logger.info(f"Downloading and processing archived prices for {date}")
# Download and extract the archive
archive_data = await tcgcsv_service.get_archived_prices_for_date(date)
# Save the archive file
file_record = await self.file_service.save_file(
db,
archive_data,
f"prices-{date}.ppmd.7z",
"tcgcsv/prices/zip",
file_type="application/x-7z-compressed",
content_type="application/x-7z-compressed"
)
# Extract the 7z file to a temporary directory
temp_extract_path = f"app/data/cache/tcgcsv/prices/temp_{date}"
os.makedirs(temp_extract_path, exist_ok=True)
with py7zr.SevenZipFile(file_record.path, 'r') as archive:
archive.extractall(path=temp_extract_path)
# Find the date subdirectory in the temp directory
date_subdir = os.path.join(temp_extract_path, str(date))
if os.path.exists(date_subdir):
# Remove existing directory if it exists
if os.path.exists(date_path):
shutil.rmtree(date_path)
# Create the destination directory
os.makedirs(date_path, exist_ok=True)
# Move contents from the date subdirectory to the final path
for item in os.listdir(date_subdir):
src = os.path.join(date_subdir, item)
dst = os.path.join(date_path, item)
os.rename(src, dst)
# Clean up the temporary directory
os.rmdir(date_subdir)
os.rmdir(temp_extract_path)
# Process each category directory
for category_id in os.listdir(date_path):
# Skip categories that aren't in our desired game IDs
if int(category_id) not in desired_game_ids:
continue
category_path = os.path.join(date_path, category_id)
if not os.path.isdir(category_path):
continue
# Process each group directory
for group_id in os.listdir(category_path):
group_path = os.path.join(category_path, group_id)
if not os.path.isdir(group_path):
continue
# Process the prices file
prices_file = os.path.join(group_path, "prices")
if not os.path.exists(prices_file):
continue
try:
with open(prices_file, 'r') as f:
price_data = json.load(f)
if price_data.get("success"):
await self.sync_archived_prices(db, price_data, datetime.strptime(date, "%Y-%m-%d"))
logger.info(f"Processed prices for category {category_id}, group {group_id} on {date}")
except Exception as e:
logger.error(f"Error processing prices file {prices_file}: {str(e)}")
continue
return True
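For orientation, the directory walk above assumes the extracted TCGCSV archive follows a date/category/group layout on disk, roughly:

app/data/cache/tcgcsv/prices/<YYYY-MM-DD>/<category_id>/<group_id>/prices

where each prices file is the JSON payload handed to sync_archived_prices.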
async def init_mtgjson(self, db: Session, use_cache: bool = True) -> Dict[str, Any]:
"""Initialize MTGJSON data"""
logger.info("Starting MTGJSON initialization")
mtgjson_service = self.get_service('mtgjson')
identifiers_count = 0
skus_count = 0
# Process identifiers
if use_cache:
cached_file = await self.file_service.get_file_by_filename(db, "mtgjson_identifiers.json")
if cached_file and os.path.exists(cached_file.path):
logger.info("MTGJSON identifiers initialized from cache")
identifiers_count = await self._process_streamed_data(
db,
self._stream_json_file(cached_file.path),
"mtgjson_identifiers.json",
"mtgjson",
self.sync_mtgjson_identifiers
)
else:
logger.info("Downloading MTGJSON identifiers from API")
identifiers_count = await self._process_streamed_data(
db,
await mtgjson_service.get_identifiers(db),
"mtgjson_identifiers.json",
"mtgjson",
self.sync_mtgjson_identifiers
)
else:
logger.info("Downloading MTGJSON identifiers from API")
identifiers_count = await self._process_streamed_data(
db,
await mtgjson_service.get_identifiers(db),
"mtgjson_identifiers.json",
"mtgjson",
self.sync_mtgjson_identifiers
)
# Process SKUs
if use_cache:
cached_file = await self.file_service.get_file_by_filename(db, "mtgjson_skus.json")
if cached_file and os.path.exists(cached_file.path):
logger.info("MTGJSON SKUs initialized from cache")
skus_count = await self._process_streamed_data(
db,
self._stream_json_file(cached_file.path),
"mtgjson_skus.json",
"mtgjson",
self.sync_mtgjson_skus
)
else:
logger.info("Downloading MTGJSON SKUs from API")
skus_count = await self._process_streamed_data(
db,
await mtgjson_service.get_skus(db),
"mtgjson_skus.json",
"mtgjson",
self.sync_mtgjson_skus
)
else:
logger.info("Downloading MTGJSON SKUs from API")
skus_count = await self._process_streamed_data(
db,
await mtgjson_service.get_skus(db),
"mtgjson_skus.json",
"mtgjson",
self.sync_mtgjson_skus
)
return {
"identifiers_processed": identifiers_count,
"skus_processed": skus_count
} }
async def _process_streamed_data(
self,
db: Session,
data_stream: Generator[Dict[str, Any], None, None],
filename: str,
subdir: str,
sync_func: Callable
) -> int:
"""Process streamed data and sync to database"""
count = 0
items = []
batch_size = 1000
for item in data_stream:
if item["type"] == "meta":
# Handle meta data separately
continue
count += 1
items.append(item["data"])
# Process in batches
if len(items) >= batch_size:
await sync_func(db, items)
items = []
# Process any remaining items
if items:
await sync_func(db, items)
return count
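_process_streamed_data only looks at the "type" and "data" keys of each yielded item; a minimal illustrative generator (the non-"meta" type value and the payload fields are assumptions about what the MTGJSON service emits):

from typing import Any, Dict, Generator

def example_stream() -> Generator[Dict[str, Any], None, None]:
    # "meta" entries are skipped by _process_streamed_data
    yield {"type": "meta", "data": {"date": "2025-04-17", "version": "5.x"}}
    # Everything else is batched (1000 at a time) and passed to the sync_func callback
    yield {"type": "item", "data": {"name": "Example Card", "identifiers": {}}}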
async def sync_mtgjson_identifiers(self, db: Session, identifiers_data: dict):
"""Sync MTGJSON identifiers data to the database"""
from app.models.mtgjson_card import MTGJSONCard
with transaction(db):
for card_id, card_data in identifiers_data.items():
existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.card_id == card_id).first()
if existing_card:
# Update existing card
for key, value in {
"name": card_data.get("name"),
"set_code": card_data.get("setCode"),
"uuid": card_data.get("uuid"),
"abu_id": card_data.get("identifiers", {}).get("abuId"),
"card_kingdom_etched_id": card_data.get("identifiers", {}).get("cardKingdomEtchedId"),
"card_kingdom_foil_id": card_data.get("identifiers", {}).get("cardKingdomFoilId"),
"card_kingdom_id": card_data.get("identifiers", {}).get("cardKingdomId"),
"cardsphere_id": card_data.get("identifiers", {}).get("cardsphereId"),
"cardsphere_foil_id": card_data.get("identifiers", {}).get("cardsphereFoilId"),
"cardtrader_id": card_data.get("identifiers", {}).get("cardtraderId"),
"csi_id": card_data.get("identifiers", {}).get("csiId"),
"mcm_id": card_data.get("identifiers", {}).get("mcmId"),
"mcm_meta_id": card_data.get("identifiers", {}).get("mcmMetaId"),
"miniaturemarket_id": card_data.get("identifiers", {}).get("miniaturemarketId"),
"mtg_arena_id": card_data.get("identifiers", {}).get("mtgArenaId"),
"mtgjson_foil_version_id": card_data.get("identifiers", {}).get("mtgjsonFoilVersionId"),
"mtgjson_non_foil_version_id": card_data.get("identifiers", {}).get("mtgjsonNonFoilVersionId"),
"mtgjson_v4_id": card_data.get("identifiers", {}).get("mtgjsonV4Id"),
"mtgo_foil_id": card_data.get("identifiers", {}).get("mtgoFoilId"),
"mtgo_id": card_data.get("identifiers", {}).get("mtgoId"),
"multiverse_id": card_data.get("identifiers", {}).get("multiverseId"),
"scg_id": card_data.get("identifiers", {}).get("scgId"),
"scryfall_id": card_data.get("identifiers", {}).get("scryfallId"),
"scryfall_card_back_id": card_data.get("identifiers", {}).get("scryfallCardBackId"),
"scryfall_oracle_id": card_data.get("identifiers", {}).get("scryfallOracleId"),
"scryfall_illustration_id": card_data.get("identifiers", {}).get("scryfallIllustrationId"),
"tcgplayer_product_id": card_data.get("identifiers", {}).get("tcgplayerProductId"),
"tcgplayer_etched_product_id": card_data.get("identifiers", {}).get("tcgplayerEtchedProductId"),
"tnt_id": card_data.get("identifiers", {}).get("tntId")
}.items():
setattr(existing_card, key, value)
else:
new_card = MTGJSONCard(
card_id=card_id,
name=card_data.get("name"),
set_code=card_data.get("setCode"),
uuid=card_data.get("uuid"),
abu_id=card_data.get("identifiers", {}).get("abuId"),
card_kingdom_etched_id=card_data.get("identifiers", {}).get("cardKingdomEtchedId"),
card_kingdom_foil_id=card_data.get("identifiers", {}).get("cardKingdomFoilId"),
card_kingdom_id=card_data.get("identifiers", {}).get("cardKingdomId"),
cardsphere_id=card_data.get("identifiers", {}).get("cardsphereId"),
cardsphere_foil_id=card_data.get("identifiers", {}).get("cardsphereFoilId"),
cardtrader_id=card_data.get("identifiers", {}).get("cardtraderId"),
csi_id=card_data.get("identifiers", {}).get("csiId"),
mcm_id=card_data.get("identifiers", {}).get("mcmId"),
mcm_meta_id=card_data.get("identifiers", {}).get("mcmMetaId"),
miniaturemarket_id=card_data.get("identifiers", {}).get("miniaturemarketId"),
mtg_arena_id=card_data.get("identifiers", {}).get("mtgArenaId"),
mtgjson_foil_version_id=card_data.get("identifiers", {}).get("mtgjsonFoilVersionId"),
mtgjson_non_foil_version_id=card_data.get("identifiers", {}).get("mtgjsonNonFoilVersionId"),
mtgjson_v4_id=card_data.get("identifiers", {}).get("mtgjsonV4Id"),
mtgo_foil_id=card_data.get("identifiers", {}).get("mtgoFoilId"),
mtgo_id=card_data.get("identifiers", {}).get("mtgoId"),
multiverse_id=card_data.get("identifiers", {}).get("multiverseId"),
scg_id=card_data.get("identifiers", {}).get("scgId"),
scryfall_id=card_data.get("identifiers", {}).get("scryfallId"),
scryfall_card_back_id=card_data.get("identifiers", {}).get("scryfallCardBackId"),
scryfall_oracle_id=card_data.get("identifiers", {}).get("scryfallOracleId"),
scryfall_illustration_id=card_data.get("identifiers", {}).get("scryfallIllustrationId"),
tcgplayer_product_id=card_data.get("identifiers", {}).get("tcgplayerProductId"),
tcgplayer_etched_product_id=card_data.get("identifiers", {}).get("tcgplayerEtchedProductId"),
tnt_id=card_data.get("identifiers", {}).get("tntId")
)
db.add(new_card)
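# Sketch of the per-card payload this upsert consumes (field names mirror the .get() calls
# above; the values shown are placeholders, not real data):
#   identifiers_data = {
#       "<card_id>": {
#           "name": "...",
#           "setCode": "...",
#           "uuid": "...",
#           "identifiers": {"scryfallId": "...", "tcgplayerProductId": "...", "mcmId": "..."}
#       }
#   }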
async def sync_mtgjson_skus(self, db: Session, skus_data: dict):
"""Sync MTGJSON SKUs data to the database"""
from app.models.mtgjson_sku import MTGJSONSKU
with transaction(db):
for card_uuid, sku_list in skus_data.items():
for sku in sku_list:
# Handle case where sku is a string (skuId)
if isinstance(sku, str):
sku_id = sku
existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == sku_id).first()
if existing_sku:
# Update existing SKU
existing_sku.card_id = card_uuid
else:
new_sku = MTGJSONSKU(
sku_id=sku_id,
card_id=card_uuid
)
db.add(new_sku)
# Handle case where sku is a dictionary
else:
sku_id = str(sku.get("skuId"))
existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == sku_id).first()
if existing_sku:
# Update existing SKU
for key, value in {
"product_id": str(sku.get("productId")),
"condition": sku.get("condition"),
"finish": sku.get("finish"),
"language": sku.get("language"),
"printing": sku.get("printing"),
"card_id": card_uuid
}.items():
setattr(existing_sku, key, value)
else:
new_sku = MTGJSONSKU(
sku_id=sku_id,
product_id=str(sku.get("productId")),
condition=sku.get("condition"),
finish=sku.get("finish"),
language=sku.get("language"),
printing=sku.get("printing"),
card_id=card_uuid
)
db.add(new_sku)
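# Sketch of the two SKU shapes handled above (keys mirror the .get() calls; values are
# placeholders): each card UUID maps to a list containing either bare skuId strings or
# full SKU dicts, e.g.
#   skus_data = {
#       "<card_uuid>": [
#           "1234567",
#           {"skuId": 1234568, "productId": 98765, "condition": "...", "finish": "...",
#            "language": "...", "printing": "..."},
#       ]
#   }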
async def initialize_data(
self,
db: Session,
game_ids: List[int],
use_cache: bool = False,
init_categories: bool = True,
init_groups: bool = True,
init_products: bool = True,
init_archived_prices: bool = True,
archived_prices_start_date: Optional[str] = None,
archived_prices_end_date: Optional[str] = None,
init_mtgjson: bool = True
) -> Dict[str, Any]:
"""Initialize 3rd party API data loads with configurable steps"""
logger.info("Starting data initialization process")
results = {}
if init_categories:
logger.info("Initializing categories...")
results["categories"] = await self.init_categories(db, use_cache)
if init_groups:
logger.info("Initializing groups...")
results["groups"] = await self.init_groups(db, use_cache, game_ids)
if init_products:
logger.info("Initializing products...")
results["products"] = await self.init_products(db, use_cache, game_ids)
if init_archived_prices:
logger.info("Initializing archived prices...")
results["archived_prices"] = await self.init_archived_prices(
db,
archived_prices_start_date,
archived_prices_end_date,
use_cache,
game_ids
)
if init_mtgjson:
logger.info("Initializing MTGJSON data...")
results["mtgjson"] = await self.init_mtgjson(db, use_cache)
logger.info("Data initialization completed")
return results
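# Hypothetical usage sketch (constructor arguments and session handling are assumed, not
# taken from this commit): a full initialization for one game id with a bounded
# archived-price window.
#   from app.db.database import get_db
#   db = next(get_db())
#   service = DataInitializationService()
#   results = await service.initialize_data(
#       db,
#       game_ids=[1],
#       use_cache=True,
#       archived_prices_start_date="2025-02-08",
#       archived_prices_end_date="2025-03-01",
#   )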
async def clear_cache(self, db: Session) -> None:
"""Clear all cached data"""
# Delete all files in categories, groups, and products directories
for subdir in ["categories", "groups", "products"]:
files = await self.file_service.list_files(db, file_type="json")
for file in files:
if file.path.startswith(subdir):
await self.file_service.delete_file(db, file.id)
await self.mtgjson_service.clear_cache()
print("Cache cleared")
async def close(self):
await self.tcgcsv_service.close()
View File
@ -92,24 +92,3 @@ class BaseExternalService:
def file_service(self):
"""Convenience property for file service"""
return self.get_service('file')
async def save_file(self, db: Session, file_data: Union[bytes, list[dict]], file_name: str, subdir: str, file_type: Optional[str] = None) -> FileInDB:
"""Save a file using the FileService"""
if isinstance(file_data, list):
# Convert list of dictionaries to CSV bytes
output = io.StringIO()
writer = csv.DictWriter(output, fieldnames=file_data[0].keys())
writer.writeheader()
writer.writerows(file_data)
file_data = output.getvalue().encode('utf-8')
file_type = file_type or 'text/csv'
# Use FileService to save the file
file_service = self.get_service('file')
return await file_service.save_file(
db=db,
file_data=file_data,
filename=file_name,
subdir=subdir,
file_type=file_type
)
View File
@ -1,29 +1,24 @@
import os import os
import json import json
import zipfile import zipfile
import aiohttp
import asyncio
import time import time
import sys
from typing import Dict, Any, Optional, Generator
from sqlalchemy.orm import Session
from datetime import datetime
from app.models.mtgjson_card import MTGJSONCard
from app.models.mtgjson_sku import MTGJSONSKU
from app.db.database import get_db, transaction
from app.services.external_api.base_external_service import BaseExternalService
from app.schemas.file import FileInDB
import logging
logger = logging.getLogger(__name__)
class MTGJSONService(BaseExternalService):
def __init__(self, cache_dir: str = "app/data/cache/mtgjson", batch_size: int = 1000): def __init__(self, cache_dir: str = "app/data/cache/mtgjson"):
super().__init__(base_url="https://mtgjson.com/api/v5/")
# Ensure the cache directory exists
os.makedirs(cache_dir, exist_ok=True)
self.cache_dir = cache_dir
self.identifiers_dir = os.path.join(cache_dir, "identifiers")
self.skus_dir = os.path.join(cache_dir, "skus")
self.batch_size = batch_size # Ensure subdirectories exist
# Create necessary directories
os.makedirs(cache_dir, exist_ok=True)
os.makedirs(self.identifiers_dir, exist_ok=True)
os.makedirs(self.skus_dir, exist_ok=True)
@ -46,112 +41,133 @@ class MTGJSONService(BaseExternalService):
print(f"Downloading {url}...") print(f"Downloading {url}...")
start_time = time.time() start_time = time.time()
async with aiohttp.ClientSession() as session: # Use the base external service's _make_request method
async with session.get(url) as response: file_data = await self._make_request(
if response.status == 200: method="GET",
file_data = await response.read() endpoint=url.replace(self.base_url, ""),
return await self.save_file( binary=True
)
# Save the file using the file service
return await self.file_service.save_file(
db=db,
file_data=file_data,
file_name=filename, filename=filename,
subdir=f"mtgjson/{subdir}", subdir=f"mtgjson/{subdir}",
file_type=response.headers.get('content-type', 'application/octet-stream') file_type="application/zip",
content_type="application/zip"
)
else:
raise Exception(f"Failed to download file from {url}. Status: {response.status}")
async def _unzip_file(self, zip_path: str, extract_dir: str) -> str: async def _unzip_file(self, file_record: FileInDB, subdir: str, db: Session) -> str:
"""Unzip a file to the specified directory and return the path to the extracted JSON file""" """Unzip a file to the specified subdirectory and return the path to the extracted JSON file"""
with zipfile.ZipFile(zip_path, 'r') as zip_ref: try:
# Use the appropriate subdirectory based on the type
extract_path = self.identifiers_dir if subdir == "identifiers" else self.skus_dir
os.makedirs(extract_path, exist_ok=True)
with zipfile.ZipFile(file_record.path, 'r') as zip_ref:
json_filename = zip_ref.namelist()[0]
zip_ref.extractall(extract_dir) zip_ref.extractall(extract_path)
return os.path.join(extract_dir, json_filename) json_path = os.path.join(extract_path, json_filename)
# Create a file record for the extracted JSON file
with open(json_path, 'r') as f:
json_data = f.read()
json_file_record = await self.file_service.save_file(
db=db,
file_data=json_data,
filename=json_filename,
subdir=f"mtgjson/{subdir}",
file_type="application/json",
content_type="application/json"
)
return str(json_file_record.path)
except Exception as e:
logger.error(f"Error unzipping file: {e}")
raise
def _stream_json_file(self, file_path: str) -> Generator[Dict[str, Any], None, None]: def _stream_json_file(self, file_path: str) -> Generator[Dict[str, Any], None, None]:
"""Stream a JSON file and yield items one at a time""" """Stream a JSON file and yield items one at a time using a streaming parser"""
print(f"Starting to stream JSON file: {file_path}") logger.info(f"Starting to stream JSON file: {file_path}")
try:
with open(file_path, 'r') as f:
# Load the entire file since MTGJSON uses a specific format # First, we need to find the start of the data section
data = json.load(f) data_started = False
current_key = None
current_value = []
brace_count = 0
# First yield the meta data for line in f:
if "meta" in data: line = line.strip()
yield {"type": "meta", "data": data["meta"]} if not line:
# Then yield each item in the data section
if "data" in data:
for key, value in data["data"].items():
yield {"type": "item", "data": {key: value}}
async def _process_batch(self, db: Session, items: list, model_class) -> int:
"""Process a batch of items and add them to the database"""
processed = 0
with transaction(db):
for item in items:
if model_class == MTGJSONCard:
# Check if card already exists
existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.card_id == item["card_id"]).first()
if existing_card:
continue continue
new_item = MTGJSONCard( if not data_started:
card_id=item["card_id"], if '"data":' in line:
name=item["name"], data_started = True
set_code=item["set_code"], # Skip the opening brace of the data object
uuid=item["uuid"], line = line[line.find('"data":') + 7:].strip()
abu_id=item.get("abu_id"), if line.startswith('{'):
card_kingdom_etched_id=item.get("card_kingdom_etched_id"), line = line[1:].strip()
card_kingdom_foil_id=item.get("card_kingdom_foil_id"), else:
card_kingdom_id=item.get("card_kingdom_id"), # Yield meta data if found
cardsphere_id=item.get("cardsphere_id"), if '"meta":' in line:
cardsphere_foil_id=item.get("cardsphere_foil_id"), meta_start = line.find('"meta":') + 7
cardtrader_id=item.get("cardtrader_id"), meta_end = line.rfind('}')
csi_id=item.get("csi_id"), if meta_end > meta_start:
mcm_id=item.get("mcm_id"), meta_json = line[meta_start:meta_end + 1]
mcm_meta_id=item.get("mcm_meta_id"), try:
miniaturemarket_id=item.get("miniaturemarket_id"), meta_data = json.loads(meta_json)
mtg_arena_id=item.get("mtg_arena_id"), yield {"type": "meta", "data": meta_data}
mtgjson_foil_version_id=item.get("mtgjson_foil_version_id"), except json.JSONDecodeError as e:
mtgjson_non_foil_version_id=item.get("mtgjson_non_foil_version_id"), logger.warning(f"Failed to parse meta data: {e}")
mtgjson_v4_id=item.get("mtgjson_v4_id"),
mtgo_foil_id=item.get("mtgo_foil_id"),
mtgo_id=item.get("mtgo_id"),
multiverse_id=item.get("multiverse_id"),
scg_id=item.get("scg_id"),
scryfall_id=item.get("scryfall_id"),
scryfall_card_back_id=item.get("scryfall_card_back_id"),
scryfall_oracle_id=item.get("scryfall_oracle_id"),
scryfall_illustration_id=item.get("scryfall_illustration_id"),
tcgplayer_product_id=item.get("tcgplayer_product_id"),
tcgplayer_etched_product_id=item.get("tcgplayer_etched_product_id"),
tnt_id=item.get("tnt_id")
)
else: # MTGJSONSKU
# Check if SKU already exists
existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == item["sku_id"]).first()
if existing_sku:
continue continue
new_item = MTGJSONSKU( # Process the data section
sku_id=str(item["sku_id"]), if data_started:
product_id=str(item["product_id"]), if not current_key:
condition=item["condition"], # Look for a new key
finish=item["finish"], if '"' in line:
language=item["language"], key_start = line.find('"') + 1
printing=item["printing"], key_end = line.find('"', key_start)
card_id=item["card_id"] if key_end > key_start:
) current_key = line[key_start:key_end]
db.add(new_item) # Get the rest of the line after the key
processed += 1 line = line[key_end + 1:].strip()
if ':' in line:
line = line[line.find(':') + 1:].strip()
return processed if current_key:
# Accumulate the value
current_value.append(line)
brace_count += line.count('{') - line.count('}')
async def download_and_process_identifiers(self, db: Session) -> Dict[str, int]: if brace_count == 0 and line.endswith(','):
"""Download, unzip and process AllIdentifiers.json.zip using streaming""" # We have a complete value
self._print_progress("Starting MTGJSON identifiers processing...") value_str = ''.join(current_value).rstrip(',')
start_time = time.time() try:
value = json.loads(value_str)
yield {"type": "item", "data": {current_key: value}}
except json.JSONDecodeError as e:
logger.warning(f"Failed to parse value for key {current_key}: {e}")
current_key = None
current_value = []
# Download the file using FileService except Exception as e:
logger.error(f"Error streaming JSON file: {e}")
raise
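# Shape of the MTGJSON files this line-oriented parser walks (keys mirror those referenced
# above; values are placeholders):
#   {
#     "meta": {"version": "...", "date": "..."},
#     "data": {
#       "<key>": { ...card or sku payload... },
#       ...
#     }
#   }
# Each completed entry under "data" is yielded as {"type": "item", "data": {key: value}}.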
async def get_identifiers(self, db: Session) -> Generator[Dict[str, Any], None, None]:
"""Download and get MTGJSON identifiers data"""
# Check if we have a cached version
cached_file = await self.file_service.get_file_by_filename(db, "AllIdentifiers.json")
if cached_file:
# Ensure the file exists at the path
if os.path.exists(cached_file.path):
return self._stream_json_file(cached_file.path)
# Download and process the file
file_record = await self._download_file(
db=db,
url="https://mtgjson.com/api/v5/AllIdentifiers.json.zip",
@ -159,87 +175,22 @@ class MTGJSONService(BaseExternalService):
subdir="identifiers" subdir="identifiers"
) )
# Get the file path from the database record # Unzip and process the file
zip_path = file_record.path json_path = await self._unzip_file(file_record, "identifiers", db)
cards_processed = 0 # Return a generator that streams the JSON file
current_batch = [] return self._stream_json_file(json_path)
total_cards = 0
last_progress_time = time.time()
self._print_progress("Processing cards...") async def get_skus(self, db: Session) -> Generator[Dict[str, Any], None, None]:
try: """Download and get MTGJSON SKUs data"""
for item in self._stream_json_file(zip_path): # Check if we have a cached version
if item["type"] == "meta": cached_file = await self.file_service.get_file_by_filename(db, "TcgplayerSkus.json")
self._print_progress(f"Processing MTGJSON data version {item['data'].get('version')} from {item['data'].get('date')}") if cached_file:
continue # Ensure the file exists at the path
if os.path.exists(cached_file.path):
return self._stream_json_file(cached_file.path)
card_data = item["data"] # Download and process the file
card_id = list(card_data.keys())[0]
card_info = card_data[card_id]
total_cards += 1
current_batch.append({
"card_id": card_id,
"name": card_info.get("name"),
"set_code": card_info.get("setCode"),
"uuid": card_info.get("uuid"),
"abu_id": card_info.get("identifiers", {}).get("abuId"),
"card_kingdom_etched_id": card_info.get("identifiers", {}).get("cardKingdomEtchedId"),
"card_kingdom_foil_id": card_info.get("identifiers", {}).get("cardKingdomFoilId"),
"card_kingdom_id": card_info.get("identifiers", {}).get("cardKingdomId"),
"cardsphere_id": card_info.get("identifiers", {}).get("cardsphereId"),
"cardsphere_foil_id": card_info.get("identifiers", {}).get("cardsphereFoilId"),
"cardtrader_id": card_info.get("identifiers", {}).get("cardtraderId"),
"csi_id": card_info.get("identifiers", {}).get("csiId"),
"mcm_id": card_info.get("identifiers", {}).get("mcmId"),
"mcm_meta_id": card_info.get("identifiers", {}).get("mcmMetaId"),
"miniaturemarket_id": card_info.get("identifiers", {}).get("miniaturemarketId"),
"mtg_arena_id": card_info.get("identifiers", {}).get("mtgArenaId"),
"mtgjson_foil_version_id": card_info.get("identifiers", {}).get("mtgjsonFoilVersionId"),
"mtgjson_non_foil_version_id": card_info.get("identifiers", {}).get("mtgjsonNonFoilVersionId"),
"mtgjson_v4_id": card_info.get("identifiers", {}).get("mtgjsonV4Id"),
"mtgo_foil_id": card_info.get("identifiers", {}).get("mtgoFoilId"),
"mtgo_id": card_info.get("identifiers", {}).get("mtgoId"),
"multiverse_id": card_info.get("identifiers", {}).get("multiverseId"),
"scg_id": card_info.get("identifiers", {}).get("scgId"),
"scryfall_id": card_info.get("identifiers", {}).get("scryfallId"),
"scryfall_card_back_id": card_info.get("identifiers", {}).get("scryfallCardBackId"),
"scryfall_oracle_id": card_info.get("identifiers", {}).get("scryfallOracleId"),
"scryfall_illustration_id": card_info.get("identifiers", {}).get("scryfallIllustrationId"),
"tcgplayer_product_id": card_info.get("identifiers", {}).get("tcgplayerProductId"),
"tcgplayer_etched_product_id": card_info.get("identifiers", {}).get("tcgplayerEtchedProductId"),
"tnt_id": card_info.get("identifiers", {}).get("tntId"),
"data": card_info
})
if len(current_batch) >= self.batch_size:
batch_processed = await self._process_batch(db, current_batch, MTGJSONCard)
cards_processed += batch_processed
current_batch = []
current_time = time.time()
if current_time - last_progress_time >= 1.0: # Update progress every second
self._print_progress(f"\r{self._format_progress(cards_processed, total_cards, start_time)}", end="")
last_progress_time = current_time
except Exception as e:
self._print_progress(f"\nError during processing: {str(e)}")
raise
# Process remaining items
if current_batch:
batch_processed = await self._process_batch(db, current_batch, MTGJSONCard)
cards_processed += batch_processed
total_time = time.time() - start_time
self._print_progress(f"\nProcessing complete! Processed {cards_processed} cards in {total_time:.1f} seconds")
return {"cards_processed": cards_processed}
async def download_and_process_skus(self, db: Session) -> Dict[str, int]:
"""Download, unzip and process TcgplayerSkus.json.zip using streaming"""
self._print_progress("Starting MTGJSON SKUs processing...")
start_time = time.time()
# Download the file using FileService
file_record = await self._download_file(
db=db,
url="https://mtgjson.com/api/v5/TcgplayerSkus.json.zip",
@ -247,64 +198,21 @@ class MTGJSONService(BaseExternalService):
subdir="skus" subdir="skus"
) )
# Get the file path from the database record # Unzip and process the file
zip_path = file_record.path json_path = await self._unzip_file(file_record, "skus", db)
skus_processed = 0 # Return a generator that streams the JSON file
current_batch = [] return self._stream_json_file(json_path)
total_skus = 0
last_progress_time = time.time()
self._print_progress("Processing SKUs...") async def clear_cache(self, db: Session) -> None:
try:
for item in self._stream_json_file(zip_path):
if item["type"] == "meta":
self._print_progress(f"Processing MTGJSON SKUs version {item['data'].get('version')} from {item['data'].get('date')}")
continue
# The data structure is {card_uuid: [sku1, sku2, ...]}
for card_uuid, sku_list in item["data"].items():
for sku in sku_list:
total_skus += 1
current_batch.append({
"sku_id": str(sku.get("skuId")),
"product_id": str(sku.get("productId")),
"condition": sku.get("condition"),
"finish": sku.get("finish"),
"language": sku.get("language"),
"printing": sku.get("printing"),
"card_id": card_uuid,
"data": sku
})
if len(current_batch) >= self.batch_size:
batch_processed = await self._process_batch(db, current_batch, MTGJSONSKU)
skus_processed += batch_processed
current_batch = []
current_time = time.time()
if current_time - last_progress_time >= 1.0: # Update progress every second
self._print_progress(f"\r{self._format_progress(skus_processed, total_skus, start_time)}", end="")
last_progress_time = current_time
except Exception as e:
self._print_progress(f"\nError during processing: {str(e)}")
raise
# Process remaining items
if current_batch:
batch_processed = await self._process_batch(db, current_batch, MTGJSONSKU)
skus_processed += batch_processed
total_time = time.time() - start_time
self._print_progress(f"\nProcessing complete! Processed {skus_processed} SKUs in {total_time:.1f} seconds")
return {"skus_processed": skus_processed}
async def clear_cache(self) -> None:
"""Clear all cached data""" """Clear all cached data"""
for subdir in ["identifiers", "skus"]: try:
dir_path = os.path.join(self.cache_dir, subdir) # Delete all files in the mtgjson subdirectory
if os.path.exists(dir_path): files = await self.file_service.list_files(db, file_type=["json", "zip"])
for filename in os.listdir(dir_path): for file in files:
file_path = os.path.join(dir_path, filename) if file.path.startswith("mtgjson/"):
if os.path.isfile(file_path): await self.file_service.delete_file(db, file.id)
os.unlink(file_path) logger.info("MTGJSON cache cleared")
print("MTGJSON cache cleared") except Exception as e:
logger.error(f"Error clearing cache: {e}")
raise
View File
@ -3,256 +3,49 @@ from datetime import datetime, timedelta
import csv
import io
from app.services.external_api.base_external_service import BaseExternalService
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_product import TCGPlayerProduct
from app.models.tcgplayer_category import TCGPlayerCategory
from app.db.database import get_db, transaction
from sqlalchemy.orm import Session
import py7zr
import os
from app.schemas.file import FileInDB
class TCGCSVService(BaseExternalService):
def __init__(self):
super().__init__(base_url="https://tcgcsv.com/")
async def get_groups(self, game_ids: List[int]) -> Dict[str, Any]: async def get_groups(self, game_id: int) -> Dict[str, Any]:
"""Fetch groups for specific game IDs from TCGCSV API""" """Fetch groups for specific game IDs from TCGCSV API"""
game_ids_str = ",".join(map(str, game_ids)) endpoint = f"tcgplayer/{game_id}/groups"
endpoint = f"tcgplayer/{game_ids_str}/groups"
return await self._make_request("GET", endpoint) return await self._make_request("GET", endpoint)
async def get_products_and_prices(self, game_ids: List[int], group_id: int) -> List[Dict[str, Any]]: async def get_products_and_prices(self, game_id: str, group_id: int) -> str:
"""Fetch products and prices for a specific group from TCGCSV API""" """Fetch products and prices for a specific group from TCGCSV API"""
game_ids_str = ",".join(map(str, game_ids)) endpoint = f"tcgplayer/{game_id}/{group_id}/ProductsAndPrices.csv"
endpoint = f"tcgplayer/{game_ids_str}/{group_id}/ProductsAndPrices.csv" return await self._make_request("GET", endpoint, headers={"Accept": "text/csv"})
response = await self._make_request("GET", endpoint, headers={"Accept": "text/csv"})
# Parse CSV response
csv_data = io.StringIO(response)
reader = csv.DictReader(csv_data)
return list(reader)
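# Since the new get_products_and_prices returns the raw CSV text, a caller would now do the
# parsing itself; a minimal sketch reusing the csv/io imports above ("service" and the
# game/group ids are placeholders):
#   text = await service.get_products_and_prices("1", 12345)
#   rows = list(csv.DictReader(io.StringIO(text)))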
async def get_categories(self) -> Dict[str, Any]:
"""Fetch all categories from TCGCSV API"""
endpoint = "tcgplayer/categories"
return await self._make_request("GET", endpoint)
async def get_archived_prices_for_date(self, db: Session, date_str: str) -> str: async def get_archived_prices_for_date(self, date_str: str) -> bytes:
"""Fetch archived prices from TCGCSV API""" """Fetch archived prices from TCGCSV API"""
# Download the archive file
endpoint = f"archive/tcgplayer/prices-{date_str}.ppmd.7z" endpoint = f"archive/tcgplayer/prices-{date_str}.ppmd.7z"
response = await self._make_request("GET", endpoint, binary=True) return await self._make_request("GET", endpoint, binary=True)
# Save the archive file using FileService async def get_tcgcsv_date_range(self, start_date: datetime, end_date: datetime) -> List[datetime]:
file_record = await self.save_file( """Get a date range for a given start and end date"""
db=db,
file_data=response,
file_name=f"prices-{date_str}.ppmd.7z",
subdir=f"tcgcsv/prices/zip",
file_type="application/x-7z-compressed"
)
# Extract the 7z file
with py7zr.SevenZipFile(file_record.path, 'r') as archive:
# Extract to a directory named after the date
extract_path = f"app/data/cache/tcgcsv/prices/{date_str}"
os.makedirs(extract_path, exist_ok=True)
archive.extractall(path=extract_path)
return date_str
async def get_archived_prices_for_date_range(self, start_date: str, end_date: str):
"""Fetch archived prices for a date range from TCGCSV API"""
# Convert string dates to datetime objects
start_dt = datetime.strptime(start_date, "%Y-%m-%d")
end_dt = datetime.strptime(end_date, "%Y-%m-%d")
min_start_date = datetime.strptime("2024-02-08", "%Y-%m-%d")
# Set minimum start date max_end_date = datetime.now()
min_start_date = datetime.strptime("2025-02-08", "%Y-%m-%d")
if start_dt < min_start_date:
start_dt = min_start_date
if end_dt > max_end_date:
# Set maximum end date to today end_dt = max_end_date
today = datetime.now()
if end_dt > today:
end_dt = today
# Generate date range
date_range = []
current_dt = start_dt
while current_dt <= end_dt:
date_range.append(current_dt.strftime("%Y-%m-%d"))
current_dt += timedelta(days=1)
return date_range
# Process each date async def get_archived_prices_for_date_range(self, start_date: datetime, end_date: datetime) -> List[datetime]:
for date_str in date_range: """Fetch archived prices for a date range from TCGCSV API"""
await self.get_archived_prices_for_date(date_str) date_range = await self.get_tcgcsv_date_range(start_date, end_date)
return date_range
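# Worked example of the clamping above (dates are illustrative): asking for 2024-01-01
# through a date in the future yields every day from min_start_date (2024-02-08) up to
# datetime.now(), as inclusive "YYYY-MM-DD" strings.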
async def sync_groups_to_db(self, db: Session, game_ids: List[int]) -> List[TCGPlayerGroup]:
"""Fetch groups from API and sync them to the database"""
response = await self.get_groups(game_ids)
if not response.get("success"):
raise Exception(f"Failed to fetch groups: {response.get('errors')}")
groups = response.get("results", [])
synced_groups = []
with transaction(db):
for group_data in groups:
# Convert string dates to datetime objects
published_on = datetime.fromisoformat(group_data["publishedOn"].replace("Z", "+00:00")) if group_data.get("publishedOn") else None
modified_on = datetime.fromisoformat(group_data["modifiedOn"].replace("Z", "+00:00")) if group_data.get("modifiedOn") else None
# Check if group already exists
existing_group = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == group_data["groupId"]).first()
if existing_group:
# Update existing group
for key, value in {
"name": group_data["name"],
"abbreviation": group_data.get("abbreviation"),
"is_supplemental": group_data.get("isSupplemental", False),
"published_on": published_on,
"modified_on": modified_on,
"category_id": group_data.get("categoryId")
}.items():
setattr(existing_group, key, value)
synced_groups.append(existing_group)
else:
# Create new group
new_group = TCGPlayerGroup(
group_id=group_data["groupId"],
name=group_data["name"],
abbreviation=group_data.get("abbreviation"),
is_supplemental=group_data.get("isSupplemental", False),
published_on=published_on,
modified_on=modified_on,
category_id=group_data.get("categoryId")
)
db.add(new_group)
synced_groups.append(new_group)
return synced_groups
async def sync_products_to_db(self, db: Session, game_id: int, group_id: int) -> List[TCGPlayerProduct]:
"""Fetch products and prices for a group and sync them to the database"""
products_data = await self.get_products_and_prices(game_id, group_id)
synced_products = []
for product_data in products_data:
# Convert string dates to datetime objects
modified_on = datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None
# Convert price strings to floats, handling empty strings
def parse_price(price_str):
return float(price_str) if price_str else None
# Check if product already exists
existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == int(product_data["productId"])).first()
if existing_product:
# Update existing product
for key, value in {
"name": product_data["name"],
"clean_name": product_data.get("cleanName"),
"image_url": product_data.get("imageUrl"),
"category_id": int(product_data["categoryId"]),
"group_id": int(product_data["groupId"]),
"url": product_data.get("url"),
"modified_on": modified_on,
"image_count": int(product_data.get("imageCount", 0)),
"ext_rarity": product_data.get("extRarity"),
"ext_number": product_data.get("extNumber"),
"low_price": parse_price(product_data.get("lowPrice")),
"mid_price": parse_price(product_data.get("midPrice")),
"high_price": parse_price(product_data.get("highPrice")),
"market_price": parse_price(product_data.get("marketPrice")),
"direct_low_price": parse_price(product_data.get("directLowPrice")),
"sub_type_name": product_data.get("subTypeName")
}.items():
setattr(existing_product, key, value)
synced_products.append(existing_product)
else:
# Create new product
with transaction(db):
new_product = TCGPlayerProduct(
product_id=int(product_data["productId"]),
name=product_data["name"],
clean_name=product_data.get("cleanName"),
image_url=product_data.get("imageUrl"),
category_id=int(product_data["categoryId"]),
group_id=int(product_data["groupId"]),
url=product_data.get("url"),
modified_on=modified_on,
image_count=int(product_data.get("imageCount", 0)),
ext_rarity=product_data.get("extRarity"),
ext_number=product_data.get("extNumber"),
low_price=parse_price(product_data.get("lowPrice")),
mid_price=parse_price(product_data.get("midPrice")),
high_price=parse_price(product_data.get("highPrice")),
market_price=parse_price(product_data.get("marketPrice")),
direct_low_price=parse_price(product_data.get("directLowPrice")),
sub_type_name=product_data.get("subTypeName")
)
db.add(new_product)
synced_products.append(new_product)
return synced_products
async def sync_categories_to_db(self, db: Session) -> List[TCGPlayerCategory]:
"""Fetch categories from API and sync them to the database"""
response = await self.get_categories()
if not response.get("success"):
raise Exception(f"Failed to fetch categories: {response.get('errors')}")
categories = response.get("results", [])
synced_categories = []
with transaction(db):
for category_data in categories:
# Convert string dates to datetime objects
modified_on = datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None
# Check if category already exists
existing_category = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == category_data["categoryId"]).first()
if existing_category:
# Update existing category
for key, value in {
"name": category_data["name"],
"display_name": category_data.get("displayName"),
"seo_category_name": category_data.get("seoCategoryName"),
"category_description": category_data.get("categoryDescription"),
"category_page_title": category_data.get("categoryPageTitle"),
"sealed_label": category_data.get("sealedLabel"),
"non_sealed_label": category_data.get("nonSealedLabel"),
"condition_guide_url": category_data.get("conditionGuideUrl"),
"is_scannable": category_data.get("isScannable", False),
"popularity": category_data.get("popularity", 0),
"is_direct": category_data.get("isDirect", False),
"modified_on": modified_on
}.items():
setattr(existing_category, key, value)
synced_categories.append(existing_category)
else:
# Create new category
new_category = TCGPlayerCategory(
category_id=category_data["categoryId"],
name=category_data["name"],
display_name=category_data.get("displayName"),
seo_category_name=category_data.get("seoCategoryName"),
category_description=category_data.get("categoryDescription"),
category_page_title=category_data.get("categoryPageTitle"),
sealed_label=category_data.get("sealedLabel"),
non_sealed_label=category_data.get("nonSealedLabel"),
condition_guide_url=category_data.get("conditionGuideUrl"),
is_scannable=category_data.get("isScannable", False),
popularity=category_data.get("popularity", 0),
is_direct=category_data.get("isDirect", False),
modified_on=modified_on
)
db.add(new_category)
synced_categories.append(new_category)
return synced_categories
View File
@ -150,3 +150,10 @@ class FileService:
return FileInDB.model_validate(file_record)
else:
return None
async def get_file_by_filename(self, db: Session, filename: str) -> Optional[FileInDB]:
"""Get a file record from the database by filename"""
file_record = db.query(File).filter(File.name == filename).first()
if file_record:
return FileInDB.model_validate(file_record)
return None
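# Cache-lookup sketch (mirrors how MTGJSONService reuses downloads; names are illustrative):
#   cached = await file_service.get_file_by_filename(db, "AllIdentifiers.json")
#   if cached and os.path.exists(cached.path):
#       ...  # reuse cached.path instead of downloading again
# Note that .first() simply returns the first matching row if several files share a name.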
View File
@ -142,13 +142,14 @@ class LabelPrinterService:
logger.error(f"Unexpected error in _send_print_request: {e}") logger.error(f"Unexpected error in _send_print_request: {e}")
return False return False
async def print_file(self, file_path: Union[str, Path, FileInDB], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label", "return_label", "pirate_ship_label"]] = None, copies: Optional[int] = None) -> bool:
"""Print a PDF or PNG file to the label printer.
Args:
file_path: Path to the PDF or PNG file, or a FileInDB object
label_size: Size of label to use ("dk1201" or "dk1241")
label_type: Type of label to use ("address_label", "packing_slip", "set_label", "return_label", or "pirate_ship_label")
copies: Optional number of copies to print. If None, prints once.
Returns:
bool: True if print was successful, False otherwise
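# Illustrative call using the new parameters (the service instance name and pdf_path are
# assumptions, not taken from this commit):
#   await label_printer.print_file(pdf_path, label_size="dk1201",
#                                  label_type="return_label", copies=2)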
@ -206,7 +207,7 @@ class LabelPrinterService:
resized_image = resized_image.resize((991, 306), Image.Resampling.LANCZOS)
# if file path contains address_label, rotate image 90 degrees
if label_type == "address_label" or label_type == "set_label" or label_type == "return_label":
rotate = "90"
cut = False
else:
@ -240,7 +241,21 @@ class LabelPrinterService:
with open(cache_path, "wb") as f:
f.write(converted_image)
if copies:
# Send to API for each copy
for copy in range(copies):
logger.info(f"Printing copy {copy + 1} of {copies}")
if not await self._send_print_request(cache_path):
logger.error(f"Failed to print page {i+1}, copy {copy + 1}")
return False
# Wait for printer to be ready before next copy or page
if copy < copies - 1 or i < len(images) - 1:
if not await self._wait_for_printer_ready():
logger.error("Printer not ready for next copy/page")
return False
else:
# Send to API once (original behavior)
if not await self._send_print_request(cache_path):
logger.error(f"Failed to print page {i+1}")
return False
View File
@ -1,4 +1,4 @@
from app.db.database import transaction
from app.services.scheduler.base_scheduler import BaseScheduler
import logging
@ -17,11 +17,10 @@ class SchedulerService:
self._service_manager = ServiceManager()
return self._service_manager
async def update_open_orders_hourly(self, db):
"""
Hourly update of orders from TCGPlayer API to database
"""
db = next(get_db())
try:
logger.info("Starting hourly order update")
# Get order management service
@ -39,14 +38,11 @@ class SchedulerService:
except Exception as e:
logger.error(f"Error updating open orders: {str(e)}")
raise
finally:
db.close()
async def update_all_orders_daily(self, db):
"""
Daily update of all orders from TCGPlayer API to database
"""
db = next(get_db())
try:
logger.info("Starting daily order update")
# Get order management service
@ -64,21 +60,19 @@ class SchedulerService:
except Exception as e:
logger.error(f"Error updating all orders: {str(e)}")
raise
finally:
db.close()
async def start_scheduled_tasks(self, db):
"""Start all scheduled tasks"""
# Schedule open orders update to run hourly at 00 minutes
await self.scheduler.schedule_task(
task_name="update_open_orders_hourly",
func=lambda: self.update_open_orders_hourly(db),
interval_seconds=60 * 60,  # 1 hour
)
# Schedule all orders update to run daily at 1 AM
await self.scheduler.schedule_task(
task_name="update_all_orders_daily",
func=lambda: self.update_all_orders_daily(db),
interval_seconds=24 * 60 * 60,  # 24 hours
)
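# Hypothetical startup wiring (only the SchedulerService methods are taken from this file;
# the other names are assumptions):
#   db = next(get_db())
#   scheduler_service = service_manager.get_service('scheduler')
#   await scheduler_service.start_scheduled_tasks(db)
# Because the lambdas above capture db, that session must remain usable for as long as the
# scheduled tasks keep running.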
View File
@ -26,7 +26,9 @@ class ServiceManager:
'set_label': 'app.services.set_label_service.SetLabelService',
'data_initialization': 'app.services.data_initialization.DataInitializationService',
'scheduler': 'app.services.scheduler.scheduler_service.SchedulerService',
'file': 'app.services.file_service.FileService',
'tcgcsv': 'app.services.external_api.tcgcsv.tcgcsv_service.TCGCSVService',
'mtgjson': 'app.services.external_api.mtgjson.mtgjson_service.MTGJSONService'
}
self._service_configs = {
'label_printer': {'printer_api_url': "http://192.168.1.110:8000"},
View File
@ -228,6 +228,114 @@ async function generateAddressLabels() {
}
}
// Show return labels modal
function showReturnLabelsModal() {
const modal = document.getElementById('returnLabelsModal');
modal.classList.remove('hidden');
modal.classList.add('flex');
}
// Close return labels modal
function closeReturnLabelsModal() {
const modal = document.getElementById('returnLabelsModal');
modal.classList.remove('flex');
modal.classList.add('hidden');
}
// Submit return labels request
async function submitReturnLabels() {
try {
const numberOfLabels = document.getElementById('numberOfLabels').value;
if (!numberOfLabels || numberOfLabels < 1) {
showToast('Please enter a valid number of labels', 'error');
return;
}
setLoading(true);
const response = await fetch(`${API_BASE_URL}/orders/generate-return-labels`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
number_of_labels: parseInt(numberOfLabels)
})
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.detail || 'Failed to generate return labels');
}
showToast('Return labels generated successfully');
closeReturnLabelsModal();
} catch (error) {
showToast('Error generating return labels: ' + error.message, 'error');
} finally {
setLoading(false);
}
}
// Generate return labels (opens modal)
function generateReturnLabels() {
showReturnLabelsModal();
}
// Show Pirate Ship label modal
function showPirateShipModal() {
const modal = document.getElementById('pirateShipModal');
modal.classList.remove('hidden');
modal.classList.add('flex');
}
// Close Pirate Ship label modal
function closePirateShipModal() {
const modal = document.getElementById('pirateShipModal');
modal.classList.remove('flex');
modal.classList.add('hidden');
// Reset file input
document.getElementById('pirateShipFile').value = '';
}
// Submit Pirate Ship label
async function submitPirateShipLabel() {
try {
const fileInput = document.getElementById('pirateShipFile');
const file = fileInput.files[0];
if (!file) {
showToast('Please select a PDF file', 'error');
return;
}
if (file.type !== 'application/pdf') {
showToast('Please select a valid PDF file', 'error');
return;
}
setLoading(true);
const formData = new FormData();
formData.append('file', file);
const response = await fetch(`${API_BASE_URL}/orders/print-pirate-ship-label`, {
method: 'POST',
body: formData
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.detail || 'Failed to print Pirate Ship label');
}
showToast('Pirate Ship label printed successfully');
closePirateShipModal();
} catch (error) {
showToast('Error printing Pirate Ship label: ' + error.message, 'error');
} finally {
setLoading(false);
}
}
// Load orders when page loads
document.addEventListener('DOMContentLoaded', () => {
fetchOrders();
View File
@ -39,6 +39,12 @@
<button onclick="generateAddressLabels()" class="px-4 py-2 bg-indigo-600 text-white rounded-lg hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 transition-colors"> <button onclick="generateAddressLabels()" class="px-4 py-2 bg-indigo-600 text-white rounded-lg hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 transition-colors">
Generate Address Labels Generate Address Labels
</button> </button>
<button onclick="generateReturnLabels()" class="px-4 py-2 bg-red-600 text-white rounded-lg hover:bg-red-700 focus:outline-none focus:ring-2 focus:ring-red-500 focus:ring-offset-2 transition-colors">
Generate Return Labels
</button>
<button onclick="showPirateShipModal()" class="px-4 py-2 bg-yellow-600 text-white rounded-lg hover:bg-yellow-700 focus:outline-none focus:ring-2 focus:ring-yellow-500 focus:ring-offset-2 transition-colors">
Upload Pirate Ship Label
</button>
</div>
<div id="labelOptions" class="bg-gray-700 rounded-lg p-4">
<label class="block text-sm font-medium text-gray-300 mb-2">Label Type</label>
@ -49,6 +55,44 @@
</div>
</div>
<!-- Return Labels Modal -->
<div id="returnLabelsModal" class="fixed inset-0 bg-black bg-opacity-50 hidden items-center justify-center">
<div class="bg-gray-800 rounded-lg p-6 max-w-md w-full mx-4">
<h3 class="text-xl font-semibold text-gray-100 mb-4">Generate Return Labels</h3>
<div class="mb-4">
<label for="numberOfLabels" class="block text-sm font-medium text-gray-300 mb-2">Number of Labels</label>
<input type="number" id="numberOfLabels" min="1" value="1" class="w-full rounded-lg border-gray-600 bg-gray-700 text-gray-100 focus:ring-blue-500 focus:border-blue-500">
</div>
<div class="flex justify-end space-x-3">
<button onclick="closeReturnLabelsModal()" class="px-4 py-2 bg-gray-600 text-white rounded-lg hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-gray-500 focus:ring-offset-2 transition-colors">
Cancel
</button>
<button onclick="submitReturnLabels()" class="px-4 py-2 bg-red-600 text-white rounded-lg hover:bg-red-700 focus:outline-none focus:ring-2 focus:ring-red-500 focus:ring-offset-2 transition-colors">
Generate
</button>
</div>
</div>
</div>
<!-- Pirate Ship Label Modal -->
<div id="pirateShipModal" class="fixed inset-0 bg-black bg-opacity-50 hidden items-center justify-center">
<div class="bg-gray-800 rounded-lg p-6 max-w-md w-full mx-4">
<h3 class="text-xl font-semibold text-gray-100 mb-4">Upload Pirate Ship Label</h3>
<div class="mb-4">
<label for="pirateShipFile" class="block text-sm font-medium text-gray-300 mb-2">Select PDF File</label>
<input type="file" id="pirateShipFile" accept=".pdf" class="w-full rounded-lg border-gray-600 bg-gray-700 text-gray-100 focus:ring-blue-500 focus:border-blue-500">
</div>
<div class="flex justify-end space-x-3">
<button onclick="closePirateShipModal()" class="px-4 py-2 bg-gray-600 text-white rounded-lg hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-gray-500 focus:ring-offset-2 transition-colors">
Cancel
</button>
<button onclick="submitPirateShipLabel()" class="px-4 py-2 bg-yellow-600 text-white rounded-lg hover:bg-yellow-700 focus:outline-none focus:ring-2 focus:ring-yellow-500 focus:ring-offset-2 transition-colors">
Upload & Print
</button>
</div>
</div>
</div>
<!-- Order List Section -->
<div class="bg-gray-800 rounded-xl shadow-sm p-6">
<div class="flex items-center justify-between mb-6">