prep for pricing service work
parent a78c3bcba3
commit 964fdd641b

.gitignore (vendored)
@@ -172,3 +172,4 @@ cython_debug/
*.db
temp/
.DS_Store
*.db-journal
db/database.py

@@ -3,6 +3,10 @@ from sqlalchemy.orm import sessionmaker, Session
from contextlib import contextmanager
from typing import Generator
import os
from sqlalchemy import inspect
from services.tcgplayer import TCGPlayerService
#from services.pricing import PricingService
from services.file import FileService


import logging
@@ -45,11 +49,33 @@ def get_db() -> Generator[Session, None, None]:
    yield session

def init_db() -> None:
    """Initialize database tables"""
    """Initialize database tables and run first-time setup if needed"""
    from .models import Base
    try:
        inspector = inspect(engine)
        tables_exist = all(
            table in inspector.get_table_names()
            for table in Base.metadata.tables.keys()
        )
        # if tables_exist:
        #     drop all tables except file
        #     for table in inspector.get_table_names():
        #         if table != 'files':
        #             Base.metadata.drop_all(bind=engine, tables=[Base.metadata.tables[table]])
        #             logger.info(f"Dropped table: {table}")

        # Create tables if they don't exist
        Base.metadata.create_all(bind=engine)
        logger.info("Database tables created successfully")

        # Run first-time setup only if tables were just created
        if not tables_exist:
            # with get_db_session() as session:
            #     tcgplayer_service = TCGPlayerService(session, PricingService(session), FileService(session))
            #     tcgplayer_service.populate_tcgplayer_groups()
            #     tcgplayer_service.cron_load_prices()
            logger.info("First-time database setup completed")

        logger.info("Database initialization completed")
    except Exception as e:
        logger.error(f"Failed to initialize database: {str(e)}")
        raise
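Note: `db_transaction` (imported from `db.utils` in main.py) and the `get_db_session` context manager used in the commented-out first-time setup are not shown anywhere in this commit. A minimal sketch of what such helpers usually look like, with `engine`/`SessionLocal` as stand-ins for the objects defined in db/database.py:

```python
# Not part of this commit: a minimal sketch of the helpers referenced above
# (db_transaction from db.utils, and the get_db_session context manager).
# `engine`/`SessionLocal` are placeholders for the real objects in db/database.py.
from contextlib import contextmanager
from typing import Generator

from sqlalchemy import create_engine
from sqlalchemy.orm import Session, sessionmaker

engine = create_engine("sqlite:///:memory:")  # placeholder engine
SessionLocal = sessionmaker(bind=engine)


@contextmanager
def db_transaction(session: Session) -> Generator[Session, None, None]:
    """Commit the enclosed work on success, roll back on any exception."""
    try:
        yield session
        session.commit()
    except Exception:
        session.rollback()
        raise


@contextmanager
def get_db_session() -> Generator[Session, None, None]:
    """Stand-alone session for code running outside FastAPI dependency injection."""
    session = SessionLocal()
    try:
        yield session
    finally:
        session.close()
```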
db/models.py

@@ -31,11 +31,8 @@ class Product(Base):
        return product_line

    id = Column(String, primary_key=True)
    name = Column(String)
    type = Column(String) # box or card
    product_line = Column(String) # pokemon, mtg, etc.
    set_name = Column(String)
    set_code = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

@@ -133,43 +130,10 @@ class Card(Base):
    """
    Card represents the concept of a distinct card
    Cards have metadata from different sources
    internal: box, inventory, upload
    external: price, attributes - scryfall, tcgplayer, manabox
    """
    __tablename__ = "cards"

    @validates("rarity")
    def validate_rarity(self, key, rarity: str):
        single_character_rarity = {'m': 'mythic', 'r': 'rare', 'u': 'uncommon', 'c': 'common', 'l': 'land', 'p': 'promo', 's': 'special'}
        if rarity not in RarityEnum:
            if rarity.lower() in RarityEnum:
                rarity = rarity.lower()
            elif rarity in single_character_rarity:
                rarity = single_character_rarity[rarity]
            else:
                raise ValueError(f"Invalid rarity: {rarity}")
        return rarity

    @validates("condition")
    def validate_condition(self, key, condition: str):
        if condition not in ConditionEnum:
            if condition.lower() in ConditionEnum:
                condition = condition.lower()
            elif condition.lower().strip().replace(' ', '_') in ConditionEnum:
                condition = condition.lower().strip().replace(' ', '_')
            else:
                raise ValueError(f"Invalid condition: {condition}")
        return condition

    product_id = Column(String, ForeignKey("products.id"), primary_key=True)
    number = Column(String)
    foil = Column(String)
    rarity = Column(String)
    condition = Column(String)
    language = Column(String)
    scryfall_id = Column(String)
    manabox_id = Column(String)
    tcgplayer_id = Column(Integer)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

@@ -183,7 +147,7 @@ class CardManabox(Base):
    collector_number = Column(String)
    foil = Column(String)
    rarity = Column(String)
    manabox_id = Column(String)
    manabox_id = Column(Integer)
    scryfall_id = Column(String)
    condition = Column(String)
    language = Column(String)
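The `@validates` hooks above normalize rarity and condition at assignment time. `RarityEnum`, `ConditionEnum`, and the full Card model are not part of this diff, so the following standalone sketch mirrors the rarity logic with a plain set to show the intended behavior:

```python
# Standalone illustration of the @validates normalization used by Card above.
# RarityEnum and the real Card model are not in this diff, so a plain set and a
# throwaway DemoCard model mirror the same logic.
from sqlalchemy import Column, String
from sqlalchemy.orm import declarative_base, validates

Base = declarative_base()
RARITIES = {"mythic", "rare", "uncommon", "common", "land", "promo", "special"}
SINGLE_CHAR = {"m": "mythic", "r": "rare", "u": "uncommon", "c": "common",
               "l": "land", "p": "promo", "s": "special"}


class DemoCard(Base):
    __tablename__ = "demo_cards"
    id = Column(String, primary_key=True)
    rarity = Column(String)

    @validates("rarity")
    def validate_rarity(self, key, rarity: str):
        # Same cascade as Card.validate_rarity: exact match, lowercase match,
        # then single-character shorthand, otherwise reject.
        if rarity in RARITIES:
            return rarity
        if rarity.lower() in RARITIES:
            return rarity.lower()
        if rarity in SINGLE_CHAR:
            return SINGLE_CHAR[rarity]
        raise ValueError(f"Invalid rarity: {rarity}")


assert DemoCard(id="1", rarity="Mythic").rarity == "mythic"
assert DemoCard(id="2", rarity="r").rarity == "rare"
```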
dependencies.py

@@ -1,6 +1,7 @@
from typing import Annotated
from sqlalchemy.orm import Session
from services.data import DataService
from services.upload import UploadService
from fastapi import Depends, Form

from services.box import BoxService
from services.tcgplayer import TCGPlayerService
from services.pricing import PricingService
@@ -9,27 +10,77 @@ from services.product import ProductService
from services.inventory import InventoryService
from services.task import TaskService
from services.storage import StorageService
from fastapi import Depends, Form
from db.database import get_db
from schemas.file import CreateFileRequest
from schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest

# Common type annotation for database dependency
DB = Annotated[Session, Depends(get_db)]

## file
# file
def get_file_service(db: Session = Depends(get_db)) -> FileService:
    """Dependency injection for FileService"""
# Base Services (no dependencies besides DB)
def get_file_service(db: DB) -> FileService:
    """FileService with only database dependency"""
    return FileService(db)

# metadata
def get_pricing_service(db: DB) -> PricingService:
    """PricingService with only database dependency"""
    return PricingService(db)

def get_storage_service(db: DB) -> StorageService:
    """StorageService with only database dependency"""
    return StorageService(db)

def get_inventory_service(db: DB) -> InventoryService:
    """InventoryService with only database dependency"""
    return InventoryService(db)

# Services with dependencies on other services
def get_tcgplayer_service(
    db: DB,
    pricing_service: Annotated[PricingService, Depends(get_pricing_service)],
    file_service: Annotated[FileService, Depends(get_file_service)]
) -> TCGPlayerService:
    """TCGPlayerService depends on PricingService"""
    return TCGPlayerService(db, pricing_service, file_service)

def get_product_service(
    db: DB,
    file_service: Annotated[FileService, Depends(get_file_service)],
    tcgplayer_service: Annotated[TCGPlayerService, Depends(get_tcgplayer_service)],
    storage_service: Annotated[StorageService, Depends(get_storage_service)]
) -> ProductService:
    """ProductService with multiple service dependencies"""
    return ProductService(db, file_service, tcgplayer_service, storage_service)

def get_box_service(
    db: DB,
    inventory_service: Annotated[InventoryService, Depends(get_inventory_service)]
) -> BoxService:
    """BoxService depends on InventoryService"""
    return BoxService(db, inventory_service)

def get_task_service(
    db: DB,
    product_service: Annotated[ProductService, Depends(get_product_service)],
    tcgplayer_service: Annotated[TCGPlayerService, Depends(get_tcgplayer_service)]
) -> TaskService:
    """TaskService depends on ProductService and TCGPlayerService"""
    return TaskService(db, product_service, tcgplayer_service)

# Form data dependencies
def get_create_file_metadata(
    type: str = Form(...),
    source: str = Form(...),
    service: str = Form(None),
    filename: str = Form(None)
) -> CreateFileRequest:
    """Dependency injection for FileMetadata"""
    return CreateFileRequest(type=type, source=source, service=service, filename=filename)
    """Form dependency for file creation"""
    return CreateFileRequest(
        type=type,
        source=source,
        service=service,
        filename=filename
    )

def get_box_data(
    type: str = Form(...),
@@ -37,8 +88,13 @@ def get_box_data(
    set_code: str = Form(...),
    num_cards_expected: int = Form(None)
) -> CreateBoxRequest:
    """Dependency injection for BoxData"""
    return CreateBoxRequest(type=type, sku=sku, set_code=set_code, num_cards_expected=num_cards_expected)
    """Form dependency for box creation"""
    return CreateBoxRequest(
        type=type,
        sku=sku,
        set_code=set_code,
        num_cards_expected=num_cards_expected
    )

def get_box_update_data(
    type: str = Form(None),
@@ -46,8 +102,13 @@ def get_box_update_data(
    set_code: str = Form(None),
    num_cards_expected: int = Form(None)
) -> UpdateBoxRequest:
    """Dependency injection for BoxUpdateData"""
    return UpdateBoxRequest(type=type, sku=sku, set_code=set_code, num_cards_expected=num_cards_expected)
    """Form dependency for box updates"""
    return UpdateBoxRequest(
        type=type,
        sku=sku,
        set_code=set_code,
        num_cards_expected=num_cards_expected
    )

def get_open_box_data(
    product_id: str = Form(...),
@@ -55,59 +116,10 @@ def get_open_box_data(
    num_cards_actual: int = Form(None),
    date_opened: str = Form(None)
) -> CreateOpenBoxRequest:
    """Dependency injection for OpenBoxData"""
    return CreateOpenBoxRequest(product_id=product_id, file_ids=file_ids, num_cards_actual=num_cards_actual, date_opened=date_opened)

def get_tcgplayer_service(
    db: Session = Depends(get_db)
) -> TCGPlayerService:
    """Dependency injection for TCGPlayerService"""
    return TCGPlayerService(db)

# storage

def get_storage_service(db: Session = Depends(get_db)) -> StorageService:
    """Dependency injection for StorageService"""
    return StorageService(db)

# product
def get_product_service(db: Session = Depends(get_db), file_service: FileService = Depends(get_file_service), tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service), storage_service: StorageService = Depends(get_storage_service)) -> ProductService:
    """Dependency injection for ProductService"""
    return ProductService(db, file_service, tcgplayer_service, storage_service)

# task
def get_task_service(db: Session = Depends(get_db), product_service: ProductService = Depends(get_product_service), tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)) -> TaskService:
    """Dependency injection for TaskService"""
    return TaskService(db, product_service, tcgplayer_service)

## Inventory
def get_inventory_service(db: Session = Depends(get_db)) -> InventoryService:
    """Dependency injection for InventoryService"""
    return InventoryService(db)

## Upload

def get_upload_service(db: Session = Depends(get_db)) -> UploadService:
    """Dependency injection for UploadService"""
    return UploadService(db)

## box

def get_box_service(db: Session = Depends(get_db), inventory_service: InventoryService = Depends(get_inventory_service)) -> BoxService:
    """Dependency injection for BoxService"""
    return BoxService(db, inventory_service)

## Pricing

def get_pricing_service(db: Session = Depends(get_db)) -> PricingService:
    """Dependency injection for PricingService"""
    return PricingService(db)

## Data
def get_data_service(
    db: Session = Depends(get_db),
    tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)
) -> DataService:
    """Dependency injection for DataService"""
    return DataService(db, tcgplayer_service)

    """Form dependency for opening boxes"""
    return CreateOpenBoxRequest(
        product_id=product_id,
        file_ids=file_ids,
        num_cards_actual=num_cards_actual,
        date_opened=date_opened
    )
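With the `Annotated`/`DB` style above, a route only declares the top-level service it needs and FastAPI resolves the whole chain (DB session, then PricingService and FileService, then TCGPlayerService). An illustrative route, not part of this commit; the path and the `cron_load_prices()` call are assumptions based on names used elsewhere in the diff:

```python
# Illustrative only (not in this commit): a route that asks for the fully wired
# TCGPlayerService; FastAPI resolves get_pricing_service and get_file_service
# first, then get_tcgplayer_service. Path and method call are assumptions.
from typing import Annotated

from fastapi import APIRouter, Depends

from dependencies import get_tcgplayer_service
from services.tcgplayer import TCGPlayerService

router = APIRouter(prefix="/api", tags=["pricing"])


@router.post("/pricing/refresh", response_model=dict)
async def refresh_prices(
    tcgplayer_service: Annotated[TCGPlayerService, Depends(get_tcgplayer_service)]
) -> dict:
    tcgplayer_service.cron_load_prices()
    return {"success": True}
```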
main.py

@@ -1,25 +1,30 @@
from fastapi import FastAPI
from fastapi import FastAPI, Depends
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from routes.routes import router
from db.database import init_db, check_db_connection, destroy_db, get_db
from db.utils import db_transaction
from services.task import TaskService
import logging
import sys
from services.tcgplayer import TCGPlayerService, PricingService
from services.product import ProductService
from services.file import FileService
from services.storage import StorageService
from db.models import TCGPlayerGroups
from sqlalchemy.orm import Session

# Import your dependency functions
from dependencies import (
    get_task_service,
    get_tcgplayer_service,
    get_pricing_service,
    get_file_service,
    get_product_service,
    get_storage_service,
)

logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler('app.log') # Added this line
        logging.FileHandler('app.log')
    ]
)

@@ -45,38 +50,54 @@ app.add_middleware(
# Include routers
app.include_router(router)

# Optional: Add startup and shutdown events
@app.on_event("startup")
async def startup_event():
    try:
        # Check database connection
        if not check_db_connection():
            logger.error("Database connection failed")
            raise Exception("Database connection failed")
        # destroy db
        #destroy_db()

        # Initialize database
        init_db()
        # get db session

        # Get database session
        db = next(get_db())
        # populate tcgplayer groups

        # Use dependency injection to get services
        pricing_service = get_pricing_service(db)
        file_service = get_file_service(db)
        storage_service = get_storage_service(db)
        tcgplayer_service = get_tcgplayer_service(db, pricing_service, file_service)
        product_service = get_product_service(db, file_service, tcgplayer_service, storage_service)
        task_service = get_task_service(db, product_service, tcgplayer_service)

        # Initialize TCGPlayer groups if needed
        if db.query(TCGPlayerGroups).count() == 0:
            with db_transaction(db):
                tcgplayer_service = TCGPlayerService(db)
                tcgplayer_service.populate_tcgplayer_groups()

        # DEBUG
        tcgplayer_service.cron_load_prices()

        # Start task service
        task_service = TaskService(db, ProductService(db, FileService(db), TCGPlayerService(db), StorageService(db)), TCGPlayerService(db))
        await task_service.start()

        logger.info("Application started successfully")

    except Exception as e:
        logger.error(f"Startup failed: {str(e)}")
        raise

@app.on_event("shutdown")
async def shutdown_event():
    # Clean up any connections or resources
    logger.info("Application shutting down")
    pass

# Root endpoint
@app.get("/")
async def root():
    return {"message": "Card Management API"}

# Run the application
if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
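A side benefit of building every service through dependencies.py is that tests can swap providers via FastAPI's `dependency_overrides`. A hedged sketch (`FakeTCGPlayerService` is hypothetical):

```python
# Sketch: overriding a provider in tests. FakeTCGPlayerService is hypothetical;
# dependency_overrides and TestClient are standard FastAPI features.
from fastapi.testclient import TestClient

from dependencies import get_tcgplayer_service
from main import app


class FakeTCGPlayerService:
    def cron_load_prices(self) -> None:
        pass  # no network or DB work in tests


app.dependency_overrides[get_tcgplayer_service] = lambda: FakeTCGPlayerService()

client = TestClient(app)
assert client.get("/").json() == {"message": "Card Management API"}
```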
routes/routes.py

@@ -1,99 +1,102 @@
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Request, BackgroundTasks
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, BackgroundTasks
from fastapi.responses import StreamingResponse
from sqlalchemy.orm import Session
from typing import Dict, Any, List, Optional
from db.database import get_db
from services.upload import UploadService
from services.box import BoxService
from services.tcgplayer import TCGPlayerService
from services.data import DataService
from services.file import FileService
from services.product import ProductService
from services.task import TaskService
from schemas.file import FileSchema, CreateFileRequest, CreateFileResponse, GetFileResponse, DeleteFileResponse, GetFileQueryParams
from schemas.box import CreateBoxResponse, CreateBoxRequest, BoxSchema, UpdateBoxRequest, CreateOpenBoxRequest, CreateOpenBoxResponse, OpenBoxSchema
from dependencies import get_data_service, get_upload_service, get_tcgplayer_service, get_box_service, get_create_file_metadata, get_file_service, get_product_service, get_task_service, get_box_data, get_box_update_data, get_open_box_data

from typing import Optional
import logging

from schemas.file import (
    FileSchema,
    CreateFileRequest,
    CreateFileResponse,
    GetFileResponse,
    DeleteFileResponse,
    GetFileQueryParams
)
from schemas.box import (
    CreateBoxResponse,
    CreateBoxRequest,
    BoxSchema,
    UpdateBoxRequest,
    CreateOpenBoxRequest,
    CreateOpenBoxResponse,
    OpenBoxSchema
)
from services.file import FileService
from services.box import BoxService
from services.task import TaskService
from dependencies import (
    get_file_service,
    get_box_service,
    get_task_service,
    get_create_file_metadata,
    get_box_data,
    get_box_update_data,
    get_open_box_data
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api", tags=["cards"])
MAX_FILE_SIZE = 1024 * 1024 * 100 # 100 MB
MAX_FILE_SIZE = 100 * 1024 * 1024 # 100 MB

async def validate_file_upload(file: UploadFile) -> bytes:
    """Validate uploaded file and return its contents."""
    if not file.filename:
        raise HTTPException(status_code=400, detail="No filename provided")

    content = await file.read()
    if len(content) > MAX_FILE_SIZE:
        raise HTTPException(status_code=413, detail="File too large")

## GIGA FOR REAL
## FILE
## CREATE
@router.post(
    "/files",
    response_model=CreateFileResponse,
    status_code=201
)
    return content

@router.post("/files", response_model=CreateFileResponse, status_code=201)
async def create_file(
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...),
    metadata: CreateFileRequest = Depends(get_create_file_metadata),
    file_service: FileService = Depends(get_file_service),
    task_service: TaskService = Depends(get_task_service)
):
) -> CreateFileResponse:
    """Create a new file entry with the uploaded file."""
    try:
        # Validate file size before reading
        if not file.filename:
            raise HTTPException(status_code=400, detail="No filename provided")

        # File size check
        content = await file.read()
        if len(content) > MAX_FILE_SIZE:
            raise HTTPException(status_code=413, detail="File too large")

        content = await validate_file_upload(file)
        logger.debug(f"File received: {file.filename}")
        logger.debug(f"Metadata: {metadata}")

        # ADD FILENAME TO METADATA
        if not metadata.filename:
            metadata.filename = file.filename
        metadata.filename = metadata.filename or file.filename

        # VALIDATE FILE
        if not file_service.validate_file(content, metadata):
            raise HTTPException(status_code=400, detail="Invalid file content")

        # STORE FILE
        created_file = file_service.create_file(content, metadata)

        # Close file after processing
        await file.close()

        # handle manabox file background task
        if metadata.source == 'manabox':
            background_tasks.add_task(task_service.process_manabox_file, created_file)

        return CreateFileResponse(
            status_code=201,
            success=True,
            files=[FileSchema.from_orm(created_file)] # Changed to return list
            files=[FileSchema.from_orm(created_file)]
        )

    except HTTPException as http_ex:
        await file.close()
        raise http_ex
    except Exception as e:
        await file.close()
        logger.error(f"File upload failed: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Internal server error occurred during file upload"
        )
    finally:
        await file.close()
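For reference, the reworked `create_file` route can be exercised with `TestClient`; the form fields mirror `get_create_file_metadata`, and the type/source values follow the manabox config in services/file.py (the CSV body here is a stub):

```python
# Illustrative request against the new POST /api/files route. The form fields
# mirror get_create_file_metadata; type/source values follow the manabox config
# in services/file.py, and the CSV body is just the expected header row.
from fastapi.testclient import TestClient

from main import app

client = TestClient(app)

csv_stub = (
    b"Name,Set code,Set name,Collector number,Foil,Rarity,Quantity,ManaBox ID,"
    b"Scryfall ID,Purchase price,Misprint,Altered,Condition,Language,Purchase price currency\n"
)

response = client.post(
    "/api/files",
    files={"file": ("scan.csv", csv_stub, "text/csv")},
    data={"type": "scan_export_common", "source": "manabox"},
)
print(response.status_code, response.json())
```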
## FILE
|
||||
## GET
|
||||
@router.get("/files/{file_id:path}", response_model=GetFileResponse)
|
||||
@router.get("/files", response_model=GetFileResponse)
|
||||
async def get_file(
|
||||
file_id: Optional[str] = None,
|
||||
query: GetFileQueryParams = Depends(),
|
||||
file_service: FileService = Depends(get_file_service)
|
||||
):
|
||||
) -> GetFileResponse:
|
||||
"""
|
||||
Get file(s) by optional ID and/or status.
|
||||
If file_id is provided, returns that specific file.
|
||||
@ -102,16 +105,11 @@ async def get_file(
|
||||
"""
|
||||
try:
|
||||
if file_id:
|
||||
# Get specific file by ID
|
||||
file = file_service.get_file(file_id)
|
||||
return GetFileResponse(
|
||||
status_code=200,
|
||||
success=True,
|
||||
files=[FileSchema.from_orm(file)]
|
||||
)
|
||||
files = [file]
|
||||
else:
|
||||
# Get multiple files with optional status filter
|
||||
files = file_service.get_files(status=query.status)
|
||||
|
||||
return GetFileResponse(
|
||||
status_code=200,
|
||||
success=True,
|
||||
@ -121,12 +119,12 @@ async def get_file(
|
||||
logger.error(f"Get file(s) failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
## DELETE
|
||||
@router.delete("/files/{file_id}", response_model=DeleteFileResponse)
|
||||
async def delete_file(
|
||||
file_id: str,
|
||||
file_service: FileService = Depends(get_file_service)
|
||||
):
|
||||
) -> DeleteFileResponse:
|
||||
"""Delete a file by ID."""
|
||||
try:
|
||||
file = file_service.delete_file(file_id)
|
||||
return DeleteFileResponse(
|
||||
@ -138,14 +136,12 @@ async def delete_file(
|
||||
logger.error(f"Delete file failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
## BOX
|
||||
## CREATE
|
||||
@router.post("/boxes", response_model=CreateBoxResponse, status_code=201)
|
||||
async def create_box(
|
||||
box_data: CreateBoxRequest = Depends(get_box_data),
|
||||
box_service: BoxService = Depends(get_box_service)
|
||||
):
|
||||
) -> CreateBoxResponse:
|
||||
"""Create a new box."""
|
||||
try:
|
||||
result = box_service.create_box(box_data)
|
||||
return CreateBoxResponse(
|
||||
@ -157,13 +153,13 @@ async def create_box(
|
||||
logger.error(f"Create box failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
## UPDATE
|
||||
@router.put("/boxes/{box_id}", response_model=CreateBoxResponse)
|
||||
async def update_box(
|
||||
box_id: str,
|
||||
box_data: UpdateBoxRequest = Depends(get_box_update_data),
|
||||
box_service: BoxService = Depends(get_box_service)
|
||||
):
|
||||
) -> CreateBoxResponse:
|
||||
"""Update an existing box."""
|
||||
try:
|
||||
result = box_service.update_box(box_id, box_data)
|
||||
return CreateBoxResponse(
|
||||
@ -175,12 +171,12 @@ async def update_box(
|
||||
logger.error(f"Update box failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
## DELETE
|
||||
@router.delete("/boxes/{box_id}", response_model=CreateBoxResponse)
|
||||
async def delete_box(
|
||||
box_id: str,
|
||||
box_service: BoxService = Depends(get_box_service)
|
||||
):
|
||||
) -> CreateBoxResponse:
|
||||
"""Delete a box by ID."""
|
||||
try:
|
||||
result = box_service.delete_box(box_id)
|
||||
return CreateBoxResponse(
|
||||
@ -192,13 +188,13 @@ async def delete_box(
|
||||
logger.error(f"Delete box failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
# BOX OPEN
|
||||
@router.post("/boxes/{box_id}/open", response_model=CreateOpenBoxResponse, status_code=201)
|
||||
async def open_box(
|
||||
box_id: str,
|
||||
box_data: CreateOpenBoxRequest = Depends(get_open_box_data),
|
||||
box_service: BoxService = Depends(get_box_service)
|
||||
):
|
||||
) -> CreateOpenBoxResponse:
|
||||
"""Open a box by ID."""
|
||||
try:
|
||||
result = box_service.open_box(box_id, box_data)
|
||||
return CreateOpenBoxResponse(
|
||||
@ -209,214 +205,3 @@ async def open_box(
|
||||
except Exception as e:
|
||||
logger.error(f"Open box failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
# FILE
"""
|
||||
@router.post("/file/uploadManabox", response_model=FileUploadResponse)
|
||||
async def upload_file(
|
||||
background_tasks: BackgroundTasks,
|
||||
file: UploadFile = File(...),
|
||||
file_service: FileService = Depends(get_file_service),
|
||||
product_service: ProductService = Depends(get_product_service),
|
||||
metadata: FileMetadata = Depends(get_file_metadata)) -> FileUploadResponse:
|
||||
try:
|
||||
content = await file.read()
|
||||
metadata.service = 'product'
|
||||
result = file_service.upload_file(content, file.filename, metadata)
|
||||
background_tasks.add_task(product_service.bg_process_manabox_file, result.id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error(f"File upload failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/file/getPreparedFiles", response_model=GetPreparedFilesResponse)
|
||||
async def get_prepared_files(file_service: FileService = Depends(get_file_service)) -> GetPreparedFilesResponse:
|
||||
try:
|
||||
response = file_service.get_prepared_files()
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error(f"Get prepared files failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
@router.post("/file/deleteFile", response_model=FileDeleteResponse)
|
||||
async def delete_file(file_id: str, file_service: FileService = Depends(get_file_service)) -> FileDeleteResponse:
|
||||
try:
|
||||
response = file_service.delete_file(file_id)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error(f"Delete file failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
@router.post("/box/createBox", response_model=CreateBoxResponse)
|
||||
async def create_box(file_ids: list[str], create_box_data: CreateBoxRequestData, box_service: BoxService = Depends(get_box_service)) -> CreateBoxResponse:
|
||||
try:
|
||||
create_box_data = create_box_data.dict()
|
||||
response = box_service.create_box(create_box_data, file_ids)
|
||||
return response
|
||||
except Exception as e:
|
||||
logger.error(f"Create box failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
"""
|
||||
## all old below
|
||||
|
||||
@router.post("/upload/manabox", response_model=dict)
|
||||
async def upload_manabox(
|
||||
background_tasks: BackgroundTasks,
|
||||
upload_service: UploadService = Depends(get_upload_service),
|
||||
data_service: DataService = Depends(get_data_service),
|
||||
file: UploadFile = File(...)
|
||||
) -> dict:
|
||||
"""
|
||||
Upload endpoint for Manabox CSV files
|
||||
"""
|
||||
try:
|
||||
logger.info(f"file received: {file.filename}")
|
||||
# Read the file content
|
||||
content = await file.read()
|
||||
filename = file.filename
|
||||
file_size = len(content)
|
||||
file_size_kb = file_size / 1024
|
||||
if not content:
|
||||
logger.error("Empty file content")
|
||||
raise HTTPException(status_code=400, detail="Empty file content")
|
||||
|
||||
# You might want to validate it's a CSV file
|
||||
if not file.filename.endswith('.csv'):
|
||||
logger.error("File must be a CSV")
|
||||
raise HTTPException(status_code=400, detail="File must be a CSV")
|
||||
|
||||
result = upload_service.process_manabox_upload(content, filename, file_size_kb)
|
||||
background_tasks.add_task(data_service.bg_set_manabox_tcg_relationship, upload_id=result[1])
|
||||
return result[0]
|
||||
except Exception as e:
|
||||
logger.error(f"Manabox upload failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
|
||||
|
||||
@router.post("/createBox", response_model=dict)
|
||||
async def create_box(
|
||||
upload_id: str,
|
||||
box_service: BoxService = Depends(get_box_service)
|
||||
) -> dict:
|
||||
try:
|
||||
result = box_service.convert_upload_to_boxes(upload_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Box creation failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
return result
|
||||
|
||||
@router.post("/deleteBox", response_model=dict)
|
||||
async def delete_box(
|
||||
box_id: str,
|
||||
box_service: BoxService = Depends(get_box_service)
|
||||
) -> dict:
|
||||
try:
|
||||
result = box_service.delete_box(box_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Box deletion failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
return result
|
||||
|
||||
|
||||
|
||||
@router.post("/tcgplayer/add/box/{box_id}", response_model=dict)
|
||||
async def add_box(box_id: str = None, tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)):
|
||||
try:
|
||||
csv_content = tcgplayer_service.add_to_tcgplayer(box_id)
|
||||
return StreamingResponse(
|
||||
iter([csv_content]),
|
||||
media_type="text/csv",
|
||||
headers={"Content-Disposition": "attachment; filename=add_to_tcgplayer.csv"}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Box add failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
@router.post("/tcgplayer/update/box/{box_id}", response_model=dict)
|
||||
async def update_box(box_id: int = None):
|
||||
"""asdf"""
|
||||
pass
|
||||
|
||||
@router.post("/tcgplayer/updateInventory", response_model=dict)
|
||||
async def update_inventory(
|
||||
background_tasks: BackgroundTasks,
|
||||
tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service),
|
||||
data_service: DataService = Depends(get_data_service)):
|
||||
try:
|
||||
result = tcgplayer_service.update_inventory('live')
|
||||
export_id = result['export_id']
|
||||
background_tasks.add_task(data_service.bg_set_tcg_inventory_product_relationship, export_id)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error(f"Inventory update failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
@router.post("/tcgplayer/updatePricing", response_model=dict)
|
||||
async def update_inventory(
|
||||
tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service),
|
||||
group_ids: Dict = None):
|
||||
try:
|
||||
result = tcgplayer_service.update_pricing(group_ids)
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error(f"Pricing update failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
@router.post("/tcgplayer/updatePricingAll", response_model=dict)
|
||||
async def update_inventory(tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)):
|
||||
try:
|
||||
result = tcgplayer_service.update_pricing_all()
|
||||
return result
|
||||
except Exception as e:
|
||||
logger.error(f"Pricing update failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
||||
|
||||
@router.get("/tcgplayer/createLiveInventoryPricingUpdateFile")
|
||||
async def create_inventory_import(
|
||||
tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)
|
||||
):
|
||||
try:
|
||||
csv_content = tcgplayer_service.get_live_inventory_pricing_update_csv()
|
||||
return StreamingResponse(
|
||||
iter([csv_content]),
|
||||
media_type="text/csv",
|
||||
headers={"Content-Disposition": "attachment; filename=inventory_pricing_update.csv"}
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Inventory import creation failed: {str(e)}")
|
||||
raise HTTPException(status_code=400, detail=str(e))
|
services/box.py

@@ -1,55 +1,71 @@
from db.models import Box, File, StagedFileProduct, Product, OpenBoxCard, OpenBox, Inventory, TCGPlayerGroups
from db.utils import db_transaction
from uuid import uuid4 as uuid
from datetime import datetime
from sqlalchemy.orm import Session
from sqlalchemy.engine.result import Row
from typing import Any, Dict, List, Optional
from uuid import uuid4
from sqlalchemy import or_
from schemas.box import CreateBoxRequest, CreateBoxResponse, UpdateBoxRequest, CreateOpenBoxRequest
from sqlalchemy.orm import Session
import logging
from typing import Any

from db.models import (
    Box,
    File,
    StagedFileProduct,
    Product,
    OpenBoxCard,
    OpenBox,
    Inventory,
    TCGPlayerGroups
)
from db.utils import db_transaction
from schemas.box import CreateBoxRequest, CreateBoxResponse, UpdateBoxRequest, CreateOpenBoxRequest
from services.inventory import InventoryService


logger = logging.getLogger(__name__)

VALID_BOX_TYPES = {"collector", "play", "draft", "set", "commander"}

class BoxService:
    def __init__(self, db: Session, inventory_service: InventoryService):
        self.db = db
        self.inventory_service = inventory_service
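Further down in this file, `aggregate_staged_product_data` collapses staged rows into a product-to-quantity mapping using `dict.get`. A minimal standalone illustration of that pattern (namedtuple rows stand in for StagedFileProduct records):

```python
# Minimal illustration of the aggregation pattern used by
# aggregate_staged_product_data below: rows for the same product collapse into
# a {product: total_quantity} mapping. Namedtuple rows stand in for
# StagedFileProduct records.
from collections import namedtuple

Row = namedtuple("Row", ["product_id", "quantity"])
rows = [Row("prod-1", 3), Row("prod-2", 1), Row("prod-1", 2)]

product_data: dict[str, int] = {}
for row in rows:
    product_data[row.product_id] = product_data.get(row.product_id, 0) + row.quantity

assert product_data == {"prod-1": 5, "prod-2": 1}
```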
def validate_file_ids(self, file_ids: list[str]):
|
||||
# check if all file_ids are valid
|
||||
for file_id in file_ids:
|
||||
if self.db.query(File).filter(File.id == file_id).first() is None:
|
||||
raise Exception(f"File ID {file_id} not found")
|
||||
def validate_file_ids(self, file_ids: List[str]) -> None:
|
||||
"""Validate that all provided file IDs exist in the database."""
|
||||
invalid_files = [
|
||||
file_id for file_id in file_ids
|
||||
if not self.db.query(File).filter(File.id == file_id).first()
|
||||
]
|
||||
if invalid_files:
|
||||
raise ValueError(f"File IDs not found: {', '.join(invalid_files)}")
|
||||
|
||||
def get_staged_product_data(self, file_ids: list[str]) -> StagedFileProduct:
|
||||
staged_product_data = self.db.query(StagedFileProduct).filter(StagedFileProduct.file_id.in_(file_ids)).all()
|
||||
return staged_product_data
|
||||
def get_staged_product_data(self, file_ids: List[str]) -> List[StagedFileProduct]:
|
||||
"""Retrieve staged product data for given file IDs."""
|
||||
return self.db.query(StagedFileProduct).filter(
|
||||
StagedFileProduct.file_id.in_(file_ids)
|
||||
).all()
|
||||
|
||||
def aggregate_staged_product_data(self, staged_product_data: list[Row]) -> dict[Product, int]:
|
||||
def aggregate_staged_product_data(self, staged_product_data: List[StagedFileProduct]) -> Dict[Product, int]:
|
||||
"""Aggregate staged product data by product and quantity."""
|
||||
product_data = {}
|
||||
for row in staged_product_data:
|
||||
product = self.db.query(Product).filter(Product.id == row.product_id).first()
|
||||
if product not in product_data:
|
||||
product_data[product] = 0
|
||||
product_data[product] += row.quantity
|
||||
if product:
|
||||
product_data[product] = product_data.get(product, 0) + row.quantity
|
||||
return product_data
|
||||
|
||||
def find_product_for_box_data(self, create_box_data: dict[str, Any]) -> Product:
|
||||
existing_product = self.db.query(Product).filter(
|
||||
Product.name == create_box_data["name"], # TODO: needs complex enum
|
||||
def find_product_for_box_data(self, create_box_data: Dict[str, Any]) -> Optional[Product]:
|
||||
"""Find existing product matching box data."""
|
||||
return self.db.query(Product).filter(
|
||||
Product.name == create_box_data["name"],
|
||||
Product.type == "box",
|
||||
Product.set_code == create_box_data["set_code"], # TODO: needs complex enum
|
||||
Product.set_name == create_box_data["set_name"], # TODO: needs complex enum
|
||||
Product.product_line == create_box_data["product_line"]).first()
|
||||
return existing_product
|
||||
Product.set_code == create_box_data["set_code"],
|
||||
Product.set_name == create_box_data["set_name"],
|
||||
Product.product_line == create_box_data["product_line"]
|
||||
).first()
|
||||
|
||||
def create_product_for_box(self, create_box_data: dict[str, Any]) -> Product:
|
||||
def create_product_for_box(self, create_box_data: Dict[str, Any]) -> Product:
|
||||
"""Create a new product for a box."""
|
||||
product = Product(
|
||||
id=str(uuid()),
|
||||
id=str(uuid4()),
|
||||
name=create_box_data["name"],
|
||||
type="box",
|
||||
set_code=create_box_data["set_code"],
|
||||
@ -59,7 +75,8 @@ class BoxService:
|
||||
self.db.add(product)
|
||||
return product
|
||||
|
||||
def create_box_db(self, product: Product, create_box_data: dict[str, Any]) -> Box:
|
||||
def create_box_db(self, product: Product, create_box_data: Dict[str, Any]) -> Box:
|
||||
"""Create a new box record in the database."""
|
||||
box = Box(
|
||||
product_id=product.id,
|
||||
type=create_box_data["type"],
|
||||
@ -69,9 +86,10 @@ class BoxService:
|
||||
self.db.add(box)
|
||||
return box
|
||||
|
||||
def create_open_box(self, product: Product, create_box_data: dict[str, Any]) -> OpenBox:
|
||||
def create_open_box(self, product: Product, create_box_data: Dict[str, Any]) -> OpenBox:
|
||||
"""Create a new open box record."""
|
||||
open_box = OpenBox(
|
||||
id = str(uuid()),
|
||||
id=str(uuid4()),
|
||||
product_id=product.id,
|
||||
num_cards_actual=create_box_data["num_cards_actual"],
|
||||
date_opened=datetime.strptime(create_box_data["date_opened"], "%Y-%m-%d")
|
||||
@ -79,78 +97,79 @@ class BoxService:
|
||||
self.db.add(open_box)
|
||||
return open_box
|
||||
|
||||
def add_products_to_open_box(self, open_box: OpenBox, product_data: dict[Product, int]) -> None:
|
||||
def add_products_to_open_box(self, open_box: OpenBox, product_data: Dict[Product, int]) -> None:
|
||||
"""Add products to an open box."""
|
||||
for product, quantity in product_data.items():
|
||||
open_box_card = OpenBoxCard(
|
||||
id=str(uuid()),
|
||||
id=str(uuid4()),
|
||||
open_box_id=open_box.id,
|
||||
card_id=product.id,
|
||||
quantity=quantity
|
||||
)
|
||||
self.db.add(open_box_card)
|
||||
|
||||
def format_response(self, open_box: OpenBox = None, inventory: Inventory = None) -> CreateBoxResponse:
|
||||
response = CreateBoxResponse(success=True)
|
||||
return response
|
||||
def format_response(self, open_box: Optional[OpenBox] = None, inventory: Optional[Inventory] = None) -> CreateBoxResponse:
|
||||
"""Format the response for box creation."""
|
||||
return CreateBoxResponse(success=True)
|
||||
|
||||
def _create_box(self, create_box_data: dict[str, Any], file_ids: list[str] = None) -> CreateBoxResponse:
|
||||
def _create_box(self, create_box_data: Dict[str, Any], file_ids: Optional[List[str]] = None) -> CreateBoxResponse:
|
||||
"""Internal method to handle box creation logic."""
|
||||
sealed = create_box_data["sealed"]
|
||||
assert isinstance(sealed, bool)
|
||||
|
||||
if file_ids and sealed:
|
||||
raise ValueError("Cannot add cards with a sealed box")
|
||||
|
||||
if file_ids and not sealed:
|
||||
self.validate_file_ids(file_ids)
|
||||
staged_product_data = self.get_staged_product_data(file_ids)
|
||||
product_data = self.aggregate_staged_product_data(staged_product_data)
|
||||
elif file_ids and sealed:
|
||||
raise Exception("Cannot add cards with a sealed box")
|
||||
|
||||
# find product with all same box data
|
||||
existing_product = self.find_product_for_box_data(create_box_data)
|
||||
|
||||
if existing_product:
|
||||
box_product = existing_product
|
||||
box_product = self.find_product_for_box_data(create_box_data)
|
||||
|
||||
try:
|
||||
with db_transaction(self.db):
|
||||
if not existing_product:
|
||||
if not box_product:
|
||||
box_product = self.create_product_for_box(create_box_data)
|
||||
|
||||
box = self.create_box_db(box_product, create_box_data)
|
||||
|
||||
if not sealed:
|
||||
open_box = self.create_open_box(box_product, create_box_data)
|
||||
if file_ids:
|
||||
process_staged_products = self.inventory_service.process_staged_products(product_data)
|
||||
self.inventory_service.process_staged_products(product_data)
|
||||
self.add_products_to_open_box(open_box, product_data)
|
||||
# should be the file service handling this but im about to die irl
|
||||
# update file id status to processed
|
||||
for file_id in file_ids:
|
||||
file = self.db.query(File).filter(File.id == file_id).first()
|
||||
file.status = "processed"
|
||||
self.db.add(file)
|
||||
|
||||
# Update file statuses to processed
|
||||
self.db.query(File).filter(File.id.in_(file_ids)).update(
|
||||
{"status": "processed"}, synchronize_session=False
|
||||
)
|
||||
|
||||
return self.format_response(open_box=open_box)
|
||||
elif not file_ids and sealed:
|
||||
# add sealed box to inventory
|
||||
elif sealed:
|
||||
inventory = self.inventory_service.add_sealed_box_to_inventory(box_product, 1)
|
||||
return self.format_response(inventory=inventory)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating box: {str(e)}")
|
||||
raise e
|
||||
raise
|
||||
|
||||
def validate_box_type(self, box_type: str) -> bool:
|
||||
return box_type in ["collector", "play", "draft", "set", "commander"]
|
||||
"""Validate if the box type is supported."""
|
||||
return box_type in VALID_BOX_TYPES
|
||||
|
||||
def validate_set_code(self, set_code: str) -> bool:
|
||||
exists = self.db.query(TCGPlayerGroups).filter(
|
||||
"""Validate if the set code exists in TCGPlayer groups."""
|
||||
return self.db.query(TCGPlayerGroups).filter(
|
||||
TCGPlayerGroups.abbreviation == set_code
|
||||
).first() is not None
|
||||
return exists
|
||||
|
||||
def create_box(self, create_box_data: CreateBoxRequest) -> Box:
|
||||
# validate box data
|
||||
"""Create a new box."""
|
||||
if not self.validate_box_type(create_box_data.type):
|
||||
raise Exception("Invalid box type")
|
||||
raise ValueError("Invalid box type")
|
||||
if not self.validate_set_code(create_box_data.set_code):
|
||||
raise Exception("Invalid set code")
|
||||
# check if box exists by type and set code or sku
|
||||
raise ValueError("Invalid set code")
|
||||
|
||||
existing_box = self.db.query(Box).filter(
|
||||
or_(
|
||||
Box.type == create_box_data.type,
|
||||
@ -158,12 +177,13 @@ class BoxService:
|
||||
),
|
||||
Box.set_code == create_box_data.set_code
|
||||
).first()
|
||||
|
||||
if existing_box:
|
||||
raise Exception("Box already exists")
|
||||
# create box
|
||||
raise ValueError("Box already exists")
|
||||
|
||||
with db_transaction(self.db):
|
||||
box = Box(
|
||||
product_id=str(uuid()),
|
||||
product_id=str(uuid4()),
|
||||
type=create_box_data.type,
|
||||
set_code=create_box_data.set_code,
|
||||
sku=create_box_data.sku,
|
||||
@ -174,47 +194,63 @@ class BoxService:
|
||||
return box
|
||||
|
||||
    def update_box(self, box_id: str, update_box_data: UpdateBoxRequest) -> Box:
        """Update an existing box."""
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise Exception("Box not found")
            raise ValueError("Box not found")

        update_data = update_box_data.dict(exclude_unset=True)

        # Validate box type if it's being updated
        if "type" in update_data and update_data["type"] is not None:
            if not self.validate_box_type(update_data["type"]):
                raise ValueError(f"Invalid box type: {update_data['type']}")

        # Validate set code if it's being updated
        if "set_code" in update_data and update_data["set_code"] is not None:
            if not self.validate_set_code(update_data["set_code"]):
                raise ValueError(f"Invalid set code: {update_data['set_code']}")

        with db_transaction(self.db):
            if update_box_data.type:
                box.type = update_box_data.type
            if update_box_data.set_code:
                box.set_code = update_box_data.set_code
            if update_box_data.sku:
                box.sku = update_box_data.sku
            if update_box_data.num_cards_expected:
                box.num_cards_expected = update_box_data.num_cards_expected
            for field, value in update_data.items():
                if value is not None: # Only update non-None values
                    setattr(box, field, value)

        return box
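The rewritten `update_box` leans on Pydantic's `exclude_unset` so only fields that were actually submitted reach `setattr`. A small stand-in model (UpdateBoxRequest itself is not shown in this diff) demonstrates why that yields a partial update:

```python
# Stand-in for schemas.box.UpdateBoxRequest (not shown in this diff) to show why
# dict(exclude_unset=True) gives a partial update: fields the client never sent
# are absent from update_data and never reach setattr on the Box row.
from typing import Optional

from pydantic import BaseModel


class UpdateBoxStandIn(BaseModel):
    type: Optional[str] = None
    sku: Optional[str] = None
    set_code: Optional[str] = None
    num_cards_expected: Optional[int] = None


req = UpdateBoxStandIn(sku="ABC-123")
assert req.dict(exclude_unset=True) == {"sku": "ABC-123"}
```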
def delete_box(self, box_id: str) -> Box:
|
||||
"""Delete a box."""
|
||||
box = self.db.query(Box).filter(Box.product_id == box_id).first()
|
||||
if not box:
|
||||
raise Exception("Box not found")
|
||||
raise ValueError("Box not found")
|
||||
|
||||
with db_transaction(self.db):
|
||||
self.db.delete(box)
|
||||
return box
|
||||
|
||||
def open_box(self, box_id: str, box_data: CreateOpenBoxRequest):
|
||||
def open_box(self, box_id: str, box_data: CreateOpenBoxRequest) -> OpenBox:
|
||||
"""Open a box and process its contents."""
|
||||
box = self.db.query(Box).filter(Box.product_id == box_id).first()
|
||||
if not box:
|
||||
raise Exception("Box not found")
|
||||
raise ValueError("Box not found")
|
||||
|
||||
with db_transaction(self.db):
|
||||
open_box = OpenBox(
|
||||
id=str(uuid()),
|
||||
id=str(uuid4()),
|
||||
product_id=box_id,
|
||||
num_cards_actual=box_data.num_cards_actual,
|
||||
date_opened=datetime.strptime(box_data.date_opened, "%Y-%m-%d") if box_data.date_opened else datetime.now()
|
||||
)
|
||||
self.db.add(open_box)
|
||||
|
||||
staged_product_data = self.get_staged_product_data(box_data.file_ids)
|
||||
product_data = self.aggregate_staged_product_data(staged_product_data)
|
||||
self.inventory_service.process_staged_products(product_data)
|
||||
self.add_products_to_open_box(open_box, product_data)
|
||||
# update box_id for files
|
||||
for file_id in box_data.file_ids:
|
||||
file = self.db.query(File).filter(File.id == file_id).first()
|
||||
file.box_id = open_box.id
|
||||
self.db.add(file)
|
||||
|
||||
# Update file box IDs
|
||||
self.db.query(File).filter(File.id.in_(box_data.file_ids)).update(
|
||||
{"box_id": open_box.id}, synchronize_session=False
|
||||
)
|
||||
|
||||
return open_box
|
services/data.py
@ -1,149 +0,0 @@
|
||||
from sqlalchemy.orm import Session
|
||||
import logging
|
||||
from fastapi import BackgroundTasks
|
||||
from db.models import TCGPlayerGroups, SetCodeGroupIdMapping, ManaboxExportData, TCGPlayerProduct, ManaboxTCGPlayerMapping, UnmatchedManaboxData, TCGPlayerInventory
|
||||
from db.utils import db_transaction
|
||||
import uuid
|
||||
from services.tcgplayer import TCGPlayerService
|
||||
from sqlalchemy.sql import exists
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class DataService:
|
||||
def __init__(self, db: Session, tcgplayer_service: TCGPlayerService):
|
||||
self.db = db
|
||||
self.tcgplayer_service = tcgplayer_service
|
||||
|
||||
def _normalize_rarity(self, rarity: str) -> str:
|
||||
if rarity.lower() == "rare":
|
||||
return "R"
|
||||
elif rarity.lower() == "mythic":
|
||||
return "M"
|
||||
elif rarity.lower() == "uncommon":
|
||||
return "U"
|
||||
elif rarity.lower() == "common":
|
||||
return "C"
|
||||
elif rarity.lower() in ["R", "M", "U", "C"]:
|
||||
return rarity.upper()
|
||||
else:
|
||||
raise ValueError(f"Invalid rarity: {rarity}")
|
||||
|
||||
def _normalize_condition(self, condition: str, foil: str) -> str:
|
||||
if condition.lower() == "near_mint":
|
||||
condition1 = "Near Mint"
|
||||
else:
|
||||
raise ValueError(f"Invalid condition: {condition}")
|
||||
if foil.lower() == "foil":
|
||||
condition2 = " Foil"
|
||||
elif foil.lower() == "normal":
|
||||
condition2 = ""
|
||||
else:
|
||||
raise ValueError(f"Invalid foil: {foil}")
|
||||
return condition1 + condition2
|
||||
|
||||
def _normalize_number(self, number: str) -> str:
|
||||
return str(number.split(".")[0])
|
||||
|
||||
def _convert_set_code_to_group_id(self, set_code: str) -> str:
|
||||
group = self.db.query(TCGPlayerGroups).filter(TCGPlayerGroups.abbreviation == set_code).first()
|
||||
return group.group_id
|
||||
|
||||
def _add_set_group_mapping(self, set_code: str, group_id: str) -> None:
|
||||
with db_transaction(self.db):
|
||||
self.db.add(SetCodeGroupIdMapping(id=str(uuid.uuid4()), set_code=set_code, group_id=group_id))
|
||||
|
||||
def _get_set_codes(self, **filters) -> list:
|
||||
query = self.db.query(ManaboxExportData.set_code).distinct()
|
||||
for field, value in filters.items():
|
||||
if value is not None:
|
||||
query = query.filter(getattr(ManaboxExportData, field) == value)
|
||||
return [code[0] for code in query.all()]
|
||||
|
||||
async def bg_set_manabox_tcg_relationship(self, box_id: str = None, upload_id: str = None) -> None:
|
||||
if not bool(box_id) ^ bool(upload_id):
|
||||
raise ValueError("Must provide exactly one of box_id or upload_id")
|
||||
|
||||
filters = {"box_id": box_id} if box_id else {"upload_id": upload_id}
|
||||
set_codes = self._get_set_codes(**filters)
|
||||
|
||||
for set_code in set_codes:
|
||||
try:
|
||||
group_id = self._convert_set_code_to_group_id(set_code)
|
||||
except AttributeError:
|
||||
logger.warning(f"No group found for set code {set_code}")
|
||||
continue
|
||||
self._add_set_group_mapping(set_code, group_id)
|
||||
# update pricing for groups
|
||||
if self.db.query(TCGPlayerProduct).filter(TCGPlayerProduct.group_id == group_id).count() == 0:
|
||||
self.tcgplayer_service.update_pricing(set_name_ids={"set_name_ids":[group_id]})
|
||||
|
||||
# match manabox data to tcgplayer pricing data
|
||||
# match on manabox - set_code (through group_id), collector_number, foil, rarity, condition
|
||||
# match on tcgplayer - group_id, number, rarity, condition (condition + foil)
|
||||
# use normalizing functions
|
||||
matched_records = self.db.query(ManaboxExportData).filter(ManaboxExportData.set_code.in_(set_codes)).all()
|
||||
for record in matched_records:
|
||||
rarity = self._normalize_rarity(record.rarity)
|
||||
condition = self._normalize_condition(record.condition, record.foil)
|
||||
number = self._normalize_number(record.collector_number)
|
||||
group_id = self._convert_set_code_to_group_id(record.set_code)
|
||||
tcg_record = self.db.query(TCGPlayerProduct).filter(
|
||||
TCGPlayerProduct.group_id == group_id,
|
||||
TCGPlayerProduct.number == number,
|
||||
TCGPlayerProduct.rarity == rarity,
|
||||
TCGPlayerProduct.condition == condition
|
||||
).all()
|
||||
if len(tcg_record) == 0:
|
||||
logger.warning(f"No match found for {record.name}")
|
||||
if self.db.query(UnmatchedManaboxData).filter(UnmatchedManaboxData.manabox_id == record.id).count() == 0:
|
||||
with db_transaction(self.db):
|
||||
self.db.add(UnmatchedManaboxData(id=str(uuid.uuid4()), manabox_id=record.id, reason="No match found"))
|
||||
elif len(tcg_record) > 1:
|
||||
logger.warning(f"Multiple matches found for {record.name}")
|
||||
if self.db.query(UnmatchedManaboxData).filter(UnmatchedManaboxData.manabox_id == record.id).count() == 0:
|
||||
with db_transaction(self.db):
|
||||
self.db.add(UnmatchedManaboxData(id=str(uuid.uuid4()), manabox_id=record.id, reason="Multiple matches found"))
|
||||
else:
|
||||
with db_transaction(self.db):
|
||||
self.db.add(ManaboxTCGPlayerMapping(id=str(uuid.uuid4()), manabox_id=record.id, tcgplayer_id=tcg_record[0].id))
|
||||
|
||||
async def bg_set_tcg_inventory_product_relationship(self, export_id: str) -> None:
|
||||
inventory_without_product = (
|
||||
self.db.query(TCGPlayerInventory.tcgplayer_id, TCGPlayerInventory.set_name)
|
||||
.filter(TCGPlayerInventory.total_quantity > 0)
|
||||
.filter(TCGPlayerInventory.product_line == "Magic")
|
||||
.filter(TCGPlayerInventory.export_id == export_id)
|
||||
.filter(TCGPlayerInventory.tcgplayer_product_id.is_(None))
|
||||
.filter(~exists().where(
|
||||
TCGPlayerProduct.id == TCGPlayerInventory.tcgplayer_product_id
|
||||
))
|
||||
.all()
|
||||
)
|
||||
|
||||
set_names = list(set(inv.set_name for inv in inventory_without_product
|
||||
if inv.set_name is not None and isinstance(inv.set_name, str)))
|
||||
|
||||
group_ids = self.db.query(TCGPlayerGroups.group_id).filter(
|
||||
TCGPlayerGroups.name.in_(set_names)
|
||||
).all()
|
||||
|
||||
group_ids = [str(group_id[0]) for group_id in group_ids]
|
||||
|
||||
self.tcgplayer_service.update_pricing(set_name_ids={"set_name_ids": group_ids})
|
||||
|
||||
for inventory in inventory_without_product:
|
||||
product = self.db.query(TCGPlayerProduct).filter(
|
||||
TCGPlayerProduct.tcgplayer_id == inventory.tcgplayer_id
|
||||
).first()
|
||||
|
||||
if product:
|
||||
with db_transaction(self.db):
|
||||
inventory_record = self.db.query(TCGPlayerInventory).filter(
|
||||
TCGPlayerInventory.tcgplayer_id == inventory.tcgplayer_id,
|
||||
TCGPlayerInventory.export_id == export_id
|
||||
).first()
|
||||
|
||||
if inventory_record:
|
||||
inventory_record.tcgplayer_product_id = product.id
|
||||
self.db.add(inventory_record)
|
services/file.py

@@ -1,90 +1,104 @@
from sqlalchemy.orm import Session
from typing import Optional, List, Dict, Any
from uuid import uuid4
import csv
import logging
import os
from io import StringIO

from db.utils import db_transaction
from db.models import File, StagedFileProduct
from schemas.file import CreateFileRequest
import os
from uuid import uuid4 as uuid
import logging
import csv
from io import StringIO
from typing import Optional, List

logger = logging.getLogger(__name__)

# Name,Set code,Set name,Collector number,Foil,Rarity,Quantity,ManaBox ID,Scryfall ID,Purchase price,Misprint,Altered,Condition,Language,Purchase price currency
MANABOX_REQUIRED_FILE_HEADERS = ['Name', 'Set code', 'Set name', 'Collector number', 'Foil', 'Rarity', 'Quantity', 'ManaBox ID', 'Scryfall ID', 'Purchase price', 'Misprint', 'Altered', 'Condition', 'Language', 'Purchase price currency']
MANABOX_ALLOWED_FILE_EXTENSIONS = ['.csv']
MANABOX_ALLOWED_FILE_TYPES = ['scan_export_common', 'scan_export_rare']
MANABOX_CONFIG = {
    "required_headers": MANABOX_REQUIRED_FILE_HEADERS,
    "allowed_extensions": MANABOX_ALLOWED_FILE_EXTENSIONS,
    "allowed_types": MANABOX_ALLOWED_FILE_TYPES
}
class FileConfig:
    """Configuration constants for file processing"""
    TEMP_DIR = os.path.join(os.getcwd(), 'temp')

    MANABOX_HEADERS = [
        'Name', 'Set code', 'Set name', 'Collector number', 'Foil',
        'Rarity', 'Quantity', 'ManaBox ID', 'Scryfall ID', 'Purchase price',
        'Misprint', 'Altered', 'Condition', 'Language', 'Purchase price currency'
    ]

    SOURCES = {
        "manabox": MANABOX_CONFIG
        "manabox": {
            "required_headers": MANABOX_HEADERS,
            "allowed_extensions": ['.csv'],
            "allowed_types": ['scan_export_common', 'scan_export_rare']
        }
    TEMP_DIR = os.getcwd() + '/temp/'
    }
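A quick illustration of how the new `FileConfig.SOURCES` mapping drives validation for a source, mirroring `get_config`/`validate_file_extension` below:

```python
# Quick check of the FileConfig.SOURCES mapping introduced above; values match
# the manabox entry in this diff.
from services.file import FileConfig

config = FileConfig.SOURCES["manabox"]
assert any("cards.csv".endswith(ext) for ext in config["allowed_extensions"])
assert "scan_export_rare" in config["allowed_types"]
assert "Scryfall ID" in config["required_headers"]
```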
class FileValidationError(Exception):
|
||||
"""Custom exception for file validation errors"""
|
||||
pass
|
||||
|
||||
class FileService:
|
||||
def __init__(self, db: Session):
|
||||
self.db = db
|
||||
|
||||
# CONFIG
|
||||
def get_config(self, source: str) -> dict:
|
||||
return SOURCES.get(source)
|
||||
def get_config(self, source: str) -> Dict[str, Any]:
|
||||
"""Get configuration for a specific source"""
|
||||
config = FileConfig.SOURCES.get(source)
|
||||
if not config:
|
||||
raise FileValidationError(f"Unsupported source: {source}")
|
||||
return config
|
||||
|
||||
# VALIDATION
|
||||
def validate_file_extension(self, filename: str, config: dict) -> bool:
|
||||
return filename.endswith(tuple(config.get("allowed_extensions")))
|
||||
def validate_file_extension(self, filename: str, config: Dict[str, Any]) -> bool:
|
||||
"""Validate file extension against allowed extensions"""
|
||||
return any(filename.endswith(ext) for ext in config["allowed_extensions"])
|
||||
|
||||
def validate_file_type(self, metadata: CreateFileRequest, config: dict) -> bool:
|
||||
return metadata.type in config.get("allowed_types")
|
||||
def validate_file_type(self, metadata: CreateFileRequest, config: Dict[str, Any]) -> bool:
|
||||
"""Validate file type against allowed types"""
|
||||
return metadata.type in config["allowed_types"]
|
||||
|
||||
def validate_csv(self, content: bytes, required_headers: Optional[List[str]] = None) -> bool:
|
||||
"""Validate CSV content and headers"""
|
||||
try:
|
||||
# Try to decode and parse as CSV
|
||||
csv_text = content.decode('utf-8')
|
||||
csv_file = StringIO(csv_text)
|
||||
csv_reader = csv.reader(csv_file)
|
||||
|
||||
# Check headers if specified
|
||||
if required_headers:
|
||||
headers = next(csv_reader, None)
|
||||
if required_headers and not all(header in headers for header in required_headers):
|
||||
if not headers or not all(header in headers for header in required_headers):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
except (UnicodeDecodeError, csv.Error):
|
||||
except (UnicodeDecodeError, csv.Error) as e:
|
||||
logger.error(f"CSV validation error: {str(e)}")
|
||||
return False
|
||||
|
||||
def validate_file_content(self, content: bytes, metadata: CreateFileRequest, config: dict) -> bool:
|
||||
extension = metadata.filename.split('.')[-1]
|
||||
if extension == 'csv':
|
||||
def validate_file_content(self, content: bytes, metadata: CreateFileRequest, config: Dict[str, Any]) -> bool:
|
||||
"""Validate file content based on file type"""
|
||||
extension = os.path.splitext(metadata.filename)[1].lower()
|
||||
if extension == '.csv':
|
||||
return self.validate_csv(content, config.get("required_headers"))
|
||||
return False
|
||||
|
||||
def validate_file(self, content: bytes, metadata: CreateFileRequest) -> bool:
|
||||
# 1. Get config
|
||||
"""Validate file against all criteria"""
|
||||
config = self.get_config(metadata.source)
|
||||
# 2. Validate file extension
|
||||
|
||||
if not self.validate_file_extension(metadata.filename, config):
|
||||
raise Exception("Invalid file extension")
|
||||
# 2. validate file type
|
||||
raise FileValidationError("Invalid file extension")
|
||||
|
||||
if not self.validate_file_type(metadata, config):
|
||||
raise Exception("Invalid file type")
|
||||
# 3. Validate file content
|
||||
raise FileValidationError("Invalid file type")
|
||||
|
||||
if not self.validate_file_content(content, metadata, config):
|
||||
raise Exception("Invalid file content")
|
||||
raise FileValidationError("Invalid file content or headers")
|
||||
|
||||
return True
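For orientation, a minimal usage sketch of the validation path above. It assumes a configured SQLAlchemy Session and that CreateFileRequest exposes the source/type/filename/service fields read elsewhere in this file; the concrete values ("manabox", "scan_export_rare", the filename) are placeholders, not part of this commit.

from sqlalchemy.orm import Session

from schemas.file import CreateFileRequest
from services.file import FileService, FileValidationError

def validate_scan_upload(session: Session, content: bytes) -> bool:
    # Placeholder metadata; source and type must match an entry in FileConfig.SOURCES
    metadata = CreateFileRequest(
        source="manabox",
        type="scan_export_rare",
        filename="scan_export.csv",
        service="product",
    )
    try:
        return FileService(session).validate_file(content, metadata)
    except FileValidationError as err:
        # Raised for a bad extension, a disallowed type, or missing CSV headers
        print(f"rejected upload: {err}")
        return False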
# CRUD
|
||||
# CREATE
|
||||
def create_file(self, content: bytes, metadata: CreateFileRequest) -> File:
|
||||
"""Create a new file record and save the file"""
|
||||
with db_transaction(self.db):
|
||||
file = File(
|
||||
id = str(uuid()),
|
||||
id=str(uuid4()),
|
||||
filename=metadata.filename,
|
||||
filepath = TEMP_DIR + metadata.filename, # TODO config variable
|
||||
filepath=os.path.join(FileConfig.TEMP_DIR, metadata.filename),
|
||||
type=metadata.type,
|
||||
source=metadata.source,
|
||||
filesize_kb=round(len(content) / 1024, 2),
|
||||
@ -92,31 +106,36 @@ class FileService:
|
||||
service=metadata.service
|
||||
)
|
||||
self.db.add(file)
|
||||
|
||||
os.makedirs(FileConfig.TEMP_DIR, exist_ok=True)
|
||||
with open(file.filepath, 'wb') as f:
|
||||
f.write(content)
|
||||
|
||||
return file
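A companion sketch of the create/read round trip, under the same assumptions as the previous block: the CreateFileRequest fields are inferred from how create_file reads them, and the values are placeholders. create_file writes the bytes under FileConfig.TEMP_DIR inside a transaction, and get_file then returns the persisted File row by id.

from schemas.file import CreateFileRequest
from services.file import FileService

def store_scan_export(session, content: bytes):
    service = FileService(session)
    request = CreateFileRequest(
        source="manabox",
        type="scan_export_common",   # placeholder type from the manabox allowed_types list
        filename="scan_export.csv",  # placeholder filename
        service="product",           # assumed value; the field is only passed through here
    )
    service.validate_file(content, request)
    created = service.create_file(content, request)
    return service.get_file(created.id)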
# GET
|
||||
def get_file(self, file_id: str) -> List[File]:
|
||||
def get_file(self, file_id: str) -> File:
|
||||
"""Get a file by ID"""
|
||||
file = self.db.query(File).filter(File.id == file_id).first()
|
||||
if not file:
|
||||
raise Exception(f"File with id {file_id} not found")
|
||||
raise FileValidationError(f"File with id {file_id} not found")
|
||||
return file
|
||||
|
||||
def get_files(self, status: Optional[str] = None) -> List[File]:
|
||||
"""Get all files, optionally filtered by status"""
|
||||
query = self.db.query(File)
|
||||
if status:
|
||||
return self.db.query(File).filter(File.status == status).all()
|
||||
return self.db.query(File).all()
|
||||
query = query.filter(File.status == status)
|
||||
return query.all()
|
||||
|
||||
# DELETE
|
||||
def get_staged_products(self, file_id: str) -> List[StagedFileProduct]:
|
||||
return self.db.query(StagedFileProduct).filter(StagedFileProduct.file_id == file_id).all()
|
||||
"""Get staged products for a file"""
|
||||
return self.db.query(StagedFileProduct).filter(
|
||||
StagedFileProduct.file_id == file_id
|
||||
).all()
|
||||
|
||||
def delete_file(self, file_id: str) -> List[File]:
|
||||
def delete_file(self, file_id: str) -> File:
|
||||
"""Mark a file as deleted and remove associated staged products"""
|
||||
file = self.get_file(file_id)
|
||||
if not file:
|
||||
raise Exception(f"File with id {file_id} not found")
|
||||
|
||||
staged_products = self.get_staged_products(file_id)
|
||||
|
||||
with db_transaction(self.db):
|
||||
@ -125,3 +144,13 @@ class FileService:
|
||||
self.db.delete(staged_product)
|
||||
|
||||
return file
|
||||
|
||||
def get_file_content(self, file_id: str) -> bytes:
|
||||
"""Get the content of a file"""
|
||||
file = self.get_file(file_id)
|
||||
try:
|
||||
with open(file.filepath, 'rb') as f:
|
||||
return f.read()
|
||||
except IOError as e:
|
||||
logger.error(f"Error reading file {file_id}: {str(e)}")
|
||||
raise FileValidationError(f"Could not read file content for {file_id}")
|
@ -1,28 +1,90 @@
|
||||
from typing import Dict
|
||||
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
|
||||
from db.models import Product, Inventory
|
||||
from schemas.inventory import UpdateInventoryResponse
|
||||
from db.utils import db_transaction
|
||||
|
||||
|
||||
class InventoryService:
|
||||
def __init__(self, db: Session):
|
||||
"""Service class for managing product inventory operations."""
|
||||
|
||||
def __init__(self, db: Session) -> None:
|
||||
"""
|
||||
Initialize the InventoryService.
|
||||
|
||||
Args:
|
||||
db: SQLAlchemy database session
|
||||
"""
|
||||
self.db = db
|
||||
|
||||
def add_inventory(self, product: Product, quantity: int) -> Inventory:
|
||||
inventory = self.db.query(Inventory).filter(Inventory.product_id == product.id).first()
|
||||
"""
|
||||
Add or update inventory for a product.
|
||||
|
||||
Args:
|
||||
product: Product model instance
|
||||
quantity: Quantity to add to inventory
|
||||
|
||||
Returns:
|
||||
Updated Inventory model instance
|
||||
"""
|
||||
inventory = self.db.query(Inventory).filter(
|
||||
Inventory.product_id == product.id
|
||||
).first()
|
||||
|
||||
if inventory is None:
|
||||
inventory = Inventory(product_id=product.id, quantity=quantity)
|
||||
inventory = Inventory(
|
||||
product_id=product.id,
|
||||
quantity=quantity
|
||||
)
|
||||
self.db.add(inventory)
|
||||
else:
|
||||
inventory.quantity += quantity
|
||||
|
||||
return inventory
|
||||
|
||||
def process_staged_products(self, product_data: dict[Product, int]) -> UpdateInventoryResponse:
|
||||
def process_staged_products(
|
||||
self,
|
||||
product_data: Dict[Product, int]
|
||||
) -> UpdateInventoryResponse:
|
||||
"""
|
||||
Process multiple products and update their inventory.
|
||||
|
||||
Args:
|
||||
product_data: Dictionary mapping Products to their quantities
|
||||
|
||||
Returns:
|
||||
Response indicating success status
|
||||
"""
|
||||
try:
|
||||
with db_transaction(self.db):
|
||||
for product, quantity in product_data.items():
|
||||
self.add_inventory(product, quantity)
|
||||
return UpdateInventoryResponse(success=True)
|
||||
except SQLAlchemyError:
|
||||
return UpdateInventoryResponse(success=False)
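A short sketch of how a caller might hand staged quantities to this service. The import path services.inventory is an assumption (the file header for this hunk is not visible here); the Product rows are assumed to exist already, and db_transaction handles commit/rollback inside process_staged_products.

from services.inventory import InventoryService  # assumed module path

def apply_staged_quantities(session, product_quantities):
    # product_quantities: dict mapping existing Product rows to integer counts
    response = InventoryService(session).process_staged_products(product_quantities)
    if not response.success:
        # SQLAlchemyError was caught and the transaction rolled back
        raise RuntimeError("inventory update failed")
    return response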
def add_sealed_box_to_inventory(self, product: Product, quantity: int) -> UpdateInventoryResponse:
|
||||
def add_sealed_box_to_inventory(
|
||||
self,
|
||||
product: Product,
|
||||
quantity: int
|
||||
) -> UpdateInventoryResponse:
|
||||
"""
|
||||
Add sealed box inventory for a single product.
|
||||
|
||||
Args:
|
||||
product: Product model instance
|
||||
quantity: Quantity to add to inventory
|
||||
|
||||
Returns:
|
||||
Response indicating success status
|
||||
"""
|
||||
try:
|
||||
with db_transaction(self.db):
|
||||
inventory = self.add_inventory(product, quantity)
|
||||
self.add_inventory(product, quantity)
|
||||
return UpdateInventoryResponse(success=True)
|
||||
except SQLAlchemyError:
|
||||
return UpdateInventoryResponse(success=False)
|
@ -1,100 +0,0 @@
|
||||
from db.models import ManaboxExportData, Box, UploadHistory
|
||||
from db.utils import db_transaction
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
from sqlalchemy.orm import Session
|
||||
from sqlalchemy.engine.result import Row
|
||||
|
||||
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class BoxObject:
|
||||
def __init__(
|
||||
self, upload_id: str, set_name: str,
|
||||
set_code: str, cost: float = None, date_purchased: datetime = None,
|
||||
date_opened: datetime = None, box_id: str = None):
|
||||
self.upload_id = upload_id
|
||||
self.box_id = box_id if box_id else str(uuid.uuid4())
|
||||
self.set_name = set_name
|
||||
self.set_code = set_code
|
||||
self.cost = cost
|
||||
self.date_purchased = date_purchased
|
||||
self.date_opened = date_opened
|
||||
|
||||
class BoxService:
|
||||
def __init__(self, db: Session):
|
||||
self.db = db
|
||||
|
||||
def _validate_upload_id(self, upload_id: str):
|
||||
# check if upload_history status = 'success'
|
||||
if self.db.query(UploadHistory).filter(UploadHistory.upload_id == upload_id).first() is None:
|
||||
raise Exception(f"Upload ID {upload_id} not found")
|
||||
if self.db.query(UploadHistory).filter(UploadHistory.upload_id == upload_id).first().status != 'success':
|
||||
raise Exception(f"Upload ID {upload_id} not successful")
|
||||
# check if at least 1 row in manabox_export_data with upload_id
|
||||
if self.db.query(ManaboxExportData).filter(ManaboxExportData.upload_id == upload_id).first() is None:
|
||||
raise Exception(f"Upload ID {upload_id} has no data")
|
||||
|
||||
def _get_set_info(self, upload_id: str) -> list[Row[tuple[str, str]]]:
|
||||
# get distinct set_name, set_code from manabox_export_data for upload_id
|
||||
boxes = self.db.query(ManaboxExportData.set_name, ManaboxExportData.set_code).filter(ManaboxExportData.upload_id == upload_id).distinct().all()
|
||||
if not boxes or len(boxes) == 0:
|
||||
raise Exception(f"Upload ID {upload_id} has no data")
|
||||
return boxes
|
||||
|
||||
def _update_manabox_export_data_box_id(self, box: Box):
|
||||
# based on upload_id, set_name, set_code, update box_id in manabox_export_data for all rows where box id is null
|
||||
with db_transaction(self.db):
|
||||
self.db.query(ManaboxExportData).filter(
|
||||
ManaboxExportData.upload_id == box.upload_id).filter(
|
||||
ManaboxExportData.set_name == box.set_name).filter(
|
||||
ManaboxExportData.set_code == box.set_code).filter(
|
||||
ManaboxExportData.box_id == None).update({ManaboxExportData.box_id: box.id})
|
||||
|
||||
def convert_upload_to_boxes(self, upload_id: str):
|
||||
self._validate_upload_id(upload_id)
|
||||
# get distinct set_name, set_code from manabox_export_data for upload_id
|
||||
box_set_info = self._get_set_info(upload_id)
|
||||
created_boxes = []
|
||||
# create boxes
|
||||
for box in box_set_info:
|
||||
box_obj = BoxObject(upload_id, set_name = box.set_name, set_code = box.set_code)
|
||||
new_box = self.create_box(box_obj)
|
||||
logger.info(f"Created box {new_box.id} for upload {upload_id}")
|
||||
self._update_manabox_export_data_box_id(new_box)
|
||||
created_boxes.append(new_box)
|
||||
|
||||
return {"status": "success", "boxes": f"{[box.id for box in created_boxes]}"}
|
||||
|
||||
|
||||
def create_box(self, box: BoxObject):
|
||||
with db_transaction(self.db):
|
||||
box_record = Box(
|
||||
id = box.box_id,
|
||||
upload_id = box.upload_id,
|
||||
set_name = box.set_name,
|
||||
set_code = box.set_code,
|
||||
cost = box.cost,
|
||||
date_purchased = box.date_purchased,
|
||||
date_opened = box.date_opened
|
||||
)
|
||||
self.db.add(box_record)
|
||||
return box_record
|
||||
|
||||
def get_box(self):
|
||||
pass
|
||||
|
||||
def delete_box(self, box_id: str):
|
||||
# delete box
|
||||
with db_transaction(self.db):
|
||||
self.db.query(Box).filter(Box.id == box_id).delete()
|
||||
# update manabox_export_data box_id to null
|
||||
with db_transaction(self.db):
|
||||
self.db.query(ManaboxExportData).filter(ManaboxExportData.box_id == box_id).update({ManaboxExportData.box_id: None})
|
||||
return {"status": "success", "box_id": box_id}
|
||||
|
||||
def update_box(self):
|
||||
pass
|
||||
|
@ -4,5 +4,3 @@ from sqlalchemy.orm import Session
|
||||
class PricingService:
|
||||
def __init__(self, db: Session):
|
||||
self.db = db
|
||||
|
||||
|
@ -1,205 +0,0 @@
|
||||
import logging
|
||||
from typing import Callable
|
||||
from db.models import TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, ManaboxExportData, ManaboxTCGPlayerMapping, TCGPlayerProduct
|
||||
from sqlalchemy.orm import Session
|
||||
import pandas as pd
|
||||
from db.utils import db_transaction
|
||||
from sqlalchemy import func, and_, exists
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class PricingService:
|
||||
def __init__(self, db: Session):
|
||||
self.db = db
|
||||
|
||||
def get_box_with_most_recent_prices(self, box_id: str) -> pd.DataFrame:
|
||||
latest_prices = (
|
||||
self.db.query(
|
||||
TCGPlayerPricingHistory.tcgplayer_product_id,
|
||||
func.max(TCGPlayerPricingHistory.date_created).label('max_date')
|
||||
)
|
||||
.group_by(TCGPlayerPricingHistory.tcgplayer_product_id)
|
||||
.subquery('latest') # Added name to subquery
|
||||
)
|
||||
|
||||
result = (
|
||||
self.db.query(ManaboxExportData, TCGPlayerPricingHistory, TCGPlayerProduct)
|
||||
.join(ManaboxTCGPlayerMapping, ManaboxExportData.id == ManaboxTCGPlayerMapping.manabox_id)
|
||||
.join(TCGPlayerProduct, ManaboxTCGPlayerMapping.tcgplayer_id == TCGPlayerProduct.id)
|
||||
.join(TCGPlayerPricingHistory, TCGPlayerProduct.id == TCGPlayerPricingHistory.tcgplayer_product_id)
|
||||
.join(
|
||||
latest_prices,
|
||||
and_(
|
||||
TCGPlayerPricingHistory.tcgplayer_product_id == latest_prices.c.tcgplayer_product_id,
|
||||
TCGPlayerPricingHistory.date_created == latest_prices.c.max_date
|
||||
)
|
||||
)
|
||||
.filter(ManaboxExportData.box_id == box_id) # Removed str() conversion
|
||||
.all()
|
||||
)
|
||||
|
||||
logger.debug(f"Found {len(result)} rows")
|
||||
|
||||
df = pd.DataFrame([{
|
||||
**{f"manabox_{k}": v for k, v in row[0].__dict__.items() if not k.startswith('_')},
|
||||
**{f"pricing_{k}": v for k, v in row[1].__dict__.items() if not k.startswith('_')},
|
||||
**{f"tcgproduct_{k}": v for k, v in row[2].__dict__.items() if not k.startswith('_')}
|
||||
} for row in result])
|
||||
|
||||
return df
|
||||
|
||||
def get_live_inventory_with_most_recent_prices(self) -> pd.DataFrame:
|
||||
# Get latest export IDs using subqueries
|
||||
latest_inventory_export = (
|
||||
self.db.query(TCGPlayerExportHistory.inventory_export_id)
|
||||
.filter(TCGPlayerExportHistory.type == "live_inventory")
|
||||
.order_by(TCGPlayerExportHistory.date_created.desc())
|
||||
.limit(1)
|
||||
.scalar_subquery()
|
||||
)
|
||||
# this is bad because latest pricing export is not guaranteed to be related to the latest inventory export
|
||||
latest_pricing_export = (
|
||||
self.db.query(TCGPlayerExportHistory.pricing_export_id)
|
||||
.filter(TCGPlayerExportHistory.type == "pricing")
|
||||
.order_by(TCGPlayerExportHistory.date_created.desc())
|
||||
.limit(1)
|
||||
.scalar_subquery()
|
||||
)
|
||||
|
||||
# Join inventory and pricing data in a single query
|
||||
inventory_with_pricing = (
|
||||
self.db.query(TCGPlayerInventory, TCGPlayerPricingHistory)
|
||||
.join(
|
||||
TCGPlayerPricingHistory,
|
||||
TCGPlayerInventory.tcgplayer_product_id == TCGPlayerPricingHistory.tcgplayer_product_id
|
||||
)
|
||||
.filter(
|
||||
TCGPlayerInventory.export_id == latest_inventory_export,
|
||||
TCGPlayerPricingHistory.export_id == latest_pricing_export
|
||||
)
|
||||
.all()
|
||||
)
|
||||
|
||||
# Convert to pandas DataFrame
|
||||
df = pd.DataFrame([{
|
||||
# Inventory columns
|
||||
**{f"inventory_{k}": v
|
||||
for k, v in row[0].__dict__.items()
|
||||
if not k.startswith('_')},
|
||||
# Pricing columns
|
||||
**{f"pricing_{k}": v
|
||||
for k, v in row[1].__dict__.items()
|
||||
if not k.startswith('_')}
|
||||
} for row in inventory_with_pricing])
|
||||
|
||||
return df
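The DataFrame construction above flattens each (inventory, pricing) pair into one wide row by prefixing attribute names. The same idea in isolation, with plain objects standing in for the ORM instances; all names and values below are illustrative only.

import pandas as pd

class Row:
    def __init__(self, **attrs):
        self.__dict__.update(attrs)

def prefixed(obj, prefix: str) -> dict:
    # Copy public attributes into a dict, prefixing each key with its source
    return {f"{prefix}_{k}": v for k, v in vars(obj).items() if not k.startswith("_")}

inventory = Row(tcgplayer_id=1001, total_quantity=3)
pricing = Row(tcg_low_price=0.42, tcg_market_price=0.55)
df = pd.DataFrame([{**prefixed(inventory, "inventory"), **prefixed(pricing, "pricing")}])
print(df.columns.tolist())
# ['inventory_tcgplayer_id', 'inventory_total_quantity', 'pricing_tcg_low_price', 'pricing_tcg_market_price']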
def default_pricing_algo(self, df: pd.DataFrame = None):
|
||||
if df is None:
|
||||
logger.debug("No DataFrame provided, fetching live inventory with most recent prices")
|
||||
df = self.get_live_inventory_with_most_recent_prices()
|
||||
# if tcg low price is < 0.35, set my_price to 0.35
|
||||
# if either tcg low price or tcg low price with shipping is under 5, set my_price to tcg low price * 1.25
|
||||
# if tcg low price with shipping is > 25 set price to tcg low price with shipping * 1.025
|
||||
# otherwise, set price to tcg low price with shipping * 1.10
|
||||
# also round to 2 decimal places
|
||||
df['my_price'] = df.apply(lambda row: round(
|
||||
0.35 if row['pricing_tcg_low_price'] < 0.35 else
|
||||
row['pricing_tcg_low_price'] * 1.25 if row['pricing_tcg_low_price'] < 5 or row['pricing_tcg_low_price_with_shipping'] < 5 else
|
||||
row['pricing_tcg_low_price_with_shipping'] * 1.025 if row['pricing_tcg_low_price_with_shipping'] > 25 else
|
||||
row['pricing_tcg_low_price_with_shipping'] * 1.10, 2), axis=1)
|
||||
# log rows with no price
|
||||
no_price = df[df['my_price'].isnull()]
|
||||
if len(no_price) > 0:
|
||||
logger.warning(f"Found {len(no_price)} rows with no price")
|
||||
logger.warning(no_price)
|
||||
# remove rows with no price
|
||||
df = df.dropna(subset=['my_price'])
|
||||
return df
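The tier rules above are easier to sanity-check as a standalone function. This is a sketch of the same thresholds only, not the service itself; the sample prices are made up.

def tiered_price(tcg_low: float, tcg_low_shipping: float) -> float:
    # Mirrors the branches in default_pricing_algo, rounded to cents
    if tcg_low < 0.35:
        return 0.35                                # floor for bulk cards
    if tcg_low < 5 or tcg_low_shipping < 5:
        return round(tcg_low * 1.25, 2)            # cheap cards: 25% over TCG low
    if tcg_low_shipping > 25:
        return round(tcg_low_shipping * 1.025, 2)  # expensive cards: thin 2.5% margin
    return round(tcg_low_shipping * 1.10, 2)       # everything else: 10% over shipped low

print(tiered_price(0.20, 1.50))    # 0.35
print(tiered_price(3.00, 4.25))    # 3.75
print(tiered_price(18.00, 19.10))  # 21.01
print(tiered_price(30.00, 32.00))  # 32.8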
def convert_df_to_csv(self, df: pd.DataFrame):
|
||||
# Flip the mapping to be from current names TO desired names
|
||||
column_mapping = {
|
||||
'inventory_tcgplayer_id': 'TCGplayer Id',
|
||||
'inventory_product_line': 'Product Line',
|
||||
'inventory_set_name': 'Set Name',
|
||||
'inventory_product_name': 'Product Name',
|
||||
'inventory_title': 'Title',
|
||||
'inventory_number': 'Number',
|
||||
'inventory_rarity': 'Rarity',
|
||||
'inventory_condition': 'Condition',
|
||||
'pricing_tcg_market_price': 'TCG Market Price',
|
||||
'pricing_tcg_direct_low': 'TCG Direct Low',
|
||||
'pricing_tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
|
||||
'pricing_tcg_low_price': 'TCG Low Price',
|
||||
'inventory_total_quantity': 'Total Quantity',
|
||||
'inventory_add_to_quantity': 'Add to Quantity',
|
||||
'my_price': 'TCG Marketplace Price',
|
||||
'inventory_photo_url': 'Photo URL'
|
||||
}
|
||||
|
||||
df['pricing_tcg_market_price'] = ""
|
||||
df['pricing_tcg_direct_low'] = ""
|
||||
df['pricing_tcg_low_price_with_shipping'] = ""
|
||||
df['pricing_tcg_low_price'] = ""
|
||||
df['inventory_total_quantity'] = ""
|
||||
df['inventory_add_to_quantity'] = 0
|
||||
df['inventory_photo_url'] = ""
|
||||
|
||||
# First select the columns we want (using the keys of our mapping)
|
||||
# Then rename them to the desired names (the values in our mapping)
|
||||
df = df[column_mapping.keys()].rename(columns=column_mapping)
|
||||
|
||||
return df.to_csv(index=False, quoting=1, quotechar='"')
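The select-then-rename step above is a generic pandas pattern; here it is in isolation with a two-column mapping and placeholder data, showing the quoting the marketplace upload expects (quoting=1 is csv.QUOTE_ALL).

import pandas as pd

column_mapping = {"tcgplayer_id": "TCGplayer Id", "my_price": "TCG Marketplace Price"}
df = pd.DataFrame({"tcgplayer_id": [1001], "my_price": [0.35], "internal_note": ["dropped"]})

# Keep only the mapped columns, then rename them to the upload header names
out = df[list(column_mapping.keys())].rename(columns=column_mapping)
print(out.to_csv(index=False, quoting=1, quotechar='"'))
# "TCGplayer Id","TCG Marketplace Price"
# "1001","0.35"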
def convert_add_df_to_csv(self, df: pd.DataFrame):
|
||||
column_mapping = {
|
||||
'tcgproduct_tcgplayer_id': 'TCGplayer Id',
|
||||
'tcgproduct_product_line': 'Product Line',
|
||||
'tcgproduct_set_name': 'Set Name',
|
||||
'tcgproduct_product_name': 'Product Name',
|
||||
'tcgproduct_title': 'Title',
|
||||
'tcgproduct_number': 'Number',
|
||||
'tcgproduct_rarity': 'Rarity',
|
||||
'tcgproduct_condition': 'Condition',
|
||||
'pricing_tcg_market_price': 'TCG Market Price',
|
||||
'pricing_tcg_direct_low': 'TCG Direct Low',
|
||||
'pricing_tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
|
||||
'pricing_tcg_low_price': 'TCG Low Price',
|
||||
'tcgproduct_group_id': 'Total Quantity',
|
||||
'manabox_quantity': 'Add to Quantity',
|
||||
'my_price': 'TCG Marketplace Price',
|
||||
'tcgproduct_photo_url': 'Photo URL'
|
||||
}
|
||||
df['tcgproduct_group_id'] = ""
|
||||
df['pricing_tcg_market_price'] = ""
|
||||
df['pricing_tcg_direct_low'] = ""
|
||||
df['pricing_tcg_low_price_with_shipping'] = ""
|
||||
df['pricing_tcg_low_price'] = ""
|
||||
df['tcgproduct_photo_url'] = ""
|
||||
|
||||
df = df[column_mapping.keys()].rename(columns=column_mapping)
|
||||
|
||||
return df.to_csv(index=False, quoting=1, quotechar='"')
|
||||
|
||||
def create_live_inventory_pricing_update_csv(self, algo: Callable = None) -> str:
|
||||
actual_algo = algo if algo is not None else self.default_pricing_algo
|
||||
df = actual_algo()
|
||||
csv = self.convert_df_to_csv(df)
|
||||
return csv
|
||||
|
||||
def create_add_to_tcgplayer_csv(self, box_id: str = None, upload_id: str = None, algo: Callable = None) -> str:
|
||||
actual_algo = algo if algo is not None else self.default_pricing_algo
|
||||
if box_id and upload_id:
|
||||
raise ValueError("Cannot specify both box_id and upload_id")
|
||||
elif not box_id and not upload_id:
|
||||
raise ValueError("Must specify either box_id or upload_id")
|
||||
elif box_id:
|
||||
logger.debug("creating df")
|
||||
df = self.get_box_with_most_recent_prices(box_id)
|
||||
elif upload_id:
|
||||
raise NotImplementedError("Not yet implemented")
|
||||
df = actual_algo(df)
|
||||
csv = self.convert_add_df_to_csv(df)
|
||||
return csv
|
@ -1,66 +1,79 @@
|
||||
from logging import getLogger
|
||||
from uuid import uuid4
|
||||
from pandas import DataFrame
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from db.utils import db_transaction
|
||||
from db.models import Product, File, CardManabox, Card, StagedFileProduct, CardTCGPlayer
|
||||
from io import StringIO
|
||||
import pandas as pd
|
||||
from db.models import CardManabox, CardTCGPlayer, StagedFileProduct, TCGPlayerGroups
|
||||
from services.util._dataframe import ManaboxRow, DataframeUtil
|
||||
from services.file import FileService
|
||||
from services.tcgplayer import TCGPlayerService
|
||||
from services.storage import StorageService
|
||||
from uuid import uuid4 as uuid
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
class ManaboxRow:
|
||||
def __init__(self, row: pd.Series):
|
||||
self.name = row['name']
|
||||
self.set_code = row['set_code']
|
||||
self.set_name = row['set_name']
|
||||
self.collector_number = row['collector_number']
|
||||
self.foil = row['foil']
|
||||
self.rarity = row['rarity']
|
||||
self.manabox_id = row['manabox_id']
|
||||
self.scryfall_id = row['scryfall_id']
|
||||
self.condition = row['condition']
|
||||
self.language = row['language']
|
||||
self.quantity = row['quantity']
|
||||
|
||||
class ProductService:
|
||||
def __init__(self, db: Session, file_service: FileService, tcgplayer_service: TCGPlayerService, storage_service: StorageService):
|
||||
def __init__(
|
||||
self,
|
||||
db: Session,
|
||||
file_service: FileService,
|
||||
tcgplayer_service: TCGPlayerService,
|
||||
storage_service: StorageService,
|
||||
):
|
||||
self.db = db
|
||||
self.file_service = file_service
|
||||
self.tcgplayer_service = tcgplayer_service
|
||||
self.storage_service = storage_service
|
||||
self.df_util = DataframeUtil()
|
||||
|
||||
def _format_manabox_df(self, df: pd.DataFrame) -> pd.DataFrame:
|
||||
# format columns
|
||||
df.columns = df.columns.str.lower()
|
||||
df.columns = df.columns.str.replace(' ', '_')
|
||||
return df
|
||||
def create_staged_file_product(
|
||||
self, file_id: str, card_manabox: CardManabox, row: ManaboxRow
|
||||
) -> StagedFileProduct:
|
||||
"""Create a staged file product entry.
|
||||
|
||||
def _manabox_file_to_df(self, file: File) -> pd.DataFrame:
|
||||
with open(file.filepath, 'rb') as f:
|
||||
content = f.read()
|
||||
content = content.decode('utf-8')
|
||||
df = pd.read_csv(StringIO(content))
|
||||
df = self._format_manabox_df(df)
|
||||
return df
|
||||
Args:
|
||||
file_id: The ID of the file being processed
|
||||
card_manabox: The Manabox card details
|
||||
row: The row data from the Manabox file
|
||||
|
||||
def create_staged_product(self, file: File, card_manabox:CardManabox, row: ManaboxRow) -> StagedFileProduct:
|
||||
Returns:
|
||||
The created staged file product
|
||||
"""
|
||||
staged_product = StagedFileProduct(
|
||||
id = str(uuid()),
|
||||
file_id = file.id,
|
||||
id=str(uuid4()),
|
||||
file_id=file_id,
|
||||
product_id=card_manabox.product_id,
|
||||
quantity = row.quantity
|
||||
quantity=row.quantity,
|
||||
)
|
||||
with db_transaction(self.db):
|
||||
self.db.add(staged_product)
|
||||
return staged_product
|
||||
|
||||
def create_card_manabox(self, manabox_row: ManaboxRow) -> CardManabox:
|
||||
def create_card_manabox(
|
||||
self, manabox_row: ManaboxRow, card_tcgplayer: CardTCGPlayer
|
||||
) -> CardManabox:
|
||||
"""Create a Manabox card entry.
|
||||
|
||||
Args:
|
||||
manabox_row: The row data from the Manabox file
|
||||
card_tcgplayer: The TCGPlayer card details
|
||||
|
||||
Returns:
|
||||
The created Manabox card
|
||||
"""
|
||||
if not card_tcgplayer:
|
||||
group = (
|
||||
self.db.query(TCGPlayerGroups)
|
||||
.filter(TCGPlayerGroups.abbreviation == manabox_row.set_code)
|
||||
.first()
|
||||
)
|
||||
card_tcgplayer = self.tcgplayer_service.get_card_tcgplayer_from_manabox_row(
|
||||
manabox_row, group.group_id
|
||||
)
|
||||
|
||||
card_manabox = CardManabox(
|
||||
product_id = str(uuid()),
|
||||
product_id=card_tcgplayer.product_id,
|
||||
name=manabox_row.name,
|
||||
set_code=manabox_row.set_code,
|
||||
set_name=manabox_row.set_name,
|
||||
@ -70,48 +83,27 @@ class ProductService:
|
||||
manabox_id=manabox_row.manabox_id,
|
||||
scryfall_id=manabox_row.scryfall_id,
|
||||
condition=manabox_row.condition,
|
||||
language = manabox_row.language
|
||||
language=manabox_row.language,
|
||||
)
|
||||
|
||||
with db_transaction(self.db):
|
||||
self.db.add(card_manabox)
|
||||
return card_manabox
|
||||
|
||||
def create_product(self, card_manabox: CardManabox) -> Product:
|
||||
product = Product(
|
||||
id = card_manabox.product_id,
|
||||
name = card_manabox.name,
|
||||
set_code = card_manabox.set_code,
|
||||
set_name = card_manabox.set_name,
|
||||
type = 'card',
|
||||
product_line = 'mtg'
|
||||
)
|
||||
return product
|
||||
def card_manabox_lookup_create_if_not_exist(
|
||||
self, manabox_row: ManaboxRow
|
||||
) -> CardManabox:
|
||||
"""Lookup a Manabox card or create it if it doesn't exist.
|
||||
|
||||
def get_tcgplayer_card(self, card_manabox: CardManabox) -> CardTCGPlayer:
|
||||
# check if tcgplayer_id exists for product_id in CardTCGPlayer
|
||||
tcgplayer_card = self.db.query(CardTCGPlayer).filter(CardTCGPlayer.product_id == card_manabox.product_id).first()
|
||||
if tcgplayer_card:
|
||||
return tcgplayer_card
|
||||
# if not, get tcgplayer_id from tcgplayer_service
|
||||
tcgplayer_card = self.tcgplayer_service.get_tcgplayer_card(card_manabox)
|
||||
return tcgplayer_card
|
||||
Args:
|
||||
manabox_row: The row data from the Manabox file
|
||||
|
||||
def create_card(self, card_manabox: CardManabox) -> Card:
|
||||
tcgplayer_card = self.get_tcgplayer_card(card_manabox)
|
||||
card = Card(
|
||||
product_id = tcgplayer_card.product_id if tcgplayer_card else card_manabox.product_id,
|
||||
number = card_manabox.collector_number,
|
||||
foil = card_manabox.foil,
|
||||
rarity = card_manabox.rarity,
|
||||
condition = card_manabox.condition,
|
||||
language = card_manabox.language,
|
||||
scryfall_id = card_manabox.scryfall_id,
|
||||
manabox_id = card_manabox.manabox_id,
|
||||
tcgplayer_id = tcgplayer_card.tcgplayer_id if tcgplayer_card else None
|
||||
)
|
||||
return card
|
||||
|
||||
def card_manabox_lookup_create_if_not_exist(self, manabox_row: ManaboxRow) -> CardManabox:
|
||||
# query based on all fields in manabox_row
|
||||
card_manabox = self.db.query(CardManabox).filter(
|
||||
Returns:
|
||||
The existing or newly created Manabox card
|
||||
"""
|
||||
card_manabox = (
|
||||
self.db.query(CardManabox)
|
||||
.filter(
|
||||
CardManabox.name == manabox_row.name,
|
||||
CardManabox.set_code == manabox_row.set_code,
|
||||
CardManabox.set_name == manabox_row.set_name,
|
||||
@ -121,41 +113,71 @@ class ProductService:
|
||||
CardManabox.manabox_id == manabox_row.manabox_id,
|
||||
CardManabox.scryfall_id == manabox_row.scryfall_id,
|
||||
CardManabox.condition == manabox_row.condition,
|
||||
CardManabox.language == manabox_row.language
|
||||
).first()
|
||||
CardManabox.language == manabox_row.language,
|
||||
)
|
||||
.first()
|
||||
)
|
||||
|
||||
if not card_manabox:
|
||||
# create new card_manabox, card, and product
|
||||
with db_transaction(self.db):
|
||||
card_manabox = self.create_card_manabox(manabox_row)
|
||||
product = self.create_product(card_manabox)
|
||||
card = self.create_card(card_manabox)
|
||||
card_manabox.product_id = card.product_id
|
||||
product.id = card.product_id
|
||||
self.db.add(card_manabox)
|
||||
self.db.add(product)
|
||||
self.db.add(card)
|
||||
logger.debug(f"card_manabox not found for row: {manabox_row.__dict__}")
|
||||
group = (
|
||||
self.db.query(TCGPlayerGroups)
|
||||
.filter(TCGPlayerGroups.abbreviation == manabox_row.set_code)
|
||||
.first()
|
||||
)
|
||||
if not group:
|
||||
logger.error(f"Group not found for set code: {manabox_row.set_code}")
|
||||
logger.error(f"Row data: {manabox_row.__dict__}")
|
||||
return None
|
||||
|
||||
card_tcgplayer = self.tcgplayer_service.get_card_tcgplayer_from_manabox_row(
|
||||
manabox_row, group.group_id
|
||||
)
|
||||
if not card_tcgplayer:
|
||||
logger.error(f"Card not found for row: {manabox_row.__dict__}")
|
||||
return None
|
||||
card_manabox = self.create_card_manabox(manabox_row, card_tcgplayer)
|
||||
|
||||
return card_manabox
|
||||
|
||||
def bg_process_manabox_file(self, file_id: str):
|
||||
try:
|
||||
file = self.file_service.get_file(file_id)
|
||||
df = self._manabox_file_to_df(file)
|
||||
for index, row in df.iterrows():
|
||||
def process_manabox_df(self, df: DataFrame, file_id: str) -> None:
|
||||
"""Process a Manabox dataframe.
|
||||
|
||||
Args:
|
||||
df: The Manabox dataframe to process
|
||||
file_id: The ID of the file being processed
|
||||
"""
|
||||
for _, row in df.iterrows():
|
||||
manabox_row = ManaboxRow(row)
|
||||
# create card concepts - manabox, tcgplayer, card, product
|
||||
card_manabox = self.card_manabox_lookup_create_if_not_exist(manabox_row)
|
||||
# create staged inventory with quantity for processing down the marketplace pipeline
|
||||
staged_product = self.create_staged_product(file, card_manabox, row)
|
||||
# update file status
|
||||
if not card_manabox:
|
||||
continue
|
||||
self.create_staged_file_product(file_id, card_manabox, row)
|
||||
|
||||
def bg_process_manabox_file(self, file_id: str) -> None:
|
||||
"""Process a Manabox file in the background.
|
||||
|
||||
Args:
|
||||
file_id: The ID of the file to process
|
||||
|
||||
Raises:
|
||||
Exception: If there's an error during processing
|
||||
"""
|
||||
try:
|
||||
manabox_file = self.file_service.get_file(file_id)
|
||||
manabox_df = self.df_util.file_to_df(manabox_file)
|
||||
self.process_manabox_df(manabox_df, file_id)
|
||||
|
||||
with db_transaction(self.db):
|
||||
file.status = 'completed'
|
||||
manabox_file.status = "completed"
|
||||
|
||||
except Exception as e:
|
||||
with db_transaction(self.db):
|
||||
file.status = 'error'
|
||||
manabox_file.status = "error"
|
||||
raise e
|
||||
|
||||
try:
|
||||
# create storage records for physically storing individual cards
|
||||
self.storage_service.store_staged_products_for_file(file.id)
|
||||
self.storage_service.store_staged_products_for_file(file_id)
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating storage records: {str(e)}")
|
||||
raise e
|
||||
|
@ -1,22 +1,47 @@
|
||||
from uuid import uuid4
|
||||
from typing import List, TypedDict, Optional
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
from db.utils import db_transaction
|
||||
from db.models import Warehouse, User, StagedFileProduct, StorageBlock, ProductBlock, File, Card
|
||||
from uuid import uuid4 as uuid
|
||||
from typing import List, TypedDict
|
||||
from db.models import (
|
||||
Warehouse,
|
||||
User,
|
||||
StagedFileProduct,
|
||||
StorageBlock,
|
||||
ProductBlock,
|
||||
File,
|
||||
CardTCGPlayer
|
||||
)
|
||||
|
||||
class ProductAttributes(TypedDict):
|
||||
"""Attributes for a product to be stored."""
|
||||
product_id: str
|
||||
card_number: str
|
||||
|
||||
class StorageService:
|
||||
def __init__(self, db: Session):
|
||||
"""Service for managing product storage and warehouse operations."""
|
||||
|
||||
def __init__(self, db: Session) -> None:
|
||||
"""Initialize the storage service.
|
||||
|
||||
Args:
|
||||
db: SQLAlchemy database session
|
||||
"""
|
||||
self.db = db
|
||||
|
||||
def get_or_create_user(self, username: str) -> User:
|
||||
"""Get an existing user or create a new one if not found.
|
||||
|
||||
Args:
|
||||
username: Username to look up or create
|
||||
|
||||
Returns:
|
||||
The existing or newly created User
|
||||
"""
|
||||
user = self.db.query(User).filter(User.username == username).first()
|
||||
if user is None:
|
||||
user = User(
|
||||
id = str(uuid()),
|
||||
id=str(uuid4()),
|
||||
username=username
|
||||
)
|
||||
with db_transaction(self.db):
|
||||
@ -24,11 +49,16 @@ class StorageService:
|
||||
return user
|
||||
|
||||
def get_or_create_warehouse(self) -> Warehouse:
|
||||
"""Get the default warehouse or create it if it doesn't exist.
|
||||
|
||||
Returns:
|
||||
The existing or newly created Warehouse
|
||||
"""
|
||||
warehouse = self.db.query(Warehouse).first()
|
||||
user = self.get_or_create_user('admin')
|
||||
if warehouse is None:
|
||||
warehouse = Warehouse(
|
||||
id = str(uuid()),
|
||||
id=str(uuid4()),
|
||||
user_id=user.id
|
||||
)
|
||||
with db_transaction(self.db):
|
||||
@ -36,20 +66,38 @@ class StorageService:
|
||||
return warehouse
|
||||
|
||||
def get_staged_product(self, file_id: str) -> List[StagedFileProduct]:
|
||||
staged_product = self.db.query(StagedFileProduct).filter(StagedFileProduct.file_id == file_id).all()
|
||||
return staged_product
|
||||
"""Get all staged products for a given file.
|
||||
|
||||
Args:
|
||||
file_id: ID of the file to get staged products for
|
||||
|
||||
Returns:
|
||||
List of staged products
|
||||
"""
|
||||
return self.db.query(StagedFileProduct).filter(
|
||||
StagedFileProduct.file_id == file_id
|
||||
).all()
|
||||
|
||||
def get_storage_block_name(self, warehouse: Warehouse, file_id: str) -> str:
|
||||
# Get file type from id
|
||||
"""Generate a unique name for a new storage block.
|
||||
|
||||
Args:
|
||||
warehouse: Warehouse the block belongs to
|
||||
file_id: ID of the file being processed
|
||||
|
||||
Returns:
|
||||
Unique storage block name
|
||||
|
||||
Raises:
|
||||
ValueError: If no file is found with the given ID
|
||||
"""
|
||||
current_file = self.db.query(File).filter(File.id == file_id).first()
|
||||
if not current_file:
|
||||
raise ValueError(f"No file found with id {file_id}")
|
||||
|
||||
# Determine storage block type
|
||||
storage_block_type = 'rare' if 'rare' in current_file.type else 'common'
|
||||
prefix = storage_block_type[0]
|
||||
|
||||
# Get most recent storage block with same type and warehouse id
|
||||
latest_block = (
|
||||
self.db.query(StorageBlock)
|
||||
.filter(
|
||||
@ -60,18 +108,10 @@ class StorageService:
|
||||
.first()
|
||||
)
|
||||
|
||||
# If no existing block, start with number 1
|
||||
if not latest_block:
|
||||
return f"{prefix}1"
|
||||
|
||||
# Start with the next number after the latest block
|
||||
number = int(latest_block.name[1:])
|
||||
start_number = 1 if not latest_block else int(latest_block.name[1:]) + 1
|
||||
|
||||
while True:
|
||||
number += 1
|
||||
new_name = f"{prefix}{number}"
|
||||
|
||||
# Check if the new name already exists
|
||||
new_name = f"{prefix}{start_number}"
|
||||
exists = (
|
||||
self.db.query(StorageBlock)
|
||||
.filter(
|
||||
@ -83,8 +123,21 @@ class StorageService:
|
||||
|
||||
if not exists:
|
||||
return new_name
|
||||
start_number += 1
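Read after this change, the naming rule is: one-letter type prefix plus the next unused integer in that warehouse. A standalone sketch of just that rule, where existing_names stands in for the StorageBlock names already queried (an assumption for illustration only):

def next_block_name(block_type: str, existing_names: set) -> str:
    # block_type is 'rare' or 'common'; the prefix is its first letter
    prefix = block_type[0]
    numbers = [
        int(name[1:]) for name in existing_names
        if name.startswith(prefix) and name[1:].isdigit()
    ]
    candidate = (max(numbers) + 1) if numbers else 1
    while f"{prefix}{candidate}" in existing_names:
        candidate += 1
    return f"{prefix}{candidate}"

print(next_block_name("rare", {"r1", "r2", "c5"}))  # r3
print(next_block_name("common", set()))             # c1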
def create_storage_block(self, warehouse: Warehouse, file_id: str) -> StorageBlock:
|
||||
"""Create a new storage block for the given warehouse and file.
|
||||
|
||||
Args:
|
||||
warehouse: Warehouse to create the block in
|
||||
file_id: ID of the file being processed
|
||||
|
||||
Returns:
|
||||
Newly created StorageBlock
|
||||
|
||||
Raises:
|
||||
ValueError: If no file is found with the given ID
|
||||
"""
|
||||
current_file = self.db.query(File).filter(File.id == file_id).first()
|
||||
if not current_file:
|
||||
raise ValueError(f"No file found with id {file_id}")
|
||||
@ -92,7 +145,7 @@ class StorageService:
|
||||
storage_block_type = 'rare' if 'rare' in current_file.type else 'common'
|
||||
|
||||
storage_block = StorageBlock(
|
||||
id = str(uuid()),
|
||||
id=str(uuid4()),
|
||||
warehouse_id=warehouse.id,
|
||||
name=self.get_storage_block_name(warehouse, file_id),
|
||||
type=storage_block_type
|
||||
@ -108,9 +161,19 @@ class StorageService:
|
||||
product_attributes: ProductAttributes,
|
||||
block_index: int
|
||||
) -> ProductBlock:
|
||||
"""Create a new ProductBlock for a single unit of a staged product."""
|
||||
"""Create a new ProductBlock for a single unit of a staged product.
|
||||
|
||||
Args:
|
||||
staged_product: The staged product to store
|
||||
storage_block: The block to store the product in
|
||||
product_attributes: Additional product attributes
|
||||
block_index: Index within the storage block
|
||||
|
||||
Returns:
|
||||
Newly created ProductBlock
|
||||
"""
|
||||
product_block = ProductBlock(
|
||||
id=str(uuid()),
|
||||
id=str(uuid4()),
|
||||
product_id=staged_product.product_id,
|
||||
block_id=storage_block.id,
|
||||
block_index=block_index
|
||||
@ -125,14 +188,21 @@ class StorageService:
|
||||
self,
|
||||
staged_product: StagedFileProduct
|
||||
) -> List[ProductAttributes]:
|
||||
"""Get attributes for each unit of a staged product."""
|
||||
"""Get attributes for each unit of a staged product.
|
||||
|
||||
Args:
|
||||
staged_product: The staged product to get attributes for
|
||||
|
||||
Returns:
|
||||
List of attributes for each unit of the product
|
||||
"""
|
||||
result = (
|
||||
self.db.query(
|
||||
StagedFileProduct.product_id,
|
||||
StagedFileProduct.quantity,
|
||||
Card.number
|
||||
CardTCGPlayer.number
|
||||
)
|
||||
.join(Card, Card.product_id == StagedFileProduct.product_id)
|
||||
.join(CardTCGPlayer, CardTCGPlayer.product_id == StagedFileProduct.product_id)
|
||||
.filter(StagedFileProduct.id == staged_product.id)
|
||||
.first()
|
||||
)
|
||||
@ -145,11 +215,18 @@ class StorageService:
|
||||
product_id=result.product_id,
|
||||
card_number=result.number
|
||||
)
|
||||
for i in range(result.quantity)
|
||||
for _ in range(result.quantity)
|
||||
]
|
||||
|
||||
def store_staged_products_for_file(self, file_id: str) -> StorageBlock:
|
||||
"""Store all staged products for a file in a new storage block."""
|
||||
"""Store all staged products for a file in a new storage block.
|
||||
|
||||
Args:
|
||||
file_id: ID of the file containing staged products
|
||||
|
||||
Returns:
|
||||
The newly created StorageBlock containing all products
|
||||
"""
|
||||
warehouse = self.get_or_create_warehouse()
|
||||
storage_block = self.create_storage_block(warehouse, file_id)
|
||||
staged_products = self.get_staged_product(file_id)
|
||||
@ -162,7 +239,10 @@ class StorageService:
|
||||
all_product_attributes.append((staged_product, attrs))
|
||||
|
||||
# Sort by card number as integer to determine block indices
|
||||
sorted_attributes = sorted(all_product_attributes, key=lambda x: int(x[1]['card_number']))
|
||||
sorted_attributes = sorted(
|
||||
all_product_attributes,
|
||||
key=lambda x: int(''.join(filter(str.isdigit, x[1]['card_number'])))
|
||||
)
|
||||
|
||||
# Add products with correct block indices
|
||||
for block_index, (staged_product, product_attributes) in enumerate(sorted_attributes, 1):
|
||||
|
@ -30,12 +30,12 @@ class TaskService:
|
||||
id='daily_report'
|
||||
)
|
||||
|
||||
self.scheduler.add_job(
|
||||
self.pricing_update,
|
||||
'cron',
|
||||
minute=28,
|
||||
id='pricing_update'
|
||||
)
|
||||
# self.scheduler.add_job(
|
||||
# self.pricing_update,
|
||||
# 'cron',
|
||||
# minute=41,
|
||||
# id='pricing_update'
|
||||
# )
|
||||
|
||||
def daily_report(self): # Removed async
|
||||
self.logger.info("Generating daily report")
|
||||
|
@ -1,8 +1,10 @@
|
||||
from db.models import ManaboxExportData, Box, TCGPlayerGroups, TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, TCGPlayerProduct, ManaboxTCGPlayerMapping, CardManabox, CardTCGPlayer, Price
|
||||
from db.models import ManaboxExportData, Box, TCGPlayerGroups, TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, TCGPlayerProduct, ManaboxTCGPlayerMapping, CardManabox, CardTCGPlayer, Price, Product, Card, File
|
||||
import requests
|
||||
from services.util._dataframe import TCGPlayerPricingRow, DataframeUtil, ManaboxRow
|
||||
from services.file import FileService
|
||||
from sqlalchemy.orm import Session
|
||||
from db.utils import db_transaction
|
||||
import uuid
|
||||
from uuid import uuid4 as uuid
|
||||
import browser_cookie3
|
||||
import webbrowser
|
||||
from typing import Optional, Dict, List
|
||||
@ -20,6 +22,7 @@ from services.pricing import PricingService
|
||||
from sqlalchemy.sql import exists
|
||||
import pandas as pd
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from schemas.file import CreateFileRequest
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
@ -42,6 +45,8 @@ class TCGPlayerConfig:
|
||||
|
||||
class TCGPlayerService:
|
||||
def __init__(self, db: Session,
|
||||
pricing_service: PricingService,
|
||||
file_service: FileService,
|
||||
config: TCGPlayerConfig=TCGPlayerConfig(),
|
||||
browser_type: Browser=Browser.BRAVE):
|
||||
self.db = db
|
||||
@ -49,11 +54,14 @@ class TCGPlayerService:
|
||||
self.browser_type = browser_type
|
||||
self.cookies = None
|
||||
self.previous_request_time = None
|
||||
self.pricing_service = pricing_service
|
||||
self.df_util = DataframeUtil()
|
||||
self.file_service = file_service
|
||||
|
||||
def _insert_groups(self, groups):
|
||||
for group in groups:
|
||||
db_group = TCGPlayerGroups(
|
||||
id=str(uuid.uuid4()),
|
||||
id=str(uuid()),
|
||||
group_id=group['groupId'],
|
||||
name=group['name'],
|
||||
abbreviation=group['abbreviation'],
|
||||
@ -70,15 +78,27 @@ class TCGPlayerService:
|
||||
response.raise_for_status()
|
||||
groups = response.json()['results']
|
||||
# manually add broken groups
|
||||
groups.append({
|
||||
manual_groups = [
|
||||
{
|
||||
"groupId": 2422,
|
||||
"name": "Modern Horizons 2 Timeshifts",
|
||||
"abbreviation": "H2R",
|
||||
"isSupplemental": "false",
|
||||
"isSupplemental": "0",
|
||||
"publishedOn": "2018-11-08T00:00:00",
|
||||
"modifiedOn": "2018-11-08T00:00:00",
|
||||
"categoryId": 1
|
||||
})
|
||||
},
|
||||
{
|
||||
"groupId": 52,
|
||||
"name": "Store Championships",
|
||||
"abbreviation": "SCH",
|
||||
"isSupplemental": "1",
|
||||
"publishedOn": "2007-07-14T00:00:00",
|
||||
"modifiedOn": "2007-07-14T00:00:00",
|
||||
"categoryId": 1
|
||||
}
|
||||
]
|
||||
groups.extend(manual_groups)
|
||||
# Insert groups into db
|
||||
with db_transaction(self.db):
|
||||
self._insert_groups(groups)
|
||||
@ -230,7 +250,7 @@ class TCGPlayerService:
|
||||
def update_inventory(self, version: str) -> Dict:
|
||||
if version not in ['staged', 'live']:
|
||||
raise ValueError("Invalid inventory version")
|
||||
export_id = str(uuid.uuid4())
|
||||
export_id = str(uuid())
|
||||
inventory = self._get_inventory(version)
|
||||
if not inventory:
|
||||
return {"message": "No inventory to update"}
|
||||
@ -267,14 +287,14 @@ class TCGPlayerService:
|
||||
|
||||
with db_transaction(self.db):
|
||||
export_history = TCGPlayerExportHistory(
|
||||
id=str(uuid.uuid4()),
|
||||
id=str(uuid()),
|
||||
type=version + '_inventory',
|
||||
inventory_export_id=export_id
|
||||
)
|
||||
self.db.add(export_history)
|
||||
for item in inventory:
|
||||
db_item = TCGPlayerInventory(
|
||||
id=str(uuid.uuid4()),
|
||||
id=str(uuid()),
|
||||
**{db_field: item.get(csv_field)
|
||||
for csv_field, db_field in inventory_fields.items()}
|
||||
)
|
||||
@ -301,7 +321,7 @@ class TCGPlayerService:
|
||||
pass
|
||||
|
||||
def update_pricing(self, set_name_ids: Dict[str, List[str]]) -> Dict:
|
||||
export_id = str(uuid.uuid4())
|
||||
export_id = str(uuid())
|
||||
product_fields = {
|
||||
'TCGplayer Id': 'tcgplayer_id',
|
||||
'group_id': 'group_id',
|
||||
@ -337,7 +357,7 @@ class TCGPlayerService:
|
||||
else:
|
||||
with db_transaction(self.db):
|
||||
product = TCGPlayerProduct(
|
||||
id=str(uuid.uuid4()),
|
||||
id=str(uuid()),
|
||||
**{db_field: item.get(csv_field)
|
||||
for csv_field, db_field in product_fields.items()}
|
||||
)
|
||||
@ -346,7 +366,7 @@ class TCGPlayerService:
|
||||
|
||||
with db_transaction(self.db):
|
||||
ph_item = TCGPlayerPricingHistory(
|
||||
id=str(uuid.uuid4()),
|
||||
id=str(uuid()),
|
||||
**{db_field: item.get(csv_field)
|
||||
for csv_field, db_field in pricing_fields.items()}
|
||||
)
|
||||
@ -355,7 +375,7 @@ class TCGPlayerService:
|
||||
|
||||
with db_transaction(self.db):
|
||||
export_history = TCGPlayerExportHistory(
|
||||
id=str(uuid.uuid4()),
|
||||
id=str(uuid()),
|
||||
type='pricing',
|
||||
pricing_export_id=export_id
|
||||
)
|
||||
@ -457,34 +477,173 @@ class TCGPlayerService:
|
||||
add_csv = None
|
||||
return add_csv
|
||||
|
||||
def load_export_csv_to_card_tcgplayer(self, export_csv: bytes, group_id: int) -> None:
|
||||
if not export_csv:
|
||||
raise ValueError("No export CSV provided")
|
||||
|
||||
# Convert bytes to string first
|
||||
text_content = export_csv.decode('utf-8')
|
||||
csv_file = StringIO(text_content)
|
||||
try:
|
||||
reader = csv.DictReader(csv_file)
|
||||
for row in reader:
|
||||
product = CardTCGPlayer(
|
||||
product_id=str(uuid.uuid4()),
|
||||
tcgplayer_id=row['TCGplayer Id'],
|
||||
def create_tcgplayer_card(self, row: TCGPlayerPricingRow, group_id: int):
|
||||
# if card already exists, return none
|
||||
card_exists = self.db.query(CardTCGPlayer).filter(
|
||||
CardTCGPlayer.tcgplayer_id == row.tcgplayer_id,
|
||||
CardTCGPlayer.group_id == group_id
|
||||
).first()
|
||||
if card_exists:
|
||||
return card_exists
|
||||
# create product
|
||||
product = Product(
|
||||
id=str(uuid()),
|
||||
type = 'card',
|
||||
product_line = 'mtg'
|
||||
)
|
||||
# create card
|
||||
card = Card(
|
||||
product_id=product.id,
|
||||
)
|
||||
# create Cardtcgplayer
|
||||
tcgcard = CardTCGPlayer(
|
||||
product_id=product.id,
|
||||
group_id=group_id,
|
||||
product_line=row['Product Line'],
|
||||
set_name=row['Set Name'],
|
||||
product_name=row['Product Name'],
|
||||
title=row['Title'],
|
||||
number=row['Number'],
|
||||
rarity=row['Rarity'],
|
||||
condition=row['Condition']
|
||||
tcgplayer_id=row.tcgplayer_id,
|
||||
product_line=row.product_line,
|
||||
set_name=row.set_name,
|
||||
product_name=row.product_name,
|
||||
title=row.title,
|
||||
number=row.number,
|
||||
rarity=row.rarity,
|
||||
condition=row.condition
|
||||
)
|
||||
with db_transaction(self.db):
|
||||
self.db.add(product)
|
||||
finally:
|
||||
csv_file.close()
|
||||
self.db.add(card)
|
||||
self.db.add(tcgcard)
|
||||
return tcgcard
|
||||
|
||||
def match_card_tcgplayer_to_manabox(self, card: CardManabox, group_id: int) -> CardTCGPlayer:
|
||||
def create_tcgplayer_cards_batch(self, rows: list[TCGPlayerPricingRow], set_to_group: dict) -> list[CardTCGPlayer]:
|
||||
# Get existing cards in a single query
|
||||
existing_cards = {
|
||||
(card.tcgplayer_id, card.group_id): card
|
||||
for card in self.db.query(CardTCGPlayer).filter(
|
||||
CardTCGPlayer.tcgplayer_id.in_([row.tcgplayer_id for row in rows]),
|
||||
CardTCGPlayer.group_id.in_([set_to_group[row.set_name] for row in rows])
|
||||
).all()
|
||||
}
|
||||
|
||||
# Pre-allocate lists for better memory efficiency
|
||||
new_products = []
|
||||
new_cards = []
|
||||
new_tcgcards = []
|
||||
# new_prices = []
|
||||
|
||||
# price_types = {
|
||||
# 'tcg_market_price': 'tcg_market_price',
|
||||
# 'tcg_direct_low': 'tcg_direct_low',
|
||||
# 'tcg_low_price_with_shipping': 'tcg_low_price_with_shipping',
|
||||
# 'tcg_low_price': 'tcg_low_price',
|
||||
# 'tcg_marketplace_price': 'tcg_marketplace_price'
|
||||
#}
|
||||
|
||||
for row in rows:
|
||||
# Get the correct group_id for this row's set
|
||||
group_id = set_to_group[row.set_name]
|
||||
|
||||
if (row.tcgplayer_id, group_id) in existing_cards:
|
||||
continue
|
||||
|
||||
product_id = str(uuid())
|
||||
|
||||
new_products.append(Product(
|
||||
id=product_id,
|
||||
type='card',
|
||||
product_line='mtg'
|
||||
))
|
||||
|
||||
new_cards.append(Card(
|
||||
product_id=product_id,
|
||||
))
|
||||
|
||||
new_tcgcards.append(CardTCGPlayer(
|
||||
product_id=product_id,
|
||||
group_id=group_id, # Use the correct group_id for this specific row
|
||||
tcgplayer_id=row.tcgplayer_id,
|
||||
product_line=row.product_line,
|
||||
set_name=row.set_name,
|
||||
product_name=row.product_name,
|
||||
title=row.title,
|
||||
number=row.number,
|
||||
rarity=row.rarity,
|
||||
condition=row.condition
|
||||
))
|
||||
|
||||
# Batch create price objects
|
||||
# row_prices = [
|
||||
# Price(
|
||||
# id=str(uuid()),
|
||||
# product_id=product_id,
|
||||
# marketplace_id=None,
|
||||
# type=price_type,
|
||||
# price=getattr(row, col_name)
|
||||
# )
|
||||
# for col_name, price_type in price_types.items()
|
||||
# if getattr(row, col_name, None) is not None and getattr(row, col_name) > 0
|
||||
# ]
|
||||
# new_prices.extend(row_prices)
|
||||
|
||||
if new_products:
|
||||
with db_transaction(self.db):
|
||||
self.db.bulk_save_objects(new_products)
|
||||
self.db.bulk_save_objects(new_cards)
|
||||
self.db.bulk_save_objects(new_tcgcards)
|
||||
# if new_prices:
|
||||
# self.db.bulk_save_objects(new_prices)
|
||||
|
||||
return new_tcgcards
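The batch path above skips duplicates by keying existing rows on (tcgplayer_id, group_id), so each incoming row costs one set lookup. The same pattern in isolation, with plain tuples standing in for the ORM objects; all ids below are placeholders.

def filter_new_rows(incoming, existing_keys):
    # incoming: iterable of (tcgplayer_id, group_id, payload)
    # existing_keys: set of (tcgplayer_id, group_id) already in the database
    new_rows = []
    for tcgplayer_id, group_id, payload in incoming:
        if (tcgplayer_id, group_id) in existing_keys:
            continue  # already loaded for this set
        new_rows.append((tcgplayer_id, group_id, payload))
    return new_rows

existing = {(1001, 2422), (1002, 2422)}
rows = [(1001, 2422, "duplicate"), (1003, 2422, "new card"), (1001, 52, "same id, other set")]
print(filter_new_rows(rows, existing))  # keeps the last two entries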
def load_export_csv_to_card_tcgplayer(self, export_csv: bytes, file_id: str = None, batch_size: int = 1000) -> None:
|
||||
try:
|
||||
if not export_csv:
|
||||
raise ValueError("No export CSV provided")
|
||||
|
||||
df = self.df_util.csv_bytes_to_df(export_csv)
|
||||
|
||||
logger.debug(f"Loaded {len(df)} rows from export CSV")
|
||||
|
||||
# Get all group_ids upfront in a single query
|
||||
set_to_group = dict(
|
||||
self.db.query(TCGPlayerGroups.name, TCGPlayerGroups.group_id).all()
|
||||
)
|
||||
|
||||
# Process in batches
|
||||
for i in range(0, len(df), batch_size):
|
||||
batch_df = df.iloc[i:i + batch_size]
|
||||
batch_rows = [TCGPlayerPricingRow(row) for _, row in batch_df.iterrows()]
|
||||
|
||||
# Filter rows with valid group_ids
|
||||
valid_rows = [
|
||||
row for row in batch_rows
|
||||
if row.set_name in set_to_group
|
||||
]
|
||||
|
||||
# logger.debug(f"Processing batch {i // batch_size + 1}: {len(valid_rows)} valid rows")
|
||||
|
||||
if valid_rows:
|
||||
# Pass the entire set_to_group mapping
|
||||
self.create_tcgplayer_cards_batch(valid_rows, set_to_group)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to load export CSV: {e}")
|
||||
# set file upload to failed
|
||||
if file_id:
|
||||
with db_transaction(self.db):
|
||||
file = self.db.query(File).filter(File.id == file_id).first()
|
||||
if file:
|
||||
file.status = 'failed'
|
||||
self.db.add(file)
|
||||
raise
|
||||
finally:
|
||||
if file_id:
|
||||
with db_transaction(self.db):
|
||||
file = self.db.query(File).filter(File.id == file_id).first()
|
||||
if file:
|
||||
file.status = 'completed'
|
||||
self.db.add(file)
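For reference, the fixed-size slicing used above, reduced to a pandas-only sketch with no database work; the frame and batch size are placeholder values.

import pandas as pd

def iter_batches(df: pd.DataFrame, batch_size: int = 1000):
    # Yield consecutive row slices of at most batch_size rows
    for start in range(0, len(df), batch_size):
        yield df.iloc[start:start + batch_size]

frame = pd.DataFrame({"tcgplayer_id": range(10)})
print([len(batch) for batch in iter_batches(frame, batch_size=4)])  # [4, 4, 2]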
def get_card_tcgplayer_from_manabox_row(self, card: ManaboxRow, group_id: int) -> CardTCGPlayer:
|
||||
# Expanded rarity mapping
|
||||
mb_to_tcg_rarity_mapping = {
|
||||
"common": "C",
|
||||
@ -516,12 +675,20 @@ class TCGPlayerService:
|
||||
return None
|
||||
|
||||
# First query for matching products without rarity filter
|
||||
# debug
|
||||
# log everything in this query
|
||||
# remove letters from card.collector_number FOR JOIN ONLY
|
||||
join_collector_number = ''.join(filter(str.isdigit, card.collector_number))
|
||||
# logger.debug(f"Querying for card: {card.name}, {card.set_code}, {card.collector_number}, {tcg_condition}, {group_id}")
|
||||
base_query = self.db.query(CardTCGPlayer).filter(
|
||||
CardTCGPlayer.number == card.collector_number,
|
||||
CardTCGPlayer.number == join_collector_number,
|
||||
CardTCGPlayer.condition == tcg_condition,
|
||||
CardTCGPlayer.group_id == group_id
|
||||
CardTCGPlayer.group_id == group_id,
|
||||
CardTCGPlayer.rarity != "T" # TOKENS ARE NOT SUPPORTED CUZ BROKE LOL
|
||||
)
|
||||
|
||||
# logger.debug(f"Base query: {base_query.statement.compile(compile_kwargs={'literal_binds': True})}")
|
||||
|
||||
# Get all potential matches
|
||||
products = base_query.all()
|
||||
|
||||
@ -535,44 +702,42 @@ class TCGPlayerService:
|
||||
if product.rarity == "L" or product.rarity == tcg_rarity:
|
||||
return product
|
||||
|
||||
# ignore rarity, just make sure only one product is returned
|
||||
if len(products) > 1:
|
||||
# try to match on name before failing
|
||||
for product in products:
|
||||
if product.product_name == card.name:
|
||||
return product
|
||||
elif len(products) == 1:
|
||||
return products[0]
|
||||
|
||||
logger.error(f"Multiple matching TCGPlayer products found for card {card.name} ({card.set_code} {card.collector_number})")
|
||||
return None
|
||||
|
||||
# If we got here, we found products but none matched our rarity criteria
|
||||
logger.error(f"No matching TCGPlayer product with correct rarity found for card {card.name} ({card.set_name} {card.collector_number})")
|
||||
return None
|
||||
# logger.error(f"No matching TCGPlayer product with correct rarity found for card {card.name} {card.rarity} {group_id} ({card.set_name} {card.collector_number})")
|
||||
# return None
|
||||
|
||||
def get_tcgplayer_card(self, card: CardManabox) -> CardTCGPlayer:
|
||||
# find tcgplayer group id for set code
|
||||
group_id = self.db.query(TCGPlayerGroups.group_id).filter(
|
||||
TCGPlayerGroups.abbreviation == card.set_code
|
||||
).first()
|
||||
if not group_id:
|
||||
logger.error(f"Group ID not found for set code {card.set_code}")
|
||||
logger.error(f"Card details: name={card.name}, set_name={card.set_name}, collector_number={card.collector_number}")
|
||||
return None
|
||||
group_id = group_id[0]
|
||||
# check for group_id in CardTCGPlayer
|
||||
group_id_exists = self.db.query(CardTCGPlayer).filter(
|
||||
CardTCGPlayer.group_id == group_id).first()
|
||||
if not group_id_exists:
|
||||
export_csv = self._get_export_csv([str(group_id)], convert=False) # TODO should be file service
|
||||
self.load_export_csv_to_card_tcgplayer(export_csv, group_id)
|
||||
# match card to tcgplayer product
|
||||
matching_product = self.match_card_tcgplayer_to_manabox(card, group_id)
|
||||
if not matching_product:
|
||||
return None
|
||||
return matching_product
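The match above leans on two normalizations: collector numbers are reduced to their digits before the join, and Manabox rarities are folded into TCGPlayer's single-letter codes, with lands ('L') always accepted. A small sketch of those helpers; only the 'common' entry of the mapping is visible in this diff, so the other letters here are assumed.

MB_TO_TCG_RARITY = {"common": "C", "uncommon": "U", "rare": "R", "mythic": "M"}  # partial, partly assumed

def normalize_collector_number(collector_number: str) -> str:
    # Strip letters so e.g. '123a' joins against the numeric TCGPlayer number
    return "".join(ch for ch in collector_number if ch.isdigit())

def rarity_matches(product_rarity: str, manabox_rarity: str) -> bool:
    # Lands are accepted regardless of the scanned rarity
    return product_rarity == "L" or product_rarity == MB_TO_TCG_RARITY.get(manabox_rarity)

print(normalize_collector_number("123a"))  # 123
print(rarity_matches("L", "common"))       # True
print(rarity_matches("R", "rare"))         # True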
    def get_pricing_export_for_all_products(self) -> bytes:
    def get_pricing_export_for_all_products(self) -> File:
        """
        Retrieves pricing export data for all products in TCGPlayer format.

        Returns:
            bytes: Raw CSV data containing pricing information
        """
        DEBUG = True
        if DEBUG:
            logger.debug("DEBUG: Using existing pricing export file")
            file = self.db.query(File).filter(File.type == 'tcgplayer_pricing_export').first()
            if file:
                return file
        try:
            all_group_ids = self.db.query(TCGPlayerGroups.group_id).all()
            all_group_ids = [str(group_id) for group_id, in all_group_ids]
            export_csv = self._get_export_csv(all_group_ids, convert=False)
            return export_csv
            export_csv_file = self.file_service.create_file(export_csv, CreateFileRequest(
                source="tcgplayer",
                type="tcgplayer_pricing_export",
                filename="tcgplayer_pricing_export.csv"
            ))
            return export_csv_file
        except SQLAlchemyError as e:
            raise RuntimeError(f"Failed to retrieve group IDs: {str(e)}")

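    # Illustrative usage sketch (not part of this commit): with the new signature the caller
    # gets a File record back and reads the CSV through the file service, mirroring
    # cron_load_prices below. `tcgplayer_service` is an assumed service instance.
    #
    #   export_file = tcgplayer_service.get_pricing_export_for_all_products()
    #   export_csv = tcgplayer_service.file_service.get_file_content(export_file.id)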
@ -613,63 +778,21 @@ class TCGPlayerService:
    def cron_load_prices(self) -> None:
        """
        Scheduled task to load and update product prices.
        Fetches current prices, processes them, and saves new price records to the database.
        Uses optimized bulk processing for better performance.
        """
        logger.debug("Running cron_load_prices...")
        try:
            # Get and process price data
            price_csv = self.get_pricing_export_for_all_products()
            price_df = self.pricing_export_to_df(price_csv)
            # Get pricing export
            export_csv_file = self.get_pricing_export_for_all_products()
            export_csv = self.file_service.get_file_content(export_csv_file.id)

            # Clean column names
            price_df.columns = price_df.columns.str.lower().str.replace(' ', '_')
            # load to card tcgplayer
            self.load_export_csv_to_card_tcgplayer(export_csv, export_csv_file.id)

            # Get all products efficiently
            products_query = self.db.query(
                CardTCGPlayer.tcgplayer_id,
                CardTCGPlayer.product_id
            )
            product_df = pd.read_sql(products_query.statement, self.db.bind)

            # Merge dataframes
            merged_df = pd.merge(
                price_df,
                product_df,
                on='tcgplayer_id',
                how='inner'
            )

            # Define price columns to process
            price_columns = [
                'tcg_market_price',
                'tcg_direct_low',
                'tcg_low_price_with_shipping',
                'tcg_low_price',
                'tcg_marketplace_price'
            ]

            # Process in batches to avoid memory issues
            BATCH_SIZE = 1000
            for price_column in price_columns:
                records = []

                for start_idx in range(0, len(merged_df), BATCH_SIZE):
                    batch_df = merged_df.iloc[start_idx:start_idx + BATCH_SIZE]

                    batch_records = [
                        Price(
                            id=str(uuid.uuid4()),
                            product_id=row['product_id'],
                            type=price_column,
                            price=row[price_column]
                        )
                        for _, row in batch_df.iterrows()
                        if pd.notna(row[price_column])  # Skip rows with NaN prices
                    ]

                    with db_transaction(self.db):
                        self.db.bulk_save_objects(batch_records)
                        self.db.flush()
            # Process the export with optimized bulk operations
            # the pricing service proves that there is no god
            # self.pricing_service.process_pricing_export(export_csv)

        except Exception as e:
            logger.error(f"Failed to load prices: {str(e)}")
            logger.error(f"Failed to load prices: {e}")
            raise
500
services/unholy_pricing.py
Normal file
@ -0,0 +1,500 @@
from dataclasses import dataclass
from decimal import Decimal, ROUND_HALF_UP
from enum import Enum
from typing import Optional, Dict, List, Any
import pandas as pd
import logging
from db.models import Product, Price
from sqlalchemy.orm import Session
from uuid import uuid4 as uuid
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
from sqlalchemy import text
from services.util._dataframe import DataframeUtil


logger = logging.getLogger(__name__)


class PriceType(str, Enum):
    TCG_MARKET = 'tcg_market_price'
    TCG_DIRECT_LOW = 'tcg_direct_low'
    TCG_LOW_WITH_SHIPPING = 'tcg_low_price_with_shipping'
    TCG_LOW = 'tcg_low_price'
    TCG_MARKETPLACE = 'tcg_marketplace_price'
    MY_PRICE = 'my_price'


class PricingStrategy(str, Enum):
    DEFAULT = 'default'
    AGGRESSIVE = 'aggressive'
    CONSERVATIVE = 'conservative'

@dataclass
class PriceRange:
    min_price: Decimal
    max_price: Decimal
    multiplier: Decimal
    ceiling_price: Optional[Decimal] = None
    include_shipping: bool = False

    def __post_init__(self):
        # Convert all values to Decimal for precise calculations
        self.min_price = Decimal(str(self.min_price))
        self.max_price = Decimal(str(self.max_price))
        self.multiplier = Decimal(str(self.multiplier))
        if self.ceiling_price is not None:
            self.ceiling_price = Decimal(str(self.ceiling_price))

    def contains_price(self, price: Decimal) -> bool:
        """Check if a price falls within this range, inclusive of min, exclusive of max."""
        return self.min_price <= price < self.max_price

    def calculate_price(self, base_price: Decimal) -> Decimal:
        """Calculate the final price for this range, respecting ceiling."""
        calculated = base_price * self.multiplier
        if self.ceiling_price is not None:
            calculated = min(calculated, self.ceiling_price)
        return calculated.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP)

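# Illustrative sketch (not part of this commit): how a PriceRange behaves on its boundaries.
# Assumes the 5-10 bracket defined in PricingConfiguration below.
def _example_price_range_usage() -> None:
    rng = PriceRange(
        min_price=Decimal('5'),
        max_price=Decimal('10'),
        multiplier=Decimal('1.15'),
        ceiling_price=Decimal('9.99'),
        include_shipping=True
    )
    assert rng.contains_price(Decimal('9.99'))        # inclusive of min, exclusive of max
    assert not rng.contains_price(Decimal('10.00'))
    assert rng.calculate_price(Decimal('9.50')) == Decimal('9.99')  # 10.925 capped at ceiling
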
class PricingConfiguration:
    """Centralized configuration for pricing rules and thresholds."""

    # Price thresholds
    FLOOR_PRICE = Decimal('0.35')
    MAX_PRICE = Decimal('100000.00')  # Safety cap for maximum price
    SHIPPING_THRESHOLD = Decimal('5.00')

    # Multipliers
    FLOOR_MULT = Decimal('1.25')
    NEAR_FLOOR_MULT = Decimal('1.25')
    UNDER_FIVE_MULT = Decimal('1.25')
    FIVE_TO_TEN_MULT = Decimal('1.15')
    TEN_TO_TWENTYFIVE_MULT = Decimal('1.10')
    TWENTYFIVE_TO_FIFTY_MULT = Decimal('1.05')
    FIFTY_PLUS_MULT = Decimal('1.025')

    # Price variance thresholds
    MAX_PRICE_VARIANCE = Decimal('0.50')  # Maximum allowed variance between prices as a ratio

    @classmethod
    def get_price_ranges(cls) -> list[PriceRange]:
        """Get the list of price ranges with their respective rules."""
        return [
            PriceRange(
                min_price=Decimal('0'),
                max_price=cls.FLOOR_PRICE,
                multiplier=cls.FLOOR_MULT,
                include_shipping=False
            ),
            PriceRange(
                min_price=cls.FLOOR_PRICE,
                max_price=Decimal('5'),
                multiplier=cls.UNDER_FIVE_MULT,
                ceiling_price=Decimal('4.99'),
                include_shipping=False
            ),
            PriceRange(
                min_price=Decimal('5'),
                max_price=Decimal('10'),
                multiplier=cls.FIVE_TO_TEN_MULT,
                ceiling_price=Decimal('9.99'),
                include_shipping=True
            ),
            PriceRange(
                min_price=Decimal('10'),
                max_price=Decimal('25'),
                multiplier=cls.TEN_TO_TWENTYFIVE_MULT,
                ceiling_price=Decimal('24.99'),
                include_shipping=True
            ),
            PriceRange(
                min_price=Decimal('25'),
                max_price=Decimal('50'),
                multiplier=cls.TWENTYFIVE_TO_FIFTY_MULT,
                ceiling_price=Decimal('49.99'),
                include_shipping=True
            ),
            PriceRange(
                min_price=Decimal('50'),
                max_price=cls.MAX_PRICE,
                multiplier=cls.FIFTY_PLUS_MULT,
                include_shipping=True
            )
        ]

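# Worked example (illustrative, not part of this commit): a 7.50 base price falls in the
# 5-10 bracket, so it is multiplied by FIVE_TO_TEN_MULT (1.15) and kept under the 9.99
# ceiling: 7.50 * 1.15 = 8.625, rounded half-up to 8.63.
def _example_range_lookup() -> None:
    base = Decimal('7.50')
    for price_range in PricingConfiguration.get_price_ranges():
        if price_range.contains_price(base):
            assert price_range.calculate_price(base) == Decimal('8.63')
            break
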
class PriceCalculationResult:
    """Represents the result of a price calculation."""

    def __init__(
        self,
        product: Product,
        calculated_price: Optional[Decimal],
        base_prices: Dict[str, Decimal],
        error: Optional[str] = None
    ):
        self.product = product
        self.calculated_price = calculated_price
        self.base_prices = base_prices
        self.error = error

    @property
    def success(self) -> bool:
        return self.calculated_price is not None and self.error is None

    @property
    def max_base_price(self) -> Optional[Decimal]:
        """Returns the highest base price."""
        return max(self.base_prices.values()) if self.base_prices else None


class PricingService:
    CHUNK_SIZE = 5000  # Configurable batch size
    MAX_WORKERS = 4  # Configurable worker count

    def __init__(self, db: Session):
        self.db = db
        self.df_util = DataframeUtil()
        self.config = PricingConfiguration
        self.price_ranges = self.config.get_price_ranges()

    def get_product_by_id(self, product_id: str) -> Optional[Product]:
        """Get a product by its ID."""
        return self.db.query(Product)\
            .filter(Product.id == str(product_id))\
            .all()[0] if len(self.db.query(Product)\
            .filter(Product.id == str(product_id))\
            .all()) > 0 else None

    def get_latest_price_for_product(self, product: Product, price_type: PriceType) -> Optional[Price]:
        """Get the most recent price of a specific type for a product."""
        prices = self.db.query(Price)\
            .filter(
                Price.product_id == str(product.id),
                Price.type == price_type.value
            )\
            .order_by(Price.date_created.desc())\
            .all()
        return prices[0] if prices else None

    def get_historical_prices_for_product(
        self, product: Product, price_type: Optional[PriceType] = None
    ) -> dict[PriceType, list[Price]]:
        """Get historical prices for a product, optionally filtered by type."""
        query = self.db.query(Price).filter(Price.product_id == str(product.id))

        if price_type:
            query = query.filter(Price.type == price_type.value)  # Fixed: Use enum value

        prices = query.order_by(Price.date_created.desc()).all()

        if price_type:
            return {price_type: prices}

        # Group prices by type
        result = {t: [] for t in PriceType}
        for price in prices:
            result[PriceType(price.type)].append(price)  # Fixed: Convert string to enum
        return result

    def _validate_price_data(self, prices: dict[str, Optional[Price]]) -> Optional[str]:
        """Validate price data and return error message if invalid."""
        # Filter out None values and get valid prices
        valid_prices = {k: v for k, v in prices.items() if v is not None}

        if not valid_prices:
            return "No valid price data available"

        for price in valid_prices.values():
            if price.price < 0:
                return f"Negative price found: {price.price}"
            if price.price > self.config.MAX_PRICE:
                return f"Price exceeds maximum allowed: {price.price}"

        return None

    def _check_price_variance(self, prices: Dict[str, Decimal]) -> bool:
        """Check if the variance between prices is within acceptable limits."""
        if not prices:
            return True

        min_price = min(prices.values())
        max_price = max(prices.values())

        if min_price == 0:
            return False

        variance_ratio = max_price / min_price
        return variance_ratio <= (1 + self.config.MAX_PRICE_VARIANCE)

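    # Worked example (illustrative, not part of this commit): with MAX_PRICE_VARIANCE = 0.50
    # the highest base price may be at most 1.5x the lowest.
    #   {4.00, 5.50} -> 5.50 / 4.00 = 1.375 <= 1.5 -> accepted
    #   {4.00, 6.50} -> 6.50 / 4.00 = 1.625 >  1.5 -> rejected as excessive variance
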
    def _get_relevant_prices(self, product: Product) -> dict[str, Optional[Price]]:
        """Get all relevant prices for a product."""
        return {
            PriceType.TCG_LOW.value: self.get_latest_price_for_product(product, PriceType.TCG_LOW),
            PriceType.TCG_DIRECT_LOW.value: self.get_latest_price_for_product(product, PriceType.TCG_DIRECT_LOW),
            PriceType.TCG_MARKET.value: self.get_latest_price_for_product(product, PriceType.TCG_MARKET),
            PriceType.TCG_LOW_WITH_SHIPPING.value: self.get_latest_price_for_product(product, PriceType.TCG_LOW_WITH_SHIPPING)
        }

    def _get_base_prices(
        self, prices: dict[str, Price], include_shipping: bool = False
    ) -> Dict[str, Decimal]:
        """Get base prices, excluding None values."""
        base_prices = {}

        # Add core prices if they exist
        if tcg_low := prices.get(PriceType.TCG_LOW.value):
            base_prices[PriceType.TCG_LOW.value] = Decimal(str(tcg_low.price))
        if tcg_direct := prices.get(PriceType.TCG_DIRECT_LOW.value):
            base_prices[PriceType.TCG_DIRECT_LOW.value] = Decimal(str(tcg_direct.price))
        if tcg_market := prices.get(PriceType.TCG_MARKET.value):
            base_prices[PriceType.TCG_MARKET.value] = Decimal(str(tcg_market.price))

        # Add shipping price if requested and available
        if include_shipping:
            if tcg_shipping := prices.get(PriceType.TCG_LOW_WITH_SHIPPING.value):
                base_prices[PriceType.TCG_LOW_WITH_SHIPPING.value] = Decimal(str(tcg_shipping.price))

        return base_prices

    def _get_price_range(self, price: Decimal) -> Optional[PriceRange]:
        """Get the appropriate price range for a given price."""
        for price_range in self.price_ranges:
            if price_range.contains_price(price):
                return price_range
        return None

    def _handle_floor_price_cases(
        self, base_prices: Dict[str, Decimal]
    ) -> Optional[Decimal]:
        """Handle special cases for prices near or below floor price."""
        if all(price < self.config.FLOOR_PRICE for price in base_prices.values()):
            return self.config.FLOOR_PRICE

        if any(price < self.config.FLOOR_PRICE for price in base_prices.values()):
            max_price = max(base_prices.values())
            return max_price * self.config.NEAR_FLOOR_MULT

        return None

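    # Worked example (illustrative, not part of this commit):
    #   all base prices below the 0.35 floor   -> returns FLOOR_PRICE (0.35)
    #   mixed, e.g. {0.20, 0.50}               -> returns 0.50 * NEAR_FLOOR_MULT = 0.625
    #   nothing below the floor                -> returns None and the normal ranges apply
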
    def calculate_price(
        self, product_id: str, strategy: PricingStrategy = PricingStrategy.DEFAULT
    ) -> PriceCalculationResult:
        """Calculate the final price for a product using the specified pricing strategy."""
        # get product
        product = self.get_product_by_id(str(product_id))  # Fixed: Ensure string UUID
        if not product:
            logger.error(f"Product not found: {product_id}")
            return PriceCalculationResult(product, None, {}, "Product not found")

        # Get all relevant prices
        prices = self._get_relevant_prices(product)

        # Validate price data
        if error := self._validate_price_data(prices):
            logger.error(f"Invalid price data: {error}")
            logger.error(f"product: {product.id}")
            return PriceCalculationResult(product, None, {}, error)

        # Get initial base prices without shipping
        base_prices = self._get_base_prices(prices, include_shipping=False)

        # Check price variance
        if not self._check_price_variance(base_prices):
            logger.error(f"Price variance exceeds acceptable threshold")
            logger.error(f"Base prices: {base_prices}")
            logger.error(f"product: {product.id}")
            return PriceCalculationResult(
                product, None, base_prices,
                "Price variance exceeds acceptable threshold"
            )

        # Handle floor price cases
        if floor_price := self._handle_floor_price_cases(base_prices):
            return PriceCalculationResult(product, floor_price, base_prices)

        # Get max base price and its range
        max_base_price = max(base_prices.values())
        price_range = self._get_price_range(max_base_price)

        if not price_range:
            logger.error(f"No valid price range found for price")
            logger.error(f"Base prices: {base_prices}, max_base_price: {max_base_price}")
            logger.error(f"product: {product.id}")
            return PriceCalculationResult(
                product, None, base_prices,
                f"No valid price range found for price: {max_base_price}"
            )

        # Include shipping prices if necessary
        if price_range.include_shipping:
            base_prices = self._get_base_prices(prices, include_shipping=True)
            max_base_price = max(base_prices.values())

            # Recheck price range with shipping
            price_range = self._get_price_range(max_base_price)

            if not price_range:
                logger.error(f"No valid price range found for price with shipping")
                logger.error(f"Base prices: {base_prices}, max_base_price: {max_base_price}")
                logger.error(f"product: {product.id}")
                return PriceCalculationResult(
                    product, None, base_prices,
                    f"No valid price range found for price with shipping: {max_base_price}"
                )

        # Calculate final price using the price range
        calculated_price = price_range.calculate_price(max_base_price)

        # Apply strategy-specific adjustments
        if strategy == PricingStrategy.AGGRESSIVE:
            calculated_price *= Decimal('0.95')
        elif strategy == PricingStrategy.CONSERVATIVE:
            calculated_price *= Decimal('1.05')

        debug_base_prices_with_name_string = ", ".join([f"{k}: {v}" for k, v in base_prices.items()])

        logger.debug(f"Set price to {calculated_price.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP)} based on {debug_base_prices_with_name_string}")

        return PriceCalculationResult(
            product,
            calculated_price.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP),
            base_prices
        )

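    # Illustrative usage sketch (not part of this commit); `session` and the product id are
    # assumed to exist and to already have price rows loaded:
    #
    #   pricing = PricingService(session)
    #   result = pricing.calculate_price("<product-uuid>", strategy=PricingStrategy.DEFAULT)
    #   if result.success:
    #       print(result.calculated_price, result.base_prices)
    #   else:
    #       print(result.error)
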
    def _bulk_generate_uuids(self, size: int) -> List[str]:
        """Generate UUIDs in bulk for better performance."""
        return [str(uuid()) for _ in range(size)]

    def _prepare_price_records(self, df: pd.DataFrame, price_type: str, uuids: List[str]) -> List[Dict]:
        """Prepare price records in bulk using vectorized operations."""
        records = []
        df['price_id'] = uuids[:len(df)]
        df['type'] = price_type  # price_type should already be a string value
        df['date_created'] = datetime.utcnow()

        return df[['price_id', 'product_id', 'type', 'price', 'date_created']].to_dict('records')

    def _calculate_suggested_prices_batch(self, product_ids: List[str]) -> Dict[str, float]:
        """Calculate suggested prices in parallel for a batch of products."""
        with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
            future_to_id = {
                executor.submit(self.calculate_price, str(pid)): pid  # Fixed: Ensure string UUID
                for pid in product_ids
            }

            results = {}
            for future in as_completed(future_to_id):
                product_id = future_to_id[future]
                try:
                    result = future.result()
                    if result.success:
                        results[str(product_id)] = float(result.calculated_price)  # Fixed: Ensure string UUID
                except Exception as e:
                    logger.error(f"Failed to calculate price for product {product_id}: {e}")

            return results

    def _bulk_insert_prices(self, records: List[Dict]) -> None:
        """Efficiently insert price records in bulk."""
        if not records:
            return

        try:
            df = pd.DataFrame(records)
            df.to_sql('prices', self.db.bind,
                      if_exists='append',
                      index=False,
                      method='multi',
                      chunksize=self.CHUNK_SIZE)

        except Exception as e:
            logger.error(f"Failed to bulk insert prices: {e}")
            raise

    def process_pricing_export(self, export_csv: bytes) -> None:
        """Process pricing export with optimized bulk operations."""
        try:
            # Convert CSV to DataFrame
            df = self.df_util.csv_bytes_to_df(export_csv)
            df.columns = df.columns.str.lower().str.replace(' ', '_')

            # Get product mappings efficiently - SQLite compatible with chunking
            SQLITE_MAX_VARS = 999  # SQLite parameter limit
            tcgplayer_ids = df['tcgplayer_id'].tolist()
            all_product_dfs = []

            for i in range(0, len(tcgplayer_ids), SQLITE_MAX_VARS):
                chunk_ids = tcgplayer_ids[i:i + SQLITE_MAX_VARS]
                placeholders = ','.join([':id_' + str(j) for j in range(len(chunk_ids))])
                product_query = f"""
                    SELECT tcgplayer_id, product_id
                    FROM card_tcgplayer
                    WHERE tcgplayer_id IN ({placeholders})
                """

                # Create a dictionary of parameters
                params = {f'id_{j}': id_val for j, id_val in enumerate(chunk_ids)}

                chunk_df = pd.read_sql(
                    text(product_query),
                    self.db.bind,
                    params=params
                )
                all_product_dfs.append(chunk_df)

            # Combine all chunks
            product_df = pd.concat(all_product_dfs) if all_product_dfs else pd.DataFrame()

            # Merge dataframes efficiently
            merged_df = pd.merge(
                df,
                product_df,
                on='tcgplayer_id',
                how='inner'
            )

            # Define price columns mapping - using enum values directly
            price_columns = {
                'tcg_market_price': PriceType.TCG_MARKET.value,
                'tcg_direct_low': PriceType.TCG_DIRECT_LOW.value,
                'tcg_low_price_with_shipping': PriceType.TCG_LOW_WITH_SHIPPING.value,
                'tcg_low_price': PriceType.TCG_LOW.value,
                'tcg_marketplace_price': PriceType.TCG_MARKETPLACE.value
            }

            # Process each price type in chunks
            for price_col, price_type in price_columns.items():
                valid_prices_df = merged_df[merged_df[price_col].notna()].copy()

                for chunk_start in range(0, len(valid_prices_df), self.CHUNK_SIZE):
                    chunk_df = valid_prices_df.iloc[chunk_start:chunk_start + self.CHUNK_SIZE].copy()
                    uuids = self._bulk_generate_uuids(len(chunk_df))

                    chunk_df['price'] = chunk_df[price_col]
                    chunk_df['product_id'] = chunk_df['product_id'].astype(str)  # Fixed: Ensure string UUIDs
                    records = self._prepare_price_records(chunk_df, price_type, uuids)
                    self._bulk_insert_prices(records)

            # Handle suggested prices separately with parallel processing
            product_ids = merged_df['product_id'].unique()
            suggested_prices = {}

            for chunk_start in range(0, len(product_ids), self.CHUNK_SIZE):
                chunk_ids = product_ids[chunk_start:chunk_start + self.CHUNK_SIZE]
                chunk_prices = self._calculate_suggested_prices_batch(chunk_ids)
                suggested_prices.update(chunk_prices)

            # Create suggested price records
            if suggested_prices:
                suggested_df = pd.DataFrame([
                    {'product_id': str(pid), 'price': price}  # Fixed: Ensure string UUIDs
                    for pid, price in suggested_prices.items()
                ])

                uuids = self._bulk_generate_uuids(len(suggested_df))
                records = self._prepare_price_records(suggested_df, 'suggested_price', uuids)
                self._bulk_insert_prices(records)

        except Exception as e:
            logger.error(f"Failed to process pricing export: {e}")
            logger.error(f"Error occurred during price processing: {str(e)}")
            raise
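
# Illustrative wiring sketch (not part of this commit): feed a TCGPlayer pricing export
# (raw CSV bytes) into the pricing service. The session, TCGPlayerService and FileService
# instances are assumed; get_file_content mirrors its use in cron_load_prices.
def _example_process_export(session, tcgplayer_service, file_service) -> None:
    pricing = PricingService(session)
    export_file = tcgplayer_service.get_pricing_export_for_all_products()
    export_csv = file_service.get_file_content(export_file.id)
    pricing.process_pricing_export(export_csv)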
@ -1,100 +0,0 @@
from db.models import ManaboxExportData, UploadHistory
import pandas as pd
from io import StringIO
import uuid
from sqlalchemy.orm import Session
from db.utils import db_transaction
from exceptions import FailedUploadException


import logging
logger = logging.getLogger(__name__)


class UploadObject:
    def __init__(self,
                 content: bytes = None,
                 upload_id: str = None,
                 filename: str = None,
                 df: pd.DataFrame = None):
        self.content = content
        self.upload_id = upload_id
        self.filename = filename
        self.df = df


class UploadService:
    def __init__(self, db: Session):
        self.db = db

    def _content_to_df(self, content: bytes) -> pd.DataFrame:
        df = pd.read_csv(StringIO(content.decode('utf-8')))
        df.columns = df.columns.str.lower().str.replace(' ', '_')
        return df

    def _create_upload_id(self) -> str:
        return str(uuid.uuid4())

    def _prepare_manabox_df(self, content: bytes, upload_id: str) -> pd.DataFrame:
        df = self._content_to_df(content)
        df['upload_id'] = upload_id
        df['box_id'] = None

        return df

    def _create_file_upload_record(self, upload_id: str, filename: str, file_size_kb: float, num_rows: int) -> UploadHistory:
        file_upload_record = UploadHistory(
            id = str(uuid.uuid4()),
            upload_id = upload_id,
            filename = filename,
            status = "pending",
            file_size_kb = file_size_kb,
            num_rows = num_rows
        )
        self.db.add(file_upload_record)
        return file_upload_record

    def _update_manabox_data(self, df: pd.DataFrame) -> bool:
        for index, row in df.iterrows():
            try:
                add_row = ManaboxExportData(
                    id = str(uuid.uuid4()),
                    upload_id = row['upload_id'],
                    box_id = row['box_id'],
                    name = row['name'],
                    set_code = row['set_code'],
                    set_name = row['set_name'],
                    collector_number = row['collector_number'],
                    foil = row['foil'],
                    rarity = row['rarity'],
                    quantity = row['quantity'],
                    manabox_id = row['manabox_id'],
                    scryfall_id = row['scryfall_id'],
                    purchase_price = row['purchase_price'],
                    misprint = row['misprint'],
                    altered = row['altered'],
                    condition = row['condition'],
                    language = row['language'],
                    purchase_price_currency = row['purchase_price_currency']
                )
                self.db.add(add_row)
            except Exception as e:
                logger.error(f"Error adding row to ManaboxExportData")
                return False
        return True

    def process_manabox_upload(self, content: bytes, filename: str, file_size_kb: float) -> dict:
        upload = UploadObject(content=content, filename=filename)
        upload.upload_id = self._create_upload_id()
        upload.df = self._prepare_manabox_df(upload.content, upload.upload_id)
        num_rows = len(upload.df)

        with db_transaction(self.db):
            file_upload_record = self._create_file_upload_record(upload.upload_id, upload.filename, file_size_kb, num_rows)
            if not self._update_manabox_data(upload.df):
                # set upload to failed
                file_upload_record.status = "failed"
                raise FailedUploadException(file_upload_record)
            else:
                # set upload_history status to success
                file_upload_record.status = "success"
                return {"message": f"Manabox upload successful. Upload ID: {upload.upload_id}"}, upload.upload_id
68
services/util/_dataframe.py
Normal file
@ -0,0 +1,68 @@
import pandas as pd
from io import StringIO
from db.models import File


class ManaboxRow:
    def __init__(self, row: pd.Series):
        # Integer field
        try:
            self.manabox_id = int(row['manabox_id'])
        except (ValueError, TypeError):
            raise ValueError(f"manabox_id must be convertible to integer, got: {row['manabox_id']}")

        # String fields with None/NaN handling
        self.name = str(row['name']) if pd.notna(row['name']) else ''
        self.set_code = str(row['set_code']) if pd.notna(row['set_code']) else ''
        self.set_name = str(row['set_name']) if pd.notna(row['set_name']) else ''
        self.collector_number = str(row['collector_number']) if pd.notna(row['collector_number']) else ''
        self.foil = str(row['foil']) if pd.notna(row['foil']) else ''
        self.rarity = str(row['rarity']) if pd.notna(row['rarity']) else ''
        self.scryfall_id = str(row['scryfall_id']) if pd.notna(row['scryfall_id']) else ''
        self.condition = str(row['condition']) if pd.notna(row['condition']) else ''
        self.language = str(row['language']) if pd.notna(row['language']) else ''
        self.quantity = str(row['quantity']) if pd.notna(row['quantity']) else ''


class TCGPlayerPricingRow:
    def __init__(self, row: pd.Series):
        self.tcgplayer_id = row['tcgplayer_id']
        self.product_line = row['product_line']
        self.set_name = row['set_name']
        self.product_name = row['product_name']
        self.title = row['title']
        self.number = row['number']
        self.rarity = row['rarity']
        self.condition = row['condition']
        self.tcg_market_price = row['tcg_market_price']
        self.tcg_direct_low = row['tcg_direct_low']
        self.tcg_low_price_with_shipping = row['tcg_low_price_with_shipping']
        self.tcg_low_price = row['tcg_low_price']
        self.total_quantity = row['total_quantity']
        self.add_to_quantity = row['add_to_quantity']
        self.tcg_marketplace_price = row['tcg_marketplace_price']
        self.photo_url = row['photo_url']


class DataframeUtil:
    def __init__(self):
        pass

    def format_df_columns(self, df: pd.DataFrame) -> pd.DataFrame:
        df.columns = df.columns.str.lower()
        df.columns = df.columns.str.replace(' ', '_')
        return df

    def file_to_df(self, file: File) -> pd.DataFrame:
        with open(file.filepath, 'rb') as f:
            content = f.read()
            content = content.decode('utf-8')
            df = pd.read_csv(StringIO(content))
            df = self.format_df_columns(df)
            return df

    def csv_bytes_to_df(self, content: bytes) -> pd.DataFrame:
        content = content.decode('utf-8')
        df = pd.read_csv(StringIO(content))
        df = self.format_df_columns(df)
        return df
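
# Illustrative usage sketch (not part of this commit): column names are lower-cased and
# spaces become underscores, so TCGPlayer export headers line up with the row wrappers above.
def _example_csv_bytes_to_df() -> None:
    util = DataframeUtil()
    sample = b"Product Name,TCG Market Price\nLightning Bolt,1.25\n"
    df = util.csv_bytes_to_df(sample)
    assert list(df.columns) == ['product_name', 'tcg_market_price']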