Squashed commit of the following:
All commits authored by zman <joshua.k.rzemien@gmail.com>.

commit 893b229cc6  (Fri Feb 7 22:14:08 2025 -0500)  j
commit 06f539aea2  (Fri Feb 7 21:55:30 2025 -0500)  fk
commit d0c2960ec9  (Fri Feb 7 21:50:53 2025 -0500)  frick
commit 6b1362c166  (Fri Feb 7 21:49:40 2025 -0500)  database
commit 8cadc6df4c  (Fri Feb 7 21:38:09 2025 -0500)  asdf
commit 1ca6f98684  (Fri Feb 7 21:32:50 2025 -0500)  fffff
commit 8bb337a9c3  (Fri Feb 7 21:31:13 2025 -0500)  ffff
commit 65aba280c5  (Fri Feb 7 21:26:16 2025 -0500)  aa
commit 59ef03a59e  (Fri Feb 7 21:24:21 2025 -0500)  asdf
commit f44d5740fc  (Fri Feb 7 21:23:32 2025 -0500)  aaa
commit 13c96b1643  (Fri Feb 7 21:18:54 2025 -0500)  sdf
commit 949c795fd1  (Fri Feb 7 21:17:53 2025 -0500)  asdf
commit 8c3cd423fe  (Fri Feb 7 20:56:01 2025 -0500)  app2
commit 78eafc739e  (Fri Feb 7 20:54:55 2025 -0500)  app
commit dc47eced14  (Fri Feb 7 20:43:15 2025 -0500)  asdfasdfasdf
commit e24bcae88c  (Fri Feb 7 20:39:44 2025 -0500)  a
commit c894451bfe  (Fri Feb 7 20:38:20 2025 -0500)  req
commit 3d09869562  (Fri Feb 7 20:33:27 2025 -0500)  wrong number = code dont work lol i love computers
commit 4c93a1271b  (Fri Feb 7 20:29:39 2025 -0500)  q
commit 1f5361da88  (Fri Feb 7 18:27:20 2025 -0500)  same as original code now -5 days of my life
commit 511b070cbb  (Fri Feb 7 13:52:28 2025 -0500)  pricey worky
commit 964fdd641b  (Fri Feb 7 11:37:29 2025 -0500)  prep for pricing service work
commit a78c3bcba3  (Wed Feb 5 21:51:22 2025 -0500)  more stuff yay
commit bd9cfca7a9  (Tue Feb 4 22:30:33 2025 -0500)  GIGA FIXED EVERYTHING OMG
commit 85510a4671  (Tue Feb 4 00:01:34 2025 -0500)  data model change and some new services
app/services/__init__.py (new file, 0 lines)
app/services/box.py (new file, 202 lines)
@@ -0,0 +1,202 @@
from datetime import datetime
from typing import Dict, List, Tuple
from uuid import uuid4
from sqlalchemy import or_
from sqlalchemy.orm import Session
import logging

from app.db.models import (
    Box,
    File,
    StagedFileProduct,
    Product,
    OpenBoxCard,
    OpenBox,
    TCGPlayerGroups,
    Inventory
)
from app.db.utils import db_transaction
from app.schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest
from app.services.inventory import InventoryService

logger = logging.getLogger(__name__)

VALID_BOX_TYPES = {"collector", "play", "draft", "set", "commander"}


class BoxService:
    def __init__(self, db: Session, inventory_service: InventoryService):
        self.db = db
        self.inventory_service = inventory_service

    def get_staged_product_data(self, file_ids: List[str]) -> List[StagedFileProduct]:
        """Retrieve staged product data for the given file IDs."""
        return self.db.query(StagedFileProduct).filter(
            StagedFileProduct.file_id.in_(file_ids)
        ).all()

    def aggregate_staged_product_data(self, staged_product_data: List[StagedFileProduct]) -> Dict[Product, int]:
        """Aggregate staged product data by product and quantity."""
        product_data = {}
        for row in staged_product_data:
            product = self.db.query(Product).filter(Product.id == row.product_id).first()
            if product:
                product_data[product] = product_data.get(product, 0) + row.quantity
        return product_data

    def add_products_to_open_box(self, open_box: OpenBox, product_data: Dict[Product, int]) -> None:
        """Add products to an open box."""
        for product, quantity in product_data.items():
            open_box_card = OpenBoxCard(
                id=str(uuid4()),
                open_box_id=open_box.id,
                card_id=product.id,
                quantity=quantity
            )
            self.db.add(open_box_card)

    def validate_box_type(self, box_type: str) -> bool:
        """Validate that the box type is supported."""
        return box_type in VALID_BOX_TYPES

    def validate_set_code(self, set_code: str) -> bool:
        """Validate that the set code exists in TCGPlayer groups."""
        return self.db.query(TCGPlayerGroups).filter(
            TCGPlayerGroups.abbreviation == set_code
        ).first() is not None

    def create_box(self, create_box_data: CreateBoxRequest) -> Tuple[Box, bool]:
        """Create a new box. Returns the box and whether it was newly created."""
        if not self.validate_box_type(create_box_data.type):
            raise ValueError("Invalid box type")
        if not self.validate_set_code(create_box_data.set_code):
            raise ValueError("Invalid set code")

        existing_box = self.db.query(Box).filter(
            Box.type == create_box_data.type,
            Box.set_code == create_box_data.set_code,
            or_(Box.sku == create_box_data.sku, Box.sku.is_(None))
        ).first()

        if existing_box:
            return existing_box, False
        else:
            with db_transaction(self.db):
                product = Product(
                    id=str(uuid4()),
                    type='box',
                    product_line='mtg'
                )
                box = Box(
                    product_id=product.id,
                    type=create_box_data.type,
                    set_code=create_box_data.set_code,
                    sku=create_box_data.sku,
                    num_cards_expected=create_box_data.num_cards_expected
                )
                self.db.add(product)
                self.db.add(box)

            return box, True

    def update_box(self, box_id: str, update_box_data: UpdateBoxRequest) -> Box:
        """Update an existing box."""
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise ValueError("Box not found")

        update_data = update_box_data.dict(exclude_unset=True)

        # Validate box type if it's being updated
        if "type" in update_data and update_data["type"] is not None:
            if not self.validate_box_type(update_data["type"]):
                raise ValueError(f"Invalid box type: {update_data['type']}")

        # Validate set code if it's being updated
        if "set_code" in update_data and update_data["set_code"] is not None:
            if not self.validate_set_code(update_data["set_code"]):
                raise ValueError(f"Invalid set code: {update_data['set_code']}")

        with db_transaction(self.db):
            for field, value in update_data.items():
                if value is not None:  # Only update non-None values
                    setattr(box, field, value)

        return box

    def delete_box(self, box_id: str) -> Box:
        """Delete a box and its backing product."""
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        product = self.db.query(Product).filter(Product.id == box_id).first()
        if not box:
            raise ValueError("Box not found")

        with db_transaction(self.db):
            self.db.delete(box)
            if product:
                self.db.delete(product)
        return box

    def open_box(self, box_id: str, box_data: CreateOpenBoxRequest) -> OpenBox:
        """Open a box and process its contents."""
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise ValueError("Box not found")

        with db_transaction(self.db):
            open_box = OpenBox(
                id=str(uuid4()),
                product_id=box_id,
                num_cards_actual=box_data.num_cards_actual,
                date_opened=datetime.strptime(box_data.date_opened, "%Y-%m-%d") if box_data.date_opened else datetime.now()
            )
            self.db.add(open_box)

            staged_product_data = self.get_staged_product_data(box_data.file_ids)
            product_data = self.aggregate_staged_product_data(staged_product_data)
            self.inventory_service.process_staged_products(product_data)
            self.add_products_to_open_box(open_box, product_data)

            # Update file box IDs
            self.db.query(File).filter(File.id.in_(box_data.file_ids)).update(
                {"box_id": open_box.id}, synchronize_session=False
            )

        return open_box

    def delete_open_box(self, box_id: str) -> OpenBox:
        """Delete an open box, returning its card quantities to inventory."""
        # Fetch the open box
        open_box = (
            self.db.query(OpenBox)
            .filter(OpenBox.id == box_id)
            .first()
        )
        if not open_box:
            raise ValueError("Open box not found")

        # Get all open box cards and related inventory items in one query
        open_box_cards = (
            self.db.query(OpenBoxCard, Inventory)
            .join(
                Inventory,
                OpenBoxCard.card_id == Inventory.product_id
            )
            .filter(OpenBoxCard.open_box_id == open_box.id)
            .all()
        )

        # Process inventory adjustments
        for open_box_card, inventory_item in open_box_cards:
            if open_box_card.quantity > inventory_item.quantity:
                raise ValueError("Open box quantity exceeds inventory quantity")

            inventory_item.quantity -= open_box_card.quantity
            if inventory_item.quantity == 0:
                self.db.delete(inventory_item)

            # Delete the open box card
            self.db.delete(open_box_card)

        # Execute all database operations in a single transaction
        with db_transaction(self.db):
            self.db.delete(open_box)

        return open_box
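A quick usage sketch for the service above (not part of the diff): it assumes a configured SQLAlchemy session factory named SessionLocal and placeholder UUIDs, both hypothetical names for illustration only.

# Sketch only: SessionLocal and the placeholder UUIDs are assumptions,
# not names defined in this commit.
from app.db.database import SessionLocal  # hypothetical session factory
from app.schemas.box import CreateOpenBoxRequest
from app.services.box import BoxService
from app.services.inventory import InventoryService

db = SessionLocal()
box_service = BoxService(db, InventoryService(db))

# Opening a box moves the staged scan results into inventory and links
# the scanned files to the new OpenBox record.
request = CreateOpenBoxRequest(
    num_cards_actual=350,
    date_opened="2025-02-07",
    file_ids=["<staged-file-uuid>"],
)
open_box = box_service.open_box("<box-product-uuid>", request)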
app/services/file.py (new file, 156 lines)
@@ -0,0 +1,156 @@
from sqlalchemy.orm import Session
from typing import Optional, List, Dict, Any
from uuid import uuid4
import csv
import logging
import os
from io import StringIO

from app.db.utils import db_transaction
from app.db.models import File, StagedFileProduct
from app.schemas.file import CreateFileRequest

logger = logging.getLogger(__name__)


class FileConfig:
    """Configuration constants for file processing"""
    TEMP_DIR = os.path.join(os.getcwd(), 'app/temp')

    MANABOX_HEADERS = [
        'Name', 'Set code', 'Set name', 'Collector number', 'Foil',
        'Rarity', 'Quantity', 'ManaBox ID', 'Scryfall ID', 'Purchase price',
        'Misprint', 'Altered', 'Condition', 'Language', 'Purchase price currency'
    ]

    SOURCES = {
        "manabox": {
            "required_headers": MANABOX_HEADERS,
            "allowed_extensions": ['.csv'],
            "allowed_types": ['scan_export_common', 'scan_export_rare']
        }
    }


class FileValidationError(Exception):
    """Custom exception for file validation errors"""
    pass


class FileService:
    def __init__(self, db: Session):
        self.db = db

    def get_config(self, source: str) -> Dict[str, Any]:
        """Get configuration for a specific source"""
        config = FileConfig.SOURCES.get(source)
        if not config:
            raise FileValidationError(f"Unsupported source: {source}")
        return config

    def validate_file_extension(self, filename: str, config: Dict[str, Any]) -> bool:
        """Validate file extension against allowed extensions"""
        return any(filename.endswith(ext) for ext in config["allowed_extensions"])

    def validate_file_type(self, metadata: CreateFileRequest, config: Dict[str, Any]) -> bool:
        """Validate file type against allowed types"""
        return metadata.type in config["allowed_types"]

    def validate_csv(self, content: bytes, required_headers: Optional[List[str]] = None) -> bool:
        """Validate CSV content and headers"""
        try:
            csv_text = content.decode('utf-8')
            csv_file = StringIO(csv_text)
            csv_reader = csv.reader(csv_file)

            if required_headers:
                headers = next(csv_reader, None)
                if not headers or not all(header in headers for header in required_headers):
                    return False
            return True

        except (UnicodeDecodeError, csv.Error) as e:
            logger.error(f"CSV validation error: {str(e)}")
            return False

    def validate_file_content(self, content: bytes, metadata: CreateFileRequest, config: Dict[str, Any]) -> bool:
        """Validate file content based on file type"""
        extension = os.path.splitext(metadata.filename)[1].lower()
        if extension == '.csv':
            return self.validate_csv(content, config.get("required_headers"))
        return False

    def validate_file(self, content: bytes, metadata: CreateFileRequest) -> bool:
        """Validate file against all criteria"""
        config = self.get_config(metadata.source)

        if not self.validate_file_extension(metadata.filename, config):
            raise FileValidationError("Invalid file extension")

        if not self.validate_file_type(metadata, config):
            raise FileValidationError("Invalid file type")

        if not self.validate_file_content(content, metadata, config):
            raise FileValidationError("Invalid file content or headers")

        return True

    def create_file(self, content: bytes, metadata: CreateFileRequest) -> File:
        """Create a new file record and save the file"""
        with db_transaction(self.db):
            file = File(
                id=str(uuid4()),
                filename=metadata.filename,
                filepath=os.path.join(FileConfig.TEMP_DIR, metadata.filename),
                type=metadata.type,
                source=metadata.source,
                filesize_kb=round(len(content) / 1024, 2),
                status='pending',
                service=metadata.service
            )
            self.db.add(file)

            os.makedirs(FileConfig.TEMP_DIR, exist_ok=True)
            with open(file.filepath, 'wb') as f:
                f.write(content)

        return file

    def get_file(self, file_id: str) -> File:
        """Get a file by ID"""
        file = self.db.query(File).filter(File.id == file_id).first()
        if not file:
            raise FileValidationError(f"File with id {file_id} not found")
        return file

    def get_files(self, status: Optional[str] = None) -> List[File]:
        """Get all files, optionally filtered by status"""
        query = self.db.query(File)
        if status:
            query = query.filter(File.status == status)
        return query.all()

    def get_staged_products(self, file_id: str) -> List[StagedFileProduct]:
        """Get staged products for a file"""
        return self.db.query(StagedFileProduct).filter(
            StagedFileProduct.file_id == file_id
        ).all()

    def delete_file(self, file_id: str) -> File:
        """Mark a file as deleted and remove associated staged products"""
        file = self.get_file(file_id)
        staged_products = self.get_staged_products(file_id)

        with db_transaction(self.db):
            file.status = 'deleted'
            for staged_product in staged_products:
                self.db.delete(staged_product)

        return file

    def get_file_content(self, file_id: str) -> bytes:
        """Get the content of a file"""
        file = self.get_file(file_id)
        try:
            with open(file.filepath, 'rb') as f:
                return f.read()
        except IOError as e:
            logger.error(f"Error reading file {file_id}: {str(e)}")
            raise FileValidationError(f"Could not read file content for {file_id}")
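For context, a minimal sketch of the validate-then-create flow the service above expects from callers; the metadata field values are illustrative assumptions, not values from this commit.

# Sketch only: the field values are illustrative assumptions.
from app.schemas.file import CreateFileRequest
from app.services.file import FileService, FileValidationError

file_service = FileService(db)  # db: an existing SQLAlchemy Session
metadata = CreateFileRequest(
    filename="scan.csv",
    type="scan_export_rare",
    source="manabox",
    service=None,
)
with open("scan.csv", "rb") as f:
    content = f.read()
try:
    file_service.validate_file(content, metadata)  # raises FileValidationError on bad input
    record = file_service.create_file(content, metadata)
except FileValidationError as e:
    print(f"rejected: {e}")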
app/services/inventory.py (new file, 90 lines)
@@ -0,0 +1,90 @@
from typing import Dict

from sqlalchemy.orm import Session
from sqlalchemy.exc import SQLAlchemyError

from app.db.models import Product, Inventory
from app.schemas.inventory import UpdateInventoryResponse
from app.db.utils import db_transaction


class InventoryService:
    """Service class for managing product inventory operations."""

    def __init__(self, db: Session) -> None:
        """
        Initialize the InventoryService.

        Args:
            db: SQLAlchemy database session
        """
        self.db = db

    def add_inventory(self, product: Product, quantity: int) -> Inventory:
        """
        Add or update inventory for a product.

        Args:
            product: Product model instance
            quantity: Quantity to add to inventory

        Returns:
            Updated Inventory model instance
        """
        inventory = self.db.query(Inventory).filter(
            Inventory.product_id == product.id
        ).first()

        if inventory is None:
            inventory = Inventory(
                product_id=product.id,
                quantity=quantity
            )
            self.db.add(inventory)
        else:
            inventory.quantity += quantity

        return inventory

    def process_staged_products(
        self,
        product_data: Dict[Product, int]
    ) -> UpdateInventoryResponse:
        """
        Process multiple products and update their inventory.

        Args:
            product_data: Dictionary mapping Products to their quantities

        Returns:
            Response indicating success status
        """
        try:
            with db_transaction(self.db):
                for product, quantity in product_data.items():
                    self.add_inventory(product, quantity)
            return UpdateInventoryResponse(success=True)
        except SQLAlchemyError:
            return UpdateInventoryResponse(success=False)

    def add_sealed_box_to_inventory(
        self,
        product: Product,
        quantity: int
    ) -> UpdateInventoryResponse:
        """
        Add sealed box inventory for a single product.

        Args:
            product: Product model instance
            quantity: Quantity to add to inventory

        Returns:
            Response indicating success status
        """
        try:
            with db_transaction(self.db):
                self.add_inventory(product, quantity)
            return UpdateInventoryResponse(success=True)
        except SQLAlchemyError:
            return UpdateInventoryResponse(success=False)
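Worth noting about the service above: add_inventory is an upsert keyed on product_id and does not commit on its own; callers are expected to wrap it in db_transaction. A small sketch of the accumulate-on-repeat behavior:

# Sketch only: product is assumed to be an app.db.models.Product already
# attached to the session.
from app.db.utils import db_transaction
from app.services.inventory import InventoryService

inventory_service = InventoryService(db)
with db_transaction(db):
    inventory_service.add_inventory(product, 3)  # creates the row, quantity=3
    inventory_service.add_inventory(product, 2)  # same row, quantity becomes 5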
app/services/pricing.py (new file, 219 lines)
@@ -0,0 +1,219 @@
from sqlalchemy.orm import Session
from app.db.models import File, CardTCGPlayer, Price
from app.services.util._dataframe import TCGPlayerPricingRow, DataframeUtil
from app.services.file import FileService
from app.services.tcgplayer import TCGPlayerService
from uuid import uuid4
from app.db.utils import db_transaction
from typing import List, Dict
import pandas as pd
import logging

logger = logging.getLogger(__name__)


class PricingService:
    def __init__(self, db: Session, file_service: FileService, tcgplayer_service: TCGPlayerService):
        self.db = db
        self.file_service = file_service
        self.tcgplayer_service = tcgplayer_service
        self.df_util = DataframeUtil()

    # Takes a TCGPlayer pricing export covering all set ids and loads it into
    # the price table. Can be run as needed or on a schedule.
    def get_pricing_export_content(self, file: File = None) -> bytes:
        if file:
            file_content = self.file_service.get_file_content(file.id)
        else:
            file = self.tcgplayer_service.get_pricing_export_for_all_products()
            file_content = self.file_service.get_file_content(file.id)
        return file_content

    def load_pricing_csv_content_to_db(self, file_content: bytes):
        try:
            if not file_content:
                raise ValueError("No file content provided")

            price_types = {
                "tcg_market_price": "tcg_market_price",
                "tcg_direct_low": "tcg_direct_low",
                "tcg_low_price_with_shipping": "tcg_low_price_with_shipping",
                "tcg_low_price": "tcg_low_price",
                "tcg_marketplace_price": "listed_price"
            }

            required_columns = ["tcgplayer_id"] + list(price_types.keys())
            df = self.df_util.csv_bytes_to_df(file_content)

            # Validate columns
            missing_columns = set(required_columns) - set(df.columns)
            if missing_columns:
                raise ValueError(f"Missing required columns: {missing_columns}")

            # Process in true batches
            for i in range(0, len(df), 1000):
                batch = df.iloc[i:i+1000]
                pricing_rows = [TCGPlayerPricingRow(row) for _, row in batch.iterrows()]

                # Query cards for this batch only
                tcgplayer_ids = [row.tcgplayer_id for row in pricing_rows]
                batch_cards = self.db.query(CardTCGPlayer).filter(
                    CardTCGPlayer.tcgplayer_id.in_(tcgplayer_ids)
                ).all()

                existing_cards = {card.tcgplayer_id: card for card in batch_cards}

                new_prices = []
                for row in pricing_rows:
                    if row.tcgplayer_id not in existing_cards:
                        continue

                    card = existing_cards[row.tcgplayer_id]
                    row_prices = [
                        Price(
                            id=str(uuid4()),
                            product_id=card.product_id,
                            marketplace_id=None,
                            type=price_type,
                            price=getattr(row, col_name)
                        )
                        for col_name, price_type in price_types.items()
                        if getattr(row, col_name, None) is not None and getattr(row, col_name) > 0
                    ]
                    new_prices.extend(row_prices)

                # Save each batch separately
                if new_prices:
                    with db_transaction(self.db):
                        self.db.bulk_save_objects(new_prices)

        except Exception as e:
            logger.error(f"Failed to load pricing CSV: {e}")
            raise

    def cron_load_prices(self, file: File = None):
        file_content = self.get_pricing_export_content(file)
        self.load_pricing_csv_content_to_db(file_content)

    def get_all_prices_for_products(self, product_ids: List[str]) -> Dict[str, Dict[str, float]]:
        all_prices = self.db.query(Price).filter(
            Price.product_id.in_(product_ids)
        ).all()

        price_lookup = {}
        for price in all_prices:
            if price.product_id not in price_lookup:
                price_lookup[price.product_id] = {}
            price_lookup[price.product_id][price.type] = price.price
        return price_lookup

    def apply_price_to_df_columns(self, row: pd.Series, price_lookup: Dict[str, Dict[str, float]]) -> pd.Series:
        product_prices = price_lookup.get(row['product_id'], {})
        for price_type, price in product_prices.items():
            row[price_type] = price
        return row

    def default_pricing_algo(self, row: pd.Series) -> pd.Series:
        """Default pricing algorithm with tiered pricing rules"""
        tcg_low = row.get('tcg_low_price')
        tcg_low_shipping = row.get('tcg_low_price_with_shipping')

        if pd.isna(tcg_low) or pd.isna(tcg_low_shipping):
            logger.warning(f"Missing pricing data for row: {row}")
            row['new_price'] = None
            return row

        # Apply pricing rules
        if tcg_low < 0.35:
            new_price = 0.35
        elif tcg_low < 5 or tcg_low_shipping < 5:
            new_price = round(tcg_low * 1.25, 2)
        elif tcg_low_shipping > 25:
            new_price = round(tcg_low_shipping * 1.025, 2)
        else:
            new_price = round(tcg_low_shipping * 1.10, 2)

        row['new_price'] = new_price
        return row

    def apply_pricing_algo(self, row: pd.Series, pricing_algo: callable = None) -> pd.Series:
        """Apply a pricing algorithm to a row, defaulting to default_pricing_algo"""
        if pricing_algo is None:
            pricing_algo = self.default_pricing_algo
        return pricing_algo(row)

    def generate_tcgplayer_inventory_update_file_with_pricing(self, open_box_ids: List[str] = None) -> bytes:
        desired_columns = [
            'TCGplayer Id', 'Product Line', 'Set Name', 'Product Name',
            'Title', 'Number', 'Rarity', 'Condition', 'TCG Market Price',
            'TCG Direct Low', 'TCG Low Price With Shipping', 'TCG Low Price',
            'Total Quantity', 'Add to Quantity', 'TCG Marketplace Price', 'Photo URL'
        ]

        if open_box_ids:
            # Get initial dataframe
            update_type = 'add'
            df = self.tcgplayer_service.open_box_cards_to_tcgplayer_inventory_df(open_box_ids)
        else:
            update_type = 'update'
            df = self.tcgplayer_service.get_inventory_df('live')
            # Remove rows with a total quantity of 0
            df = df[df['total_quantity'] != 0]
        tcgplayer_ids = df['tcgplayer_id'].unique().tolist()

        # Make a single query to get all matching records
        product_id_mapping = {
            card.tcgplayer_id: card.product_id
            for card in self.db.query(CardTCGPlayer)
            .filter(CardTCGPlayer.tcgplayer_id.in_(tcgplayer_ids))
            .all()
        }

        # Map the ids using the dictionary
        df['product_id'] = df['tcgplayer_id'].map(product_id_mapping)

        price_lookup = self.get_all_prices_for_products(df['product_id'].unique())

        # Apply price columns
        df = df.apply(lambda row: self.apply_price_to_df_columns(row, price_lookup), axis=1)

        # Apply pricing algorithm
        df = df.apply(self.apply_pricing_algo, axis=1)

        # If the update type is 'update', remove rows where new_price == listed_price
        if update_type == 'update':
            df = df[df['new_price'] != df['listed_price']]

        # Set marketplace price
        df['TCG Marketplace Price'] = df['new_price']

        column_mapping = {
            'tcgplayer_id': 'TCGplayer Id',
            'product_line': 'Product Line',
            'set_name': 'Set Name',
            'product_name': 'Product Name',
            'title': 'Title',
            'number': 'Number',
            'rarity': 'Rarity',
            'condition': 'Condition',
            'tcg_market_price': 'TCG Market Price',
            'tcg_direct_low': 'TCG Direct Low',
            'tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
            'tcg_low_price': 'TCG Low Price',
            'total_quantity': 'Total Quantity',
            'add_to_quantity': 'Add to Quantity',
            'photo_url': 'Photo URL'
        }
        df = df.rename(columns=column_mapping)

        # Select the desired columns in order
        df = df[desired_columns]

        # Remove any rows with no price
        #df = df[df['TCG Marketplace Price'] != 0]
        #df = df[df['TCG Marketplace Price'].notna()]

        # Convert to CSV bytes
        csv_bytes = self.df_util.df_to_csv_bytes(df)

        return csv_bytes
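The rules in default_pricing_algo can be restated as a pure function, which makes the price bands easy to sanity-check; this standalone sketch mirrors the logic above and is runnable on its own.

# Standalone restatement of default_pricing_algo's price bands.
def expected_price(tcg_low: float, tcg_low_shipping: float) -> float:
    if tcg_low < 0.35:
        return 0.35                                # bulk floor
    if tcg_low < 5 or tcg_low_shipping < 5:
        return round(tcg_low * 1.25, 2)            # cheap cards: 25% over low
    if tcg_low_shipping > 25:
        return round(tcg_low_shipping * 1.025, 2)  # expensive: 2.5% over shipped low
    return round(tcg_low_shipping * 1.10, 2)       # mid-range: 10% over shipped low

assert expected_price(0.20, 1.50) == 0.35
assert expected_price(3.00, 4.20) == 3.75
assert expected_price(28.00, 30.00) == 30.75
assert expected_price(10.00, 12.00) == 13.20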
app/services/product.py (new file, 183 lines)
@@ -0,0 +1,183 @@
from logging import getLogger
from uuid import uuid4
from pandas import DataFrame
from sqlalchemy.orm import Session

from app.db.utils import db_transaction
from app.db.models import CardManabox, CardTCGPlayer, StagedFileProduct, TCGPlayerGroups
from app.services.util._dataframe import ManaboxRow, DataframeUtil
from app.services.file import FileService
from app.services.tcgplayer import TCGPlayerService
from app.services.storage import StorageService

logger = getLogger(__name__)


class ProductService:
    def __init__(
        self,
        db: Session,
        file_service: FileService,
        tcgplayer_service: TCGPlayerService,
        storage_service: StorageService,
    ):
        self.db = db
        self.file_service = file_service
        self.tcgplayer_service = tcgplayer_service
        self.storage_service = storage_service
        self.df_util = DataframeUtil()

    def create_staged_file_product(
        self, file_id: str, card_manabox: CardManabox, row: ManaboxRow
    ) -> StagedFileProduct:
        """Create a staged file product entry.

        Args:
            file_id: The ID of the file being processed
            card_manabox: The Manabox card details
            row: The row data from the Manabox file

        Returns:
            The created staged file product
        """
        staged_product = StagedFileProduct(
            id=str(uuid4()),
            file_id=file_id,
            product_id=card_manabox.product_id,
            quantity=row.quantity,
        )
        with db_transaction(self.db):
            self.db.add(staged_product)
        return staged_product

    def create_card_manabox(
        self, manabox_row: ManaboxRow, card_tcgplayer: CardTCGPlayer
    ) -> CardManabox:
        """Create a Manabox card entry.

        Args:
            manabox_row: The row data from the Manabox file
            card_tcgplayer: The TCGPlayer card details

        Returns:
            The created Manabox card
        """
        if not card_tcgplayer:
            group = (
                self.db.query(TCGPlayerGroups)
                .filter(TCGPlayerGroups.abbreviation == manabox_row.set_code)
                .first()
            )
            card_tcgplayer = self.tcgplayer_service.get_card_tcgplayer_from_manabox_row(
                manabox_row, group.group_id
            )

        card_manabox = CardManabox(
            product_id=card_tcgplayer.product_id,
            name=manabox_row.name,
            set_code=manabox_row.set_code,
            set_name=manabox_row.set_name,
            collector_number=manabox_row.collector_number,
            foil=manabox_row.foil,
            rarity=manabox_row.rarity,
            manabox_id=manabox_row.manabox_id,
            scryfall_id=manabox_row.scryfall_id,
            condition=manabox_row.condition,
            language=manabox_row.language,
        )

        with db_transaction(self.db):
            self.db.add(card_manabox)
        return card_manabox

    def card_manabox_lookup_create_if_not_exist(
        self, manabox_row: ManaboxRow
    ) -> CardManabox:
        """Look up a Manabox card or create it if it doesn't exist.

        Args:
            manabox_row: The row data from the Manabox file

        Returns:
            The existing or newly created Manabox card
        """
        card_manabox = (
            self.db.query(CardManabox)
            .filter(
                CardManabox.name == manabox_row.name,
                CardManabox.set_code == manabox_row.set_code,
                CardManabox.set_name == manabox_row.set_name,
                CardManabox.collector_number == manabox_row.collector_number,
                CardManabox.foil == manabox_row.foil,
                CardManabox.rarity == manabox_row.rarity,
                CardManabox.manabox_id == manabox_row.manabox_id,
                CardManabox.scryfall_id == manabox_row.scryfall_id,
                CardManabox.condition == manabox_row.condition,
                CardManabox.language == manabox_row.language,
            )
            .first()
        )

        if not card_manabox:
            logger.debug(f"card_manabox not found for row: {manabox_row.__dict__}")
            group = (
                self.db.query(TCGPlayerGroups)
                .filter(TCGPlayerGroups.abbreviation == manabox_row.set_code)
                .first()
            )
            if not group:
                logger.error(f"Group not found for set code: {manabox_row.set_code}")
                logger.error(f"Row data: {manabox_row.__dict__}")
                return None

            card_tcgplayer = self.tcgplayer_service.get_card_tcgplayer_from_manabox_row(
                manabox_row, group.group_id
            )
            if not card_tcgplayer:
                logger.error(f"Card not found for row: {manabox_row.__dict__}")
                return None
            card_manabox = self.create_card_manabox(manabox_row, card_tcgplayer)

        return card_manabox

    def process_manabox_df(self, df: DataFrame, file_id: str) -> None:
        """Process a Manabox dataframe.

        Args:
            df: The Manabox dataframe to process
            file_id: The ID of the file being processed
        """
        for _, row in df.iterrows():
            manabox_row = ManaboxRow(row)
            card_manabox = self.card_manabox_lookup_create_if_not_exist(manabox_row)
            if not card_manabox:
                continue
            self.create_staged_file_product(file_id, card_manabox, row)

    def bg_process_manabox_file(self, file_id: str) -> None:
        """Process a Manabox file in the background.

        Args:
            file_id: The ID of the file to process

        Raises:
            Exception: If there's an error during processing
        """
        try:
            manabox_file = self.file_service.get_file(file_id)
            manabox_df = self.df_util.file_to_df(manabox_file)
            self.process_manabox_df(manabox_df, file_id)

            with db_transaction(self.db):
                manabox_file.status = "completed"

        except Exception as e:
            with db_transaction(self.db):
                manabox_file.status = "error"
            raise e

        try:
            self.storage_service.store_staged_products_for_file(file_id)
        except Exception as e:
            logger.error(f"Error creating storage records: {str(e)}")
            raise e
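End to end, the ingestion path above reduces to one call; a sketch assuming the dependent services are already constructed.

# Sketch only: assumes db, file_service, tcgplayer_service, and
# storage_service already exist; the file UUID is a placeholder.
product_service = ProductService(db, file_service, tcgplayer_service, storage_service)
# Looks up or creates CardManabox rows, stages quantities per file, marks the
# file "completed", then creates storage records for the staged products.
product_service.bg_process_manabox_file("<file-uuid>")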
app/services/storage.py (new file, 256 lines)
@@ -0,0 +1,256 @@
from uuid import uuid4
from typing import List, TypedDict
from sqlalchemy.orm import Session

from app.db.utils import db_transaction
from app.db.models import (
    Warehouse,
    User,
    StagedFileProduct,
    StorageBlock,
    ProductBlock,
    File,
    CardTCGPlayer
)


class ProductAttributes(TypedDict):
    """Attributes for a product to be stored."""
    product_id: str
    card_number: str


class StorageService:
    """Service for managing product storage and warehouse operations."""

    def __init__(self, db: Session) -> None:
        """Initialize the storage service.

        Args:
            db: SQLAlchemy database session
        """
        self.db = db

    def get_or_create_user(self, username: str) -> User:
        """Get an existing user or create a new one if not found.

        Args:
            username: Username to look up or create

        Returns:
            The existing or newly created User
        """
        user = self.db.query(User).filter(User.username == username).first()
        if user is None:
            user = User(
                id=str(uuid4()),
                username=username
            )
            with db_transaction(self.db):
                self.db.add(user)
        return user

    def get_or_create_warehouse(self) -> Warehouse:
        """Get the default warehouse or create it if it doesn't exist.

        Returns:
            The existing or newly created Warehouse
        """
        warehouse = self.db.query(Warehouse).first()
        user = self.get_or_create_user('admin')
        if warehouse is None:
            warehouse = Warehouse(
                id=str(uuid4()),
                user_id=user.id
            )
            with db_transaction(self.db):
                self.db.add(warehouse)
        return warehouse

    def get_staged_product(self, file_id: str) -> List[StagedFileProduct]:
        """Get all staged products for a given file.

        Args:
            file_id: ID of the file to get staged products for

        Returns:
            List of staged products
        """
        return self.db.query(StagedFileProduct).filter(
            StagedFileProduct.file_id == file_id
        ).all()

    def get_storage_block_name(self, warehouse: Warehouse, file_id: str) -> str:
        """Generate a unique name for a new storage block.

        Args:
            warehouse: Warehouse the block belongs to
            file_id: ID of the file being processed

        Returns:
            Unique storage block name

        Raises:
            ValueError: If no file is found with the given ID
        """
        current_file = self.db.query(File).filter(File.id == file_id).first()
        if not current_file:
            raise ValueError(f"No file found with id {file_id}")

        storage_block_type = 'rare' if 'rare' in current_file.type else 'common'
        prefix = storage_block_type[0]

        latest_block = (
            self.db.query(StorageBlock)
            .filter(
                StorageBlock.warehouse_id == warehouse.id,
                StorageBlock.type == storage_block_type
            )
            .order_by(StorageBlock.date_created.desc())
            .first()
        )

        start_number = 1 if not latest_block else int(latest_block.name[1:]) + 1

        while True:
            new_name = f"{prefix}{start_number}"
            exists = (
                self.db.query(StorageBlock)
                .filter(
                    StorageBlock.warehouse_id == warehouse.id,
                    StorageBlock.name == new_name
                )
                .first()
            )

            if not exists:
                return new_name
            start_number += 1

    def create_storage_block(self, warehouse: Warehouse, file_id: str) -> StorageBlock:
        """Create a new storage block for the given warehouse and file.

        Args:
            warehouse: Warehouse to create the block in
            file_id: ID of the file being processed

        Returns:
            Newly created StorageBlock

        Raises:
            ValueError: If no file is found with the given ID
        """
        current_file = self.db.query(File).filter(File.id == file_id).first()
        if not current_file:
            raise ValueError(f"No file found with id {file_id}")

        storage_block_type = 'rare' if 'rare' in current_file.type else 'common'

        storage_block = StorageBlock(
            id=str(uuid4()),
            warehouse_id=warehouse.id,
            name=self.get_storage_block_name(warehouse, file_id),
            type=storage_block_type
        )
        with db_transaction(self.db):
            self.db.add(storage_block)
        return storage_block

    def add_staged_product_to_product_block(
        self,
        staged_product: StagedFileProduct,
        storage_block: StorageBlock,
        product_attributes: ProductAttributes,
        block_index: int
    ) -> ProductBlock:
        """Create a new ProductBlock for a single unit of a staged product.

        Args:
            staged_product: The staged product to store
            storage_block: The block to store the product in
            product_attributes: Additional product attributes
            block_index: Index within the storage block

        Returns:
            Newly created ProductBlock
        """
        product_block = ProductBlock(
            id=str(uuid4()),
            product_id=staged_product.product_id,
            block_id=storage_block.id,
            block_index=block_index
        )

        with db_transaction(self.db):
            self.db.add(product_block)

        return product_block

    def get_staged_product_attributes_for_storage(
        self,
        staged_product: StagedFileProduct
    ) -> List[ProductAttributes]:
        """Get attributes for each unit of a staged product.

        Args:
            staged_product: The staged product to get attributes for

        Returns:
            List of attributes for each unit of the product
        """
        result = (
            self.db.query(
                StagedFileProduct.product_id,
                StagedFileProduct.quantity,
                CardTCGPlayer.number
            )
            .join(CardTCGPlayer, CardTCGPlayer.product_id == StagedFileProduct.product_id)
            .filter(StagedFileProduct.id == staged_product.id)
            .first()
        )

        if not result:
            return []

        return [
            ProductAttributes(
                product_id=result.product_id,
                card_number=result.number
            )
            for _ in range(result.quantity)
        ]

    def store_staged_products_for_file(self, file_id: str) -> StorageBlock:
        """Store all staged products for a file in a new storage block.

        Args:
            file_id: ID of the file containing staged products

        Returns:
            The newly created StorageBlock containing all products
        """
        warehouse = self.get_or_create_warehouse()
        storage_block = self.create_storage_block(warehouse, file_id)
        staged_products = self.get_staged_product(file_id)

        # Collect all product attributes first
        all_product_attributes = []
        for staged_product in staged_products:
            product_attributes_list = self.get_staged_product_attributes_for_storage(staged_product)
            for attrs in product_attributes_list:
                all_product_attributes.append((staged_product, attrs))

        # Sort by card number as integer to determine block indices
        sorted_attributes = sorted(
            all_product_attributes,
            key=lambda x: int(''.join(filter(str.isdigit, x[1]['card_number'])))
        )

        # Add products with correct block indices
        for block_index, (staged_product, product_attributes) in enumerate(sorted_attributes, 1):
            self.add_staged_product_to_product_block(
                staged_product=staged_product,
                storage_block=storage_block,
                product_attributes=product_attributes,
                block_index=block_index
            )

        return storage_block
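The naming scheme in get_storage_block_name is simple enough to restate standalone: first letter of the block type plus the next unused sequence number within the warehouse. A runnable sketch of the same idea:

# Standalone sketch of the block-naming scheme
# ('rare' -> r1, r2, ...; 'common' -> c1, c2, ...).
def next_block_name(block_type: str, existing_names: set) -> str:
    prefix = block_type[0]
    n = 1
    while f"{prefix}{n}" in existing_names:
        n += 1
    return f"{prefix}{n}"

assert next_block_name("rare", {"r1", "r2"}) == "r3"
assert next_block_name("common", set()) == "c1"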
app/services/task.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from apscheduler.schedulers.background import BackgroundScheduler
import logging
from typing import Dict, Callable
from sqlalchemy.orm import Session
from app.services.product import ProductService
from app.db.models import File
from app.services.pricing import PricingService


class TaskService:
    def __init__(self, db: Session, product_service: ProductService, pricing_service: PricingService):
        self.scheduler = BackgroundScheduler()
        self.logger = logging.getLogger(__name__)
        self.tasks: Dict[str, Callable] = {}
        self.db = db
        self.product_service = product_service
        self.pricing_service = pricing_service

    async def start(self):
        self.scheduler.start()
        self.logger.info("Task scheduler started.")
        self.register_scheduled_tasks()
        # self.pricing_service.generate_tcgplayer_inventory_update_file_with_pricing(['e20cc342-23cb-4593-89cb-56a0cb3ed3f3'])

    def register_scheduled_tasks(self):
        self.scheduler.add_job(self.hourly_pricing, 'cron', minute='45')
        self.logger.info("Scheduled tasks registered.")

    def hourly_pricing(self):
        self.logger.info("Running hourly pricing task")
        self.pricing_service.cron_load_prices()
        self.logger.info("Finished hourly pricing task")

    async def process_manabox_file(self, file: File):
        self.logger.info("Processing ManaBox file")
        self.product_service.bg_process_manabox_file(file.id)
        self.logger.info("Finished processing ManaBox file")
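A sketch of wiring TaskService into application startup; the startup-hook framing and the pre-existing db/service objects are assumptions, not something this commit defines.

# Sketch only: db, product_service, and pricing_service are assumed to exist.
import asyncio

async def on_startup():
    task_service = TaskService(db, product_service, pricing_service)
    await task_service.start()  # starts APScheduler; pricing job fires at :45 each hour

asyncio.run(on_startup())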
app/services/tcgplayer.py (new file, 592 lines)
@@ -0,0 +1,592 @@
|
||||
from app.db.models import TCGPlayerGroups, CardTCGPlayer, Product, Card, File, Inventory, OpenBox, OpenBoxCard
|
||||
import requests
|
||||
from app.services.util._dataframe import TCGPlayerPricingRow, DataframeUtil, ManaboxRow
|
||||
from app.services.file import FileService
|
||||
from app.services.inventory import InventoryService
|
||||
from sqlalchemy.orm import Session
|
||||
from app.db.utils import db_transaction
|
||||
from uuid import uuid4 as uuid
|
||||
import browser_cookie3
|
||||
import webbrowser
|
||||
from typing import Optional, Dict ,List
|
||||
from enum import Enum
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
import urllib.parse
|
||||
import json
|
||||
from datetime import datetime
|
||||
import time
|
||||
from typing import List, Dict, Optional
|
||||
import pandas as pd
|
||||
from sqlalchemy.exc import SQLAlchemyError
|
||||
from app.schemas.file import CreateFileRequest
|
||||
import os
|
||||
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class Browser(Enum):
|
||||
"""Supported browser types for cookie extraction"""
|
||||
BRAVE = "brave"
|
||||
CHROME = "chrome"
|
||||
FIREFOX = "firefox"
|
||||
|
||||
@dataclass
|
||||
class TCGPlayerConfig:
|
||||
"""Configuration for TCGPlayer API interactions"""
|
||||
tcgplayer_base_url: str = "https://store.tcgplayer.com"
|
||||
tcgplayer_login_path: str = "/oauth/login"
|
||||
staged_inventory_download_path: str = "/Admin/Pricing/DownloadStagedInventoryExportCSV?type=Pricing"
|
||||
live_inventory_download_path = "/Admin/Pricing/DownloadMyExportCSV?type=Pricing"
|
||||
pricing_export_path: str = "/admin/pricing/downloadexportcsv"
|
||||
max_retries: int = 1
|
||||
|
||||
class TCGPlayerService:
|
||||
def __init__(self, db: Session,
|
||||
file_service: FileService,
|
||||
config: TCGPlayerConfig=TCGPlayerConfig(),
|
||||
browser_type: Browser=Browser.BRAVE):
|
||||
self.db = db
|
||||
self.config = config
|
||||
self.browser_type = browser_type
|
||||
self.cookies = None
|
||||
self.previous_request_time = None
|
||||
self.df_util = DataframeUtil()
|
||||
self.file_service = file_service
|
||||
|
||||
def _insert_groups(self, groups):
|
||||
for group in groups:
|
||||
db_group = TCGPlayerGroups(
|
||||
id=str(uuid()),
|
||||
group_id=group['groupId'],
|
||||
name=group['name'],
|
||||
abbreviation=group['abbreviation'],
|
||||
is_supplemental=group['isSupplemental'],
|
||||
published_on=group['publishedOn'],
|
||||
modified_on=group['modifiedOn'],
|
||||
category_id=group['categoryId']
|
||||
)
|
||||
self.db.add(db_group)
|
||||
|
||||
def populate_tcgplayer_groups(self):
|
||||
group_endpoint = "https://tcgcsv.com/tcgplayer/1/groups"
|
||||
response = requests.get(group_endpoint)
|
||||
response.raise_for_status()
|
||||
groups = response.json()['results']
|
||||
# manually add broken groups
|
||||
manual_groups = [
|
||||
{
|
||||
"groupId": 2422,
|
||||
"name": "Modern Horizons 2 Timeshifts",
|
||||
"abbreviation": "H2R",
|
||||
"isSupplemental": "0",
|
||||
"publishedOn": "2018-11-08T00:00:00",
|
||||
"modifiedOn": "2018-11-08T00:00:00",
|
||||
"categoryId": 1
|
||||
},
|
||||
{
|
||||
"groupId": 52,
|
||||
"name": "Store Championships",
|
||||
"abbreviation": "SCH",
|
||||
"isSupplemental": "1",
|
||||
"publishedOn": "2007-07-14T00:00:00",
|
||||
"modifiedOn": "2007-07-14T00:00:00",
|
||||
"categoryId": 1
|
||||
}
|
||||
]
|
||||
groups.extend(manual_groups)
|
||||
# Insert groups into db
|
||||
with db_transaction(self.db):
|
||||
self._insert_groups(groups)
|
||||
|
||||
def get_cookies_from_file(self) -> Dict:
|
||||
# check if cookies file exists
|
||||
if not os.path.exists('cookies/tcg_cookies.json'):
|
||||
raise ValueError("Cookies file not found")
|
||||
with open('cookies/tcg_cookies.json', 'r') as f:
|
||||
logger.debug("Loading cookies from file")
|
||||
cookies = json.load(f)
|
||||
return cookies
|
||||
|
||||
def _get_browser_cookies(self) -> Optional[Dict]:
|
||||
"""Retrieve cookies from the specified browser"""
|
||||
try:
|
||||
cookie_getter = getattr(browser_cookie3, self.browser_type.value, None)
|
||||
if not cookie_getter:
|
||||
raise ValueError(f"Unsupported browser type: {self.browser_type.value}")
|
||||
return cookie_getter()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get browser cookies: {str(e)}")
|
||||
return None
|
||||
|
||||
def is_in_docker(self) -> bool:
|
||||
"""Check if we're running inside a Docker container using multiple methods"""
|
||||
# Method 1: Check cgroup
|
||||
try:
|
||||
with open('/proc/1/cgroup', 'r') as f:
|
||||
content = f.read().lower()
|
||||
if any(container_id in content for container_id in ['docker', 'containerd', 'kubepods']):
|
||||
logger.debug("Docker detected via cgroup")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.debug(f"Could not read cgroup file: {e}")
|
||||
|
||||
# Method 2: Check /.dockerenv file
|
||||
if os.path.exists('/.dockerenv'):
|
||||
logger.debug("Docker detected via /.dockerenv file")
|
||||
return True
|
||||
|
||||
# Method 3: Check environment variables
|
||||
docker_env = any(os.environ.get(var, False) for var in [
|
||||
'DOCKER_CONTAINER',
|
||||
'IN_DOCKER',
|
||||
'KUBERNETES_SERVICE_HOST', # For k8s
|
||||
'DOCKER_HOST'
|
||||
])
|
||||
if docker_env:
|
||||
logger.debug("Docker detected via environment variables")
|
||||
return True
|
||||
|
||||
# Method 4: Check container runtime
|
||||
try:
|
||||
with open('/proc/self/mountinfo', 'r') as f:
|
||||
content = f.read().lower()
|
||||
if any(rt in content for rt in ['docker', 'containerd', 'kubernetes']):
|
||||
logger.debug("Docker detected via mountinfo")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.debug(f"Could not read mountinfo: {e}")
|
||||
|
||||
logger.debug("No Docker environment detected")
|
||||
return False
|
||||
|
||||
def _send_request(self, url: str, method: str, data=None, except_302=False) -> requests.Response:
|
||||
"""Send a request with the specified cookies"""
|
||||
# Rate limiting logic
|
||||
if self.previous_request_time:
|
||||
time_diff = (datetime.now() - self.previous_request_time).total_seconds()
|
||||
if time_diff < 10:
|
||||
logger.info(f"Waiting 10 seconds before next request...")
|
||||
time.sleep(10 - time_diff)
|
||||
|
||||
headers = self._set_headers(method)
|
||||
|
||||
# Move cookie initialization outside and make it more explicit
|
||||
if not self.cookies:
|
||||
if self.is_in_docker():
|
||||
logger.debug("Running in Docker - using cookies from file")
|
||||
self.cookies = self.get_cookies_from_file()
|
||||
else:
|
||||
logger.debug("Not in Docker - using browser cookies")
|
||||
self.cookies = self._get_browser_cookies()
|
||||
|
||||
if not self.cookies:
|
||||
raise ValueError("Failed to retrieve cookies")
|
||||
|
||||
try:
|
||||
#logger.info(f"debug: request url {url}, method {method}, data {data}")
|
||||
response = requests.request(method, url, headers=headers, cookies=self.cookies, data=data)
|
||||
response.raise_for_status()
|
||||
|
||||
if response.status_code == 302 and not except_302:
|
||||
logger.warning("Redirecting to login page...")
|
||||
self._refresh_authentication()
|
||||
return self._send_request(url, method, except_302=True)
|
||||
|
||||
elif response.status_code == 302 and except_302:
|
||||
raise ValueError("Redirected to login page after authentication refresh")
|
||||
|
||||
self.previous_request_time = datetime.now()
|
||||
|
||||
return response
|
||||
|
||||
except requests.RequestException as e:
|
||||
logger.error(f"Request failed: {str(e)}")
|
||||
return None
|
||||
|
||||
def _set_headers(self, method: str) -> Dict:
|
||||
base_headers = {
|
||||
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8',
|
||||
'accept-language': 'en-US,en;q=0.8',
|
||||
'priority': 'u=0, i',
|
||||
'referer': 'https://store.tcgplayer.com/admin/pricing',
|
||||
'sec-ch-ua': '"Not A(Brand";v="8", "Chromium";v="132", "Brave";v="132"',
|
||||
'sec-ch-ua-mobile': '?0',
|
||||
'sec-ch-ua-platform': '"macOS"',
|
||||
'sec-fetch-dest': 'document',
|
||||
'sec-fetch-mode': 'navigate',
|
||||
'sec-fetch-site': 'same-origin',
|
||||
'sec-fetch-user': '?1',
|
||||
'sec-gpc': '1',
|
||||
'upgrade-insecure-requests': '1',
|
||||
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36'
|
||||
}
|
||||
|
||||
if method == 'POST':
|
||||
post_headers = {
|
||||
'cache-control': 'max-age=0',
|
||||
'content-type': 'application/x-www-form-urlencoded',
|
||||
'origin': 'https://store.tcgplayer.com'
|
||||
}
|
||||
base_headers.update(post_headers)
|
||||
|
||||
return base_headers
|
||||
|
||||
def _set_pricing_export_payload(self, set_name_ids: List[str]) -> Dict:
|
||||
data = {
|
||||
"PricingType": "Pricing",
|
||||
"CategoryId": "1",
|
||||
"SetNameIds": set_name_ids,
|
||||
"ConditionIds": ["1"],
|
||||
"RarityIds": ["0"],
|
||||
"LanguageIds": ["1"],
|
||||
"PrintingIds": ["0"],
|
||||
"CompareAgainstPrice": False,
|
||||
"PriceToCompare": 3,
|
||||
"ValueToCompare": 1,
|
||||
"PriceValueToCompare": None,
|
||||
"MyInventory": False,
|
||||
"ExcludeListos": False,
|
||||
"ExportLowestListingNotMe": False
|
||||
}
|
||||
payload = "model=" + urllib.parse.quote(json.dumps(data))
|
||||
return payload
|
||||
|
||||
def _refresh_authentication(self) -> None:
|
||||
"""Open browser for user to refresh authentication"""
|
||||
login_url = f"{self.config.tcgplayer_base_url}{self.config.tcgplayer_login_path}"
|
||||
logger.info("Opening browser for authentication refresh...")
|
||||
webbrowser.open(login_url)
|
||||
input('Please login and press Enter to continue...')
|
||||
# Clear existing cookies to force refresh
|
||||
self.cookies = None
|
||||
|
||||
def get_inventory_df(self, version: str) -> pd.DataFrame:
|
||||
if version == 'staged':
|
||||
inventory_download_url = f"{self.config.tcgplayer_base_url}{self.config.staged_inventory_download_path}"
|
||||
elif version == 'live':
|
||||
inventory_download_url = f"{self.config.tcgplayer_base_url}{self.config.live_inventory_download_path}"
|
||||
else:
|
||||
raise ValueError("Invalid inventory version")
|
||||
response = self._send_request(inventory_download_url, 'GET')
|
||||
df = self.df_util.csv_bytes_to_df(response.content)
|
||||
return df
|
||||
|
||||
def _get_export_csv(self, set_name_ids: List[str]) -> bytes:
|
||||
"""
|
||||
Download export CSV and save to specified path
|
||||
Returns True if successful, False otherwise
|
||||
"""
|
||||
logger.info(f"Downloading pricing export from tcgplayer with ids {set_name_ids}")
|
||||
payload = self._set_pricing_export_payload(set_name_ids)
|
||||
export_csv_download_url = f"{self.config.tcgplayer_base_url}{self.config.pricing_export_path}"
|
||||
response = self._send_request(export_csv_download_url, method='POST', data=payload)
|
||||
return response.content
|
||||
|
||||
def create_tcgplayer_card(self, row: TCGPlayerPricingRow, group_id: int):
|
||||
# if card already exists, return none
|
||||
card_exists = self.db.query(CardTCGPlayer).filter(
|
||||
CardTCGPlayer.tcgplayer_id == row.tcgplayer_id,
|
||||
CardTCGPlayer.group_id == group_id
|
||||
).first()
|
||||
if card_exists:
|
||||
return card_exists
|
||||
# create product
|
||||
product = Product(
|
||||
id=str(uuid()),
|
||||
type = 'card',
|
||||
product_line = 'mtg'
|
||||
)
|
||||
# create card
|
||||
card = Card(
|
||||
product_id=product.id,
|
||||
)
|
||||
# create Cardtcgplayer
|
||||
tcgcard = CardTCGPlayer(
|
||||
product_id=product.id,
|
||||
group_id=group_id,
|
||||
tcgplayer_id=row.tcgplayer_id,
|
||||
product_line=row.product_line,
|
||||
set_name=row.set_name,
|
||||
product_name=row.product_name,
|
||||
title=row.title,
|
||||
number=row.number,
|
||||
rarity=row.rarity,
|
||||
condition=row.condition
|
||||
)
|
||||
with db_transaction(self.db):
|
||||
self.db.add(product)
|
||||
self.db.add(card)
|
||||
self.db.add(tcgcard)
|
||||
return tcgcard

    def create_tcgplayer_cards_batch(self, rows: list[TCGPlayerPricingRow], set_to_group: dict) -> list[CardTCGPlayer]:
        # Get existing cards in a single query
        existing_cards = {
            (card.tcgplayer_id, card.group_id): card
            for card in self.db.query(CardTCGPlayer).filter(
                CardTCGPlayer.tcgplayer_id.in_([row.tcgplayer_id for row in rows]),
                CardTCGPlayer.group_id.in_([set_to_group[row.set_name] for row in rows])
            ).all()
        }

        new_products = []
        new_cards = []
        new_tcgcards = []

        for row in rows:
            # Get the correct group_id for this row's set
            group_id = set_to_group[row.set_name]

            if (row.tcgplayer_id, group_id) in existing_cards:
                continue

            product_id = str(uuid())

            new_products.append(Product(
                id=product_id,
                type='card',
                product_line='mtg'
            ))

            new_cards.append(Card(
                product_id=product_id,
            ))

            new_tcgcards.append(CardTCGPlayer(
                product_id=product_id,
                group_id=group_id,  # use the group_id for this specific row
                tcgplayer_id=row.tcgplayer_id,
                product_line=row.product_line,
                set_name=row.set_name,
                product_name=row.product_name,
                title=row.title,
                number=row.number,
                rarity=row.rarity,
                condition=row.condition
            ))

            # Batch creation of price objects is disabled for now:
            # row_prices = [
            #     Price(
            #         id=str(uuid()),
            #         product_id=product_id,
            #         marketplace_id=None,
            #         type=price_type,
            #         price=getattr(row, col_name)
            #     )
            #     for col_name, price_type in price_types.items()
            #     if getattr(row, col_name, None) is not None and getattr(row, col_name) > 0
            # ]
            # new_prices.extend(row_prices)

        if new_products:
            with db_transaction(self.db):
                self.db.bulk_save_objects(new_products)
                self.db.bulk_save_objects(new_cards)
                self.db.bulk_save_objects(new_tcgcards)
                # if new_prices:
                #     self.db.bulk_save_objects(new_prices)

        return new_tcgcards
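    # A minimal usage sketch (hypothetical wiring; `svc` is an instance of this
    # service and `rows` are already-parsed TCGPlayerPricingRow objects):
    #
    #     set_to_group = dict(svc.db.query(TCGPlayerGroups.name, TCGPlayerGroups.group_id).all())
    #     created = svc.create_tcgplayer_cards_batch(rows, set_to_group)
    #     logger.info(f"created {len(created)} new TCGPlayer cards")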

    def load_export_csv_to_card_tcgplayer(self, export_csv: bytes, file_id: str = None, batch_size: int = 1000) -> None:
        try:
            if not export_csv:
                raise ValueError("No export CSV provided")

            df = self.df_util.csv_bytes_to_df(export_csv)

            logger.debug(f"Loaded {len(df)} rows from export CSV")

            # Get all group_ids upfront in a single query
            set_to_group = dict(
                self.db.query(TCGPlayerGroups.name, TCGPlayerGroups.group_id).all()
            )

            # Process in batches
            for i in range(0, len(df), batch_size):
                batch_df = df.iloc[i:i + batch_size]
                batch_rows = [TCGPlayerPricingRow(row) for _, row in batch_df.iterrows()]

                # Keep only rows whose set has a known group id
                valid_rows = [
                    row for row in batch_rows
                    if row.set_name in set_to_group
                ]

                if valid_rows:
                    # Pass the entire set_to_group mapping
                    self.create_tcgplayer_cards_batch(valid_rows, set_to_group)
        except Exception as e:
            logger.error(f"Failed to load export CSV: {e}")
            # mark the file upload as failed
            if file_id:
                with db_transaction(self.db):
                    file = self.db.query(File).filter(File.id == file_id).first()
                    if file:
                        file.status = 'failed'
                        self.db.add(file)
            raise
        finally:
            # only mark completed if the upload was not already marked failed
            if file_id:
                with db_transaction(self.db):
                    file = self.db.query(File).filter(File.id == file_id).first()
                    if file and file.status != 'failed':
                        file.status = 'completed'
                        self.db.add(file)

    def get_card_tcgplayer_from_manabox_row(self, card: ManaboxRow, group_id: int) -> CardTCGPlayer:
        # Manabox rarity -> TCGPlayer rarity code
        mb_to_tcg_rarity_mapping = {
            "common": "C",
            "uncommon": "U",
            "rare": "R",
            "mythic": "M",
            "special": "S"
        }

        # Manabox condition+foil -> TCGPlayer condition
        mb_to_tcg_condition_mapping = {
            ("near_mint", "foil"): "Near Mint Foil",
            ("near_mint", "normal"): "Near Mint",
            ("near_mint", "etched"): "Near Mint Foil"
        }

        # Get TCGPlayer condition from the Manabox condition+foil combination
        tcg_condition = mb_to_tcg_condition_mapping.get((card.condition, card.foil))
        if tcg_condition is None:
            logger.error(f"Unsupported condition/foil combination: {card.condition}, {card.foil}")
            logger.error(f"Card details: name={card.name}, set_name={card.set_name}, collector_number={card.collector_number}")
            return None

        # Get TCGPlayer rarity from the Manabox rarity
        tcg_rarity = mb_to_tcg_rarity_mapping.get(card.rarity)
        if tcg_rarity is None:
            logger.error(f"Unsupported rarity: {card.rarity}")
            logger.error(f"Card details: name={card.name}, set_name={card.set_name}, collector_number={card.collector_number}")
            return None

        # Query for matching products without a rarity filter first;
        # strip letters from card.collector_number for the join only
        join_collector_number = ''.join(filter(str.isdigit, card.collector_number))
        base_query = self.db.query(CardTCGPlayer).filter(
            CardTCGPlayer.number == join_collector_number,
            CardTCGPlayer.condition == tcg_condition,
            CardTCGPlayer.group_id == group_id,
            CardTCGPlayer.rarity != "T"  # tokens are not supported
        )

        # Get all potential matches
        products = base_query.all()

        if not products:
            logger.error(f"No matching TCGPlayer product found for card {card.name} ({card.set_code} {card.collector_number})")
            return None

        # Look for an exact match including rarity, unless the TCGPlayer product is a land
        for product in products:
            if product.rarity == "L" or product.rarity == tcg_rarity:
                return product

        # No rarity match: accept a unique product, or fall back to a name match
        if len(products) > 1:
            for product in products:
                if product.product_name == card.name:
                    return product
        elif len(products) == 1:
            return products[0]

        logger.error(f"Multiple matching TCGPlayer products found for card {card.name} ({card.set_code} {card.collector_number})")
        return None
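    # Matching sketch (hypothetical card): condition="near_mint", foil="foil",
    # rarity="rare" maps to TCGPlayer condition "Near Mint Foil" and rarity "R";
    # a collector number like "123p" joins on "123" after the digits-only strip.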

    def get_pricing_export_for_all_products(self) -> File:
        """
        Download a pricing export covering every known TCGPlayer group
        and store it as a File record.
        """
        DEBUG = False
        if DEBUG:
            logger.debug("DEBUG: Using existing pricing export file")
            file = self.db.query(File).filter(File.type == 'tcgplayer_pricing_export').first()
            if file:
                return file
        try:
            all_group_ids = self.db.query(TCGPlayerGroups.group_id).all()
            all_group_ids = [str(group_id) for group_id, in all_group_ids]
            export_csv = self._get_export_csv(all_group_ids)
            export_csv_file = self.file_service.create_file(export_csv, CreateFileRequest(
                source="tcgplayer",
                type="tcgplayer_pricing_export",
                filename="tcgplayer_pricing_export.csv"
            ))
            return export_csv_file
        except SQLAlchemyError as e:
            raise RuntimeError(f"Failed to retrieve group IDs: {str(e)}")

    def load_tcgplayer_cards(self) -> File:
        try:
            # Get the full pricing export
            export_csv_file = self.get_pricing_export_for_all_products()
            export_csv = self.file_service.get_file_content(export_csv_file.id)

            # Load it into card_tcgplayer
            self.load_export_csv_to_card_tcgplayer(export_csv, export_csv_file.id)

            return export_csv_file

        except Exception as e:
            logger.error(f"Failed to load prices: {e}")
            raise

    def open_box_cards_to_tcgplayer_inventory_df(self, open_box_ids: List[str]) -> pd.DataFrame:
        tcgcards = (self.db.query(OpenBoxCard, CardTCGPlayer)
                    .filter(OpenBoxCard.open_box_id.in_(open_box_ids))
                    .join(CardTCGPlayer, OpenBoxCard.card_id == CardTCGPlayer.product_id)
                    .all())

        if not tcgcards:
            return None

        # Create dataframe
        df = pd.DataFrame([(tcg.product_id, tcg.tcgplayer_id, tcg.product_line, tcg.set_name, tcg.product_name,
                            tcg.title, tcg.number, tcg.rarity, tcg.condition, obc.quantity)
                           for obc, tcg in tcgcards],
                          columns=['product_id', 'tcgplayer_id', 'product_line', 'set_name', 'product_name',
                                   'title', 'number', 'rarity', 'condition', 'quantity'])

        # Add the upload-only columns (all blank except the add quantity)
        df['Total Quantity'] = ''
        df['Add to Quantity'] = df['quantity']
        df['TCG Marketplace Price'] = ''
        df['Photo URL'] = ''

        # Rename columns to the TCGplayer upload format
        df = df.rename(columns={
            'tcgplayer_id': 'TCGplayer Id',
            'product_line': 'Product Line',
            'set_name': 'Set Name',
            'product_name': 'Product Name',
            'title': 'Title',
            'number': 'Number',
            'rarity': 'Rarity',
            'condition': 'Condition'
        })

        return df
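    # Hypothetical usage: turn opened boxes into a TCGplayer add-to-inventory CSV:
    #
    #     df = svc.open_box_cards_to_tcgplayer_inventory_df([open_box.id])
    #     if df is not None:
    #         csv_bytes = svc.df_util.df_to_csv_bytes(df)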
500
app/services/unholy_pricing.py
Normal file
@@ -0,0 +1,500 @@
from dataclasses import dataclass
from decimal import Decimal, ROUND_HALF_UP
from enum import Enum
from typing import Optional, Dict, List, Any
import pandas as pd
import logging
from app.db.models import Product, Price
from sqlalchemy.orm import Session
from uuid import uuid4 as uuid
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
from sqlalchemy import text
from app.services.util._dataframe import DataframeUtil


logger = logging.getLogger(__name__)

class PriceType(str, Enum):
    TCG_MARKET = 'tcg_market_price'
    TCG_DIRECT_LOW = 'tcg_direct_low'
    TCG_LOW_WITH_SHIPPING = 'tcg_low_price_with_shipping'
    TCG_LOW = 'tcg_low_price'
    TCG_MARKETPLACE = 'tcg_marketplace_price'
    MY_PRICE = 'my_price'

class PricingStrategy(str, Enum):
    DEFAULT = 'default'
    AGGRESSIVE = 'aggressive'
    CONSERVATIVE = 'conservative'

@dataclass
class PriceRange:
    min_price: Decimal
    max_price: Decimal
    multiplier: Decimal
    ceiling_price: Optional[Decimal] = None
    include_shipping: bool = False

    def __post_init__(self):
        # Convert all values to Decimal for precise calculations
        self.min_price = Decimal(str(self.min_price))
        self.max_price = Decimal(str(self.max_price))
        self.multiplier = Decimal(str(self.multiplier))
        if self.ceiling_price is not None:
            self.ceiling_price = Decimal(str(self.ceiling_price))

    def contains_price(self, price: Decimal) -> bool:
        """Check if a price falls within this range, inclusive of min, exclusive of max."""
        return self.min_price <= price < self.max_price

    def calculate_price(self, base_price: Decimal) -> Decimal:
        """Calculate the final price for this range, respecting the ceiling."""
        calculated = base_price * self.multiplier
        if self.ceiling_price is not None:
            calculated = min(calculated, self.ceiling_price)
        return calculated.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP)
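    # Worked example (hypothetical values): the 5-10 tier defined below uses
    # multiplier 1.15 with a 9.99 ceiling, so:
    #
    #     rng = PriceRange(min_price='5', max_price='10', multiplier='1.15', ceiling_price='9.99')
    #     rng.calculate_price(Decimal('7.50'))  # 7.50 * 1.15 = 8.625 -> Decimal('8.63')
    #     rng.calculate_price(Decimal('9.50'))  # min(10.925, 9.99)   -> Decimal('9.99')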

class PricingConfiguration:
    """Centralized configuration for pricing rules and thresholds."""

    # Price thresholds
    FLOOR_PRICE = Decimal('0.35')
    MAX_PRICE = Decimal('100000.00')  # safety cap for maximum price
    SHIPPING_THRESHOLD = Decimal('5.00')

    # Multipliers
    FLOOR_MULT = Decimal('1.25')
    NEAR_FLOOR_MULT = Decimal('1.25')
    UNDER_FIVE_MULT = Decimal('1.25')
    FIVE_TO_TEN_MULT = Decimal('1.15')
    TEN_TO_TWENTYFIVE_MULT = Decimal('1.10')
    TWENTYFIVE_TO_FIFTY_MULT = Decimal('1.05')
    FIFTY_PLUS_MULT = Decimal('1.025')

    # Price variance thresholds
    MAX_PRICE_VARIANCE = Decimal('0.50')  # maximum allowed variance between prices, as a ratio

    @classmethod
    def get_price_ranges(cls) -> list[PriceRange]:
        """Get the list of price ranges with their respective rules."""
        return [
            PriceRange(
                min_price=Decimal('0'),
                max_price=cls.FLOOR_PRICE,
                multiplier=cls.FLOOR_MULT,
                include_shipping=False
            ),
            PriceRange(
                min_price=cls.FLOOR_PRICE,
                max_price=Decimal('5'),
                multiplier=cls.UNDER_FIVE_MULT,
                ceiling_price=Decimal('4.99'),
                include_shipping=False
            ),
            PriceRange(
                min_price=Decimal('5'),
                max_price=Decimal('10'),
                multiplier=cls.FIVE_TO_TEN_MULT,
                ceiling_price=Decimal('9.99'),
                include_shipping=True
            ),
            PriceRange(
                min_price=Decimal('10'),
                max_price=Decimal('25'),
                multiplier=cls.TEN_TO_TWENTYFIVE_MULT,
                ceiling_price=Decimal('24.99'),
                include_shipping=True
            ),
            PriceRange(
                min_price=Decimal('25'),
                max_price=Decimal('50'),
                multiplier=cls.TWENTYFIVE_TO_FIFTY_MULT,
                ceiling_price=Decimal('49.99'),
                include_shipping=True
            ),
            PriceRange(
                min_price=Decimal('50'),
                max_price=cls.MAX_PRICE,
                multiplier=cls.FIFTY_PLUS_MULT,
                include_shipping=True
            )
        ]

class PriceCalculationResult:
    """Represents the result of a price calculation."""

    def __init__(
        self,
        product: Product,
        calculated_price: Optional[Decimal],
        base_prices: Dict[str, Decimal],
        error: Optional[str] = None
    ):
        self.product = product
        self.calculated_price = calculated_price
        self.base_prices = base_prices
        self.error = error

    @property
    def success(self) -> bool:
        return self.calculated_price is not None and self.error is None

    @property
    def max_base_price(self) -> Optional[Decimal]:
        """Returns the highest base price."""
        return max(self.base_prices.values()) if self.base_prices else None


class PricingService:
    CHUNK_SIZE = 5000   # configurable batch size
    MAX_WORKERS = 4     # configurable worker count

    def __init__(self, db: Session):
        self.db = db
        self.df_util = DataframeUtil()
        self.config = PricingConfiguration
        self.price_ranges = self.config.get_price_ranges()

    def get_product_by_id(self, product_id: str) -> Optional[Product]:
        """Get a product by its ID, or None if it does not exist."""
        return self.db.query(Product)\
            .filter(Product.id == str(product_id))\
            .first()

    def get_latest_price_for_product(self, product: Product, price_type: PriceType) -> Optional[Price]:
        """Get the most recent price of a specific type for a product."""
        return self.db.query(Price)\
            .filter(
                Price.product_id == str(product.id),
                Price.type == price_type.value
            )\
            .order_by(Price.date_created.desc())\
            .first()

    def get_historical_prices_for_product(
        self, product: Product, price_type: Optional[PriceType] = None
    ) -> dict[PriceType, list[Price]]:
        """Get historical prices for a product, optionally filtered by type."""
        query = self.db.query(Price).filter(Price.product_id == str(product.id))

        if price_type:
            query = query.filter(Price.type == price_type.value)  # filter on the enum's value

        prices = query.order_by(Price.date_created.desc()).all()

        if price_type:
            return {price_type: prices}

        # Group prices by type
        result = {t: [] for t in PriceType}
        for price in prices:
            result[PriceType(price.type)].append(price)  # convert the stored string back to the enum
        return result

    def _validate_price_data(self, prices: dict[str, Optional[Price]]) -> Optional[str]:
        """Validate price data and return an error message if invalid."""
        # Filter out None values and get valid prices
        valid_prices = {k: v for k, v in prices.items() if v is not None}

        if not valid_prices:
            return "No valid price data available"

        for price in valid_prices.values():
            if price.price < 0:
                return f"Negative price found: {price.price}"
            if price.price > self.config.MAX_PRICE:
                return f"Price exceeds maximum allowed: {price.price}"

        return None

    def _check_price_variance(self, prices: Dict[str, Decimal]) -> bool:
        """Check if the variance between prices is within acceptable limits."""
        if not prices:
            return True

        min_price = min(prices.values())
        max_price = max(prices.values())

        if min_price == 0:
            return False

        variance_ratio = max_price / min_price
        return variance_ratio <= (1 + self.config.MAX_PRICE_VARIANCE)
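    # Worked example (assumed numbers): with MAX_PRICE_VARIANCE = 0.50 the
    # acceptable ratio is 1.5, so base prices {low: 4.00, market: 6.50} give
    # 6.50 / 4.00 = 1.625 > 1.5 and fail the check, while
    # {low: 4.00, market: 5.50} give 1.375 <= 1.5 and pass.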

    def _get_relevant_prices(self, product: Product) -> dict[str, Optional[Price]]:
        """Get all relevant prices for a product."""
        return {
            PriceType.TCG_LOW.value: self.get_latest_price_for_product(product, PriceType.TCG_LOW),
            PriceType.TCG_DIRECT_LOW.value: self.get_latest_price_for_product(product, PriceType.TCG_DIRECT_LOW),
            PriceType.TCG_MARKET.value: self.get_latest_price_for_product(product, PriceType.TCG_MARKET),
            PriceType.TCG_LOW_WITH_SHIPPING.value: self.get_latest_price_for_product(product, PriceType.TCG_LOW_WITH_SHIPPING)
        }

    def _get_base_prices(
        self, prices: dict[str, Price], include_shipping: bool = False
    ) -> Dict[str, Decimal]:
        """Get base prices, excluding None values."""
        base_prices = {}

        # Add core prices if they exist
        if tcg_low := prices.get(PriceType.TCG_LOW.value):
            base_prices[PriceType.TCG_LOW.value] = Decimal(str(tcg_low.price))
        if tcg_direct := prices.get(PriceType.TCG_DIRECT_LOW.value):
            base_prices[PriceType.TCG_DIRECT_LOW.value] = Decimal(str(tcg_direct.price))
        if tcg_market := prices.get(PriceType.TCG_MARKET.value):
            base_prices[PriceType.TCG_MARKET.value] = Decimal(str(tcg_market.price))

        # Add the shipping-inclusive price if requested and available
        if include_shipping:
            if tcg_shipping := prices.get(PriceType.TCG_LOW_WITH_SHIPPING.value):
                base_prices[PriceType.TCG_LOW_WITH_SHIPPING.value] = Decimal(str(tcg_shipping.price))

        return base_prices

    def _get_price_range(self, price: Decimal) -> Optional[PriceRange]:
        """Get the appropriate price range for a given price."""
        for price_range in self.price_ranges:
            if price_range.contains_price(price):
                return price_range
        return None

    def _handle_floor_price_cases(
        self, base_prices: Dict[str, Decimal]
    ) -> Optional[Decimal]:
        """Handle special cases for prices near or below the floor price."""
        if all(price < self.config.FLOOR_PRICE for price in base_prices.values()):
            return self.config.FLOOR_PRICE

        if any(price < self.config.FLOOR_PRICE for price in base_prices.values()):
            max_price = max(base_prices.values())
            return max_price * self.config.NEAR_FLOOR_MULT

        return None
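    # Example (hypothetical prices): with FLOOR_PRICE = 0.35 and
    # NEAR_FLOOR_MULT = 1.25, base prices {0.10, 0.20} are all below the
    # floor -> return 0.35 flat; {0.20, 0.60} straddle the floor ->
    # return 0.60 * 1.25 = 0.75.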

    def calculate_price(
        self, product_id: str, strategy: PricingStrategy = PricingStrategy.DEFAULT
    ) -> PriceCalculationResult:
        """Calculate the final price for a product using the specified pricing strategy."""
        # Get the product (ensure a string UUID)
        product = self.get_product_by_id(str(product_id))
        if not product:
            logger.error(f"Product not found: {product_id}")
            return PriceCalculationResult(product, None, {}, "Product not found")

        # Get all relevant prices
        prices = self._get_relevant_prices(product)

        # Validate price data
        if error := self._validate_price_data(prices):
            logger.error(f"Invalid price data: {error}")
            logger.error(f"product: {product.id}")
            return PriceCalculationResult(product, None, {}, error)

        # Get initial base prices without shipping
        base_prices = self._get_base_prices(prices, include_shipping=False)

        # Check price variance
        if not self._check_price_variance(base_prices):
            logger.error("Price variance exceeds acceptable threshold")
            logger.error(f"Base prices: {base_prices}")
            logger.error(f"product: {product.id}")
            return PriceCalculationResult(
                product, None, base_prices,
                "Price variance exceeds acceptable threshold"
            )

        # Handle floor price cases
        if floor_price := self._handle_floor_price_cases(base_prices):
            return PriceCalculationResult(product, floor_price, base_prices)

        # Get the max base price and its range
        max_base_price = max(base_prices.values())
        price_range = self._get_price_range(max_base_price)

        if not price_range:
            logger.error("No valid price range found for price")
            logger.error(f"Base prices: {base_prices}, max_base_price: {max_base_price}")
            logger.error(f"product: {product.id}")
            return PriceCalculationResult(
                product, None, base_prices,
                f"No valid price range found for price: {max_base_price}"
            )

        # Include shipping prices if necessary
        if price_range.include_shipping:
            base_prices = self._get_base_prices(prices, include_shipping=True)
            max_base_price = max(base_prices.values())

            # Recheck the price range with shipping included
            price_range = self._get_price_range(max_base_price)

            if not price_range:
                logger.error("No valid price range found for price with shipping")
                logger.error(f"Base prices: {base_prices}, max_base_price: {max_base_price}")
                logger.error(f"product: {product.id}")
                return PriceCalculationResult(
                    product, None, base_prices,
                    f"No valid price range found for price with shipping: {max_base_price}"
                )

        # Calculate the final price using the price range
        calculated_price = price_range.calculate_price(max_base_price)

        # Apply strategy-specific adjustments
        if strategy == PricingStrategy.AGGRESSIVE:
            calculated_price *= Decimal('0.95')
        elif strategy == PricingStrategy.CONSERVATIVE:
            calculated_price *= Decimal('1.05')

        debug_base_prices_with_name_string = ", ".join(f"{k}: {v}" for k, v in base_prices.items())
        logger.debug(f"Set price to {calculated_price.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP)} based on {debug_base_prices_with_name_string}")

        return PriceCalculationResult(
            product,
            calculated_price.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP),
            base_prices
        )
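    # Sketch (hypothetical ids): calculate and inspect one product's price:
    #
    #     result = pricing_service.calculate_price(product_id)
    #     if result.success:
    #         print(result.calculated_price, result.max_base_price)
    #     else:
    #         print(result.error)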

    def _bulk_generate_uuids(self, size: int) -> List[str]:
        """Generate UUIDs in bulk for better performance."""
        return [str(uuid()) for _ in range(size)]

    def _prepare_price_records(self, df: pd.DataFrame, price_type: str, uuids: List[str]) -> List[Dict]:
        """Prepare price records in bulk using vectorized operations."""
        df['price_id'] = uuids[:len(df)]
        df['type'] = price_type  # price_type should already be a string value
        df['date_created'] = datetime.utcnow()

        return df[['price_id', 'product_id', 'type', 'price', 'date_created']].to_dict('records')

    def _calculate_suggested_prices_batch(self, product_ids: List[str]) -> Dict[str, float]:
        """Calculate suggested prices in parallel for a batch of products."""
        with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
            future_to_id = {
                executor.submit(self.calculate_price, str(pid)): pid  # ensure string UUIDs
                for pid in product_ids
            }

            results = {}
            for future in as_completed(future_to_id):
                product_id = future_to_id[future]
                try:
                    result = future.result()
                    if result.success:
                        results[str(product_id)] = float(result.calculated_price)
                except Exception as e:
                    logger.error(f"Failed to calculate price for product {product_id}: {e}")

        return results

    def _bulk_insert_prices(self, records: List[Dict]) -> None:
        """Efficiently insert price records in bulk."""
        if not records:
            return

        try:
            df = pd.DataFrame(records)
            df.to_sql('prices', self.db.bind,
                      if_exists='append',
                      index=False,
                      method='multi',
                      chunksize=self.CHUNK_SIZE)
        except Exception as e:
            logger.error(f"Failed to bulk insert prices: {e}")
            raise

    def process_pricing_export(self, export_csv: bytes) -> None:
        """Process a pricing export with optimized bulk operations."""
        try:
            # Convert CSV to DataFrame
            df = self.df_util.csv_bytes_to_df(export_csv)
            df.columns = df.columns.str.lower().str.replace(' ', '_')

            # Get product mappings efficiently - SQLite compatible, with chunking
            SQLITE_MAX_VARS = 999  # SQLite's bound-parameter limit
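            # e.g. 2,500 tcgplayer_ids are queried as chunks of 999, 999 and
            # 502 rows, each bound as named params :id_0..:id_N so a single
            # IN (...) clause never exceeds SQLite's limit.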
            tcgplayer_ids = df['tcgplayer_id'].tolist()
            all_product_dfs = []

            for i in range(0, len(tcgplayer_ids), SQLITE_MAX_VARS):
                chunk_ids = tcgplayer_ids[i:i + SQLITE_MAX_VARS]
                placeholders = ','.join([':id_' + str(j) for j in range(len(chunk_ids))])
                product_query = f"""
                    SELECT tcgplayer_id, product_id
                    FROM card_tcgplayer
                    WHERE tcgplayer_id IN ({placeholders})
                """

                # Create a dictionary of parameters
                params = {f'id_{j}': id_val for j, id_val in enumerate(chunk_ids)}

                chunk_df = pd.read_sql(
                    text(product_query),
                    self.db.bind,
                    params=params
                )
                all_product_dfs.append(chunk_df)

            # Combine all chunks
            product_df = pd.concat(all_product_dfs) if all_product_dfs else pd.DataFrame()

            # Merge dataframes efficiently
            merged_df = pd.merge(
                df,
                product_df,
                on='tcgplayer_id',
                how='inner'
            )

            # Define the price column mapping - using enum values directly
            price_columns = {
                'tcg_market_price': PriceType.TCG_MARKET.value,
                'tcg_direct_low': PriceType.TCG_DIRECT_LOW.value,
                'tcg_low_price_with_shipping': PriceType.TCG_LOW_WITH_SHIPPING.value,
                'tcg_low_price': PriceType.TCG_LOW.value,
                'tcg_marketplace_price': PriceType.TCG_MARKETPLACE.value
            }

            # Process each price type in chunks
            for price_col, price_type in price_columns.items():
                valid_prices_df = merged_df[merged_df[price_col].notna()].copy()

                for chunk_start in range(0, len(valid_prices_df), self.CHUNK_SIZE):
                    chunk_df = valid_prices_df.iloc[chunk_start:chunk_start + self.CHUNK_SIZE].copy()
                    uuids = self._bulk_generate_uuids(len(chunk_df))

                    chunk_df['price'] = chunk_df[price_col]
                    chunk_df['product_id'] = chunk_df['product_id'].astype(str)  # ensure string UUIDs
                    records = self._prepare_price_records(chunk_df, price_type, uuids)
                    self._bulk_insert_prices(records)

            # Handle suggested prices separately with parallel processing
            product_ids = merged_df['product_id'].unique()
            suggested_prices = {}

            for chunk_start in range(0, len(product_ids), self.CHUNK_SIZE):
                chunk_ids = product_ids[chunk_start:chunk_start + self.CHUNK_SIZE]
                chunk_prices = self._calculate_suggested_prices_batch(chunk_ids)
                suggested_prices.update(chunk_prices)

            # Create suggested price records
            if suggested_prices:
                suggested_df = pd.DataFrame([
                    {'product_id': str(pid), 'price': price}  # ensure string UUIDs
                    for pid, price in suggested_prices.items()
                ])

                uuids = self._bulk_generate_uuids(len(suggested_df))
                records = self._prepare_price_records(suggested_df, 'suggested_price', uuids)
                self._bulk_insert_prices(records)

        except Exception as e:
            logger.error(f"Failed to process pricing export: {e}")
            raise
72
app/services/util/_dataframe.py
Normal file
@@ -0,0 +1,72 @@
import pandas as pd
from io import StringIO
from app.db.models import File


class ManaboxRow:
    def __init__(self, row: pd.Series):
        # Integer field
        try:
            self.manabox_id = int(row['manabox_id'])
        except (ValueError, TypeError):
            raise ValueError(f"manabox_id must be convertible to integer, got: {row['manabox_id']}")

        # String fields with None/NaN handling
        self.name = str(row['name']) if pd.notna(row['name']) else ''
        self.set_code = str(row['set_code']) if pd.notna(row['set_code']) else ''
        self.set_name = str(row['set_name']) if pd.notna(row['set_name']) else ''
        self.collector_number = str(row['collector_number']) if pd.notna(row['collector_number']) else ''
        self.foil = str(row['foil']) if pd.notna(row['foil']) else ''
        self.rarity = str(row['rarity']) if pd.notna(row['rarity']) else ''
        self.scryfall_id = str(row['scryfall_id']) if pd.notna(row['scryfall_id']) else ''
        self.condition = str(row['condition']) if pd.notna(row['condition']) else ''
        self.language = str(row['language']) if pd.notna(row['language']) else ''
        self.quantity = str(row['quantity']) if pd.notna(row['quantity']) else ''


class TCGPlayerPricingRow:
    def __init__(self, row: pd.Series):
        self.tcgplayer_id = row['tcgplayer_id']
        self.product_line = row['product_line']
        self.set_name = row['set_name']
        self.product_name = row['product_name']
        self.title = row['title']
        self.number = row['number']
        self.rarity = row['rarity']
        self.condition = row['condition']
        self.tcg_market_price = row['tcg_market_price']
        self.tcg_direct_low = row['tcg_direct_low']
        self.tcg_low_price_with_shipping = row['tcg_low_price_with_shipping']
        self.tcg_low_price = row['tcg_low_price']
        self.total_quantity = row['total_quantity']
        self.add_to_quantity = row['add_to_quantity']
        self.tcg_marketplace_price = row['tcg_marketplace_price']
        self.photo_url = row['photo_url']


class DataframeUtil:
    def __init__(self):
        pass

    def format_df_columns(self, df: pd.DataFrame) -> pd.DataFrame:
        df.columns = df.columns.str.lower()
        df.columns = df.columns.str.replace(' ', '_')
        return df

    def file_to_df(self, file: File) -> pd.DataFrame:
        with open(file.filepath, 'rb') as f:
            content = f.read()
        content = content.decode('utf-8')
        df = pd.read_csv(StringIO(content))
        df = self.format_df_columns(df)
        return df

    def csv_bytes_to_df(self, content: bytes) -> pd.DataFrame:
        content = content.decode('utf-8')
        df = pd.read_csv(StringIO(content))
        df = self.format_df_columns(df)
        return df

    def df_to_csv_bytes(self, df: pd.DataFrame) -> bytes:
        csv = df.to_csv(index=False)
        return csv.encode('utf-8')
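    # Round-trip note: df_to_csv_bytes(csv_bytes_to_df(b)) normalizes headers
    # to lowercase snake_case, e.g. "TCG Market Price" -> "tcg_market_price".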