more stuff yay
@@ -1,10 +1,11 @@
from db.models import Box, File, StagedFileProduct, Product, OpenBoxCard, OpenBox, Inventory
from db.models import Box, File, StagedFileProduct, Product, OpenBoxCard, OpenBox, Inventory, TCGPlayerGroups
from db.utils import db_transaction
from uuid import uuid4 as uuid
from datetime import datetime
from sqlalchemy.orm import Session
from sqlalchemy.engine.result import Row
from schemas.box import CreateOpenBoxResponse, CreateSealedBoxResponse, CreateBoxResponse
from sqlalchemy import or_
from schemas.box import CreateBoxRequest, CreateBoxResponse, UpdateBoxRequest, CreateOpenBoxRequest
import logging
from typing import Any
from db.utils import db_transaction
@@ -92,7 +93,7 @@ class BoxService:
        response = CreateBoxResponse(success=True)
        return response

    def create_box(self, create_box_data: dict[str, Any], file_ids: list[str] = None) -> CreateBoxResponse:
    def _create_box(self, create_box_data: dict[str, Any], file_ids: list[str] = None) -> CreateBoxResponse:
        sealed = create_box_data["sealed"]
        assert isinstance(sealed, bool)
        if file_ids and not sealed:
@@ -132,4 +133,88 @@ class BoxService:
        except Exception as e:
            logger.error(f"Error creating box: {str(e)}")
            raise e

    def validate_box_type(self, box_type: str) -> bool:
        return box_type in ["collector", "play", "draft", "set", "commander"]

    def validate_set_code(self, set_code: str) -> bool:
        exists = self.db.query(TCGPlayerGroups).filter(
            TCGPlayerGroups.abbreviation == set_code
        ).first() is not None
        return exists

    def create_box(self, create_box_data: CreateBoxRequest) -> Box:
        # validate box data
        if not self.validate_box_type(create_box_data.type):
            raise Exception("Invalid box type")
        if not self.validate_set_code(create_box_data.set_code):
            raise Exception("Invalid set code")
        # check if box exists by type and set code or sku
        existing_box = self.db.query(Box).filter(
            or_(
                Box.type == create_box_data.type,
                Box.sku == create_box_data.sku
            ),
            Box.set_code == create_box_data.set_code
        ).first()
        if existing_box:
            raise Exception("Box already exists")
        # create box
        with db_transaction(self.db):
            box = Box(
                product_id=str(uuid()),
                type=create_box_data.type,
                set_code=create_box_data.set_code,
                sku=create_box_data.sku,
                num_cards_expected=create_box_data.num_cards_expected
            )
            self.db.add(box)

        return box

    def update_box(self, box_id: str, update_box_data: UpdateBoxRequest) -> Box:
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise Exception("Box not found")
        with db_transaction(self.db):
            if update_box_data.type:
                box.type = update_box_data.type
            if update_box_data.set_code:
                box.set_code = update_box_data.set_code
            if update_box_data.sku:
                box.sku = update_box_data.sku
            if update_box_data.num_cards_expected:
                box.num_cards_expected = update_box_data.num_cards_expected
        return box

    def delete_box(self, box_id: str) -> Box:
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise Exception("Box not found")
        with db_transaction(self.db):
            self.db.delete(box)
        return box

    def open_box(self, box_id: str, box_data: CreateOpenBoxRequest):
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise Exception("Box not found")
        with db_transaction(self.db):
            open_box = OpenBox(
                id=str(uuid()),
                product_id=box_id,
                num_cards_actual=box_data.num_cards_actual,
                date_opened=datetime.strptime(box_data.date_opened, "%Y-%m-%d") if box_data.date_opened else datetime.now()
            )
            self.db.add(open_box)
            staged_product_data = self.get_staged_product_data(box_data.file_ids)
            product_data = self.aggregate_staged_product_data(staged_product_data)
            self.inventory_service.process_staged_products(product_data)
            self.add_products_to_open_box(open_box, product_data)
            # update box_id for files
            for file_id in box_data.file_ids:
                file = self.db.query(File).filter(File.id == file_id).first()
                file.box_id = open_box.id
                self.db.add(file)
        return open_box
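A minimal usage sketch of the new box flow (illustrative, not part of the commit); the BoxService constructor arguments and the sample field values are assumptions, while the method names and request fields come from the diff above:

# Hypothetical wiring; BoxService.__init__ is not shown in this diff.
box_service = BoxService(db=session, inventory_service=inventory_service)
box = box_service.create_box(CreateBoxRequest(
    type="collector",        # must pass validate_box_type
    set_code="BLB",          # assumed set code; must match a TCGPlayerGroups.abbreviation
    sku="BLB-CB-001",        # assumed SKU
    num_cards_expected=60
))
opened = box_service.open_box(box.product_id, CreateOpenBoxRequest(
    num_cards_actual=58,
    date_opened="2025-01-15",   # parsed with %Y-%m-%d; defaults to datetime.now() when omitted
    file_ids=[staged_file.id]   # placeholder file ids
))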
@@ -14,7 +14,7 @@ logger = logging.getLogger(__name__)
# Name,Set code,Set name,Collector number,Foil,Rarity,Quantity,ManaBox ID,Scryfall ID,Purchase price,Misprint,Altered,Condition,Language,Purchase price currency
MANABOX_REQUIRED_FILE_HEADERS = ['Name', 'Set code', 'Set name', 'Collector number', 'Foil', 'Rarity', 'Quantity', 'ManaBox ID', 'Scryfall ID', 'Purchase price', 'Misprint', 'Altered', 'Condition', 'Language', 'Purchase price currency']
MANABOX_ALLOWED_FILE_EXTENSIONS = ['.csv']
MANABOX_ALLOWED_FILE_TYPES = ['scan_export']
MANABOX_ALLOWED_FILE_TYPES = ['scan_export_common', 'scan_export_rare']
MANABOX_CONFIG = {
    "required_headers": MANABOX_REQUIRED_FILE_HEADERS,
    "allowed_extensions": MANABOX_ALLOWED_FILE_EXTENSIONS,
@@ -1,205 +1,8 @@
import logging
from typing import Callable
from db.models import TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, ManaboxExportData, ManaboxTCGPlayerMapping, TCGPlayerProduct
from sqlalchemy.orm import Session
import pandas as pd
from db.utils import db_transaction
from sqlalchemy import func, and_, exists


logger = logging.getLogger(__name__)

class PricingService:
    def __init__(self, db: Session):
        self.db = db

    def get_box_with_most_recent_prices(self, box_id: str) -> pd.DataFrame:
        latest_prices = (
            self.db.query(
                TCGPlayerPricingHistory.tcgplayer_product_id,
                func.max(TCGPlayerPricingHistory.date_created).label('max_date')
            )
            .group_by(TCGPlayerPricingHistory.tcgplayer_product_id)
            .subquery('latest') # Added name to subquery
        )

        result = (
            self.db.query(ManaboxExportData, TCGPlayerPricingHistory, TCGPlayerProduct)
            .join(ManaboxTCGPlayerMapping, ManaboxExportData.id == ManaboxTCGPlayerMapping.manabox_id)
            .join(TCGPlayerProduct, ManaboxTCGPlayerMapping.tcgplayer_id == TCGPlayerProduct.id)
            .join(TCGPlayerPricingHistory, TCGPlayerProduct.id == TCGPlayerPricingHistory.tcgplayer_product_id)
            .join(
                latest_prices,
                and_(
                    TCGPlayerPricingHistory.tcgplayer_product_id == latest_prices.c.tcgplayer_product_id,
                    TCGPlayerPricingHistory.date_created == latest_prices.c.max_date
                )
            )
            .filter(ManaboxExportData.box_id == box_id) # Removed str() conversion
            .all()
        )

        logger.debug(f"Found {len(result)} rows")

        df = pd.DataFrame([{
            **{f"manabox_{k}": v for k, v in row[0].__dict__.items() if not k.startswith('_')},
            **{f"pricing_{k}": v for k, v in row[1].__dict__.items() if not k.startswith('_')},
            **{f"tcgproduct_{k}": v for k, v in row[2].__dict__.items() if not k.startswith('_')}
        } for row in result])

        return df

    def get_live_inventory_with_most_recent_prices(self) -> pd.DataFrame:
        # Get latest export IDs using subqueries
        latest_inventory_export = (
            self.db.query(TCGPlayerExportHistory.inventory_export_id)
            .filter(TCGPlayerExportHistory.type == "live_inventory")
            .order_by(TCGPlayerExportHistory.date_created.desc())
            .limit(1)
            .scalar_subquery()
        )
        # this is bad because latest pricing export is not guaranteed to be related to the latest inventory export
        latest_pricing_export = (
            self.db.query(TCGPlayerExportHistory.pricing_export_id)
            .filter(TCGPlayerExportHistory.type == "pricing")
            .order_by(TCGPlayerExportHistory.date_created.desc())
            .limit(1)
            .scalar_subquery()
        )

        # Join inventory and pricing data in a single query
        inventory_with_pricing = (
            self.db.query(TCGPlayerInventory, TCGPlayerPricingHistory)
            .join(
                TCGPlayerPricingHistory,
                TCGPlayerInventory.tcgplayer_product_id == TCGPlayerPricingHistory.tcgplayer_product_id
            )
            .filter(
                TCGPlayerInventory.export_id == latest_inventory_export,
                TCGPlayerPricingHistory.export_id == latest_pricing_export
            )
            .all()
        )

        # Convert to pandas DataFrame
        df = pd.DataFrame([{
            # Inventory columns
            **{f"inventory_{k}": v
               for k, v in row[0].__dict__.items()
               if not k.startswith('_')},
            # Pricing columns
            **{f"pricing_{k}": v
               for k, v in row[1].__dict__.items()
               if not k.startswith('_')}
        } for row in inventory_with_pricing])

        return df

    def default_pricing_algo(self, df: pd.DataFrame = None):
        if df is None:
            logger.debug("No DataFrame provided, fetching live inventory with most recent prices")
            df = self.get_live_inventory_with_most_recent_prices()
        # if tcg low price is < 0.35, set my_price to 0.35
        # if either tcg low price or tcg low price with shipping is under 5, set my_price to tcg low price * 1.25
        # if tcg low price with shipping is > 25 set price to tcg low price with shipping * 1.025
        # otherwise, set price to tcg low price with shipping * 1.10
        # also round to 2 decimal places
        df['my_price'] = df.apply(lambda row: round(
            0.35 if row['pricing_tcg_low_price'] < 0.35 else
            row['pricing_tcg_low_price'] * 1.25 if row['pricing_tcg_low_price'] < 5 or row['pricing_tcg_low_price_with_shipping'] < 5 else
            row['pricing_tcg_low_price_with_shipping'] * 1.025 if row['pricing_tcg_low_price_with_shipping'] > 25 else
            row['pricing_tcg_low_price_with_shipping'] * 1.10, 2), axis=1)
        # log rows with no price
        no_price = df[df['my_price'].isnull()]
        if len(no_price) > 0:
            logger.warning(f"Found {len(no_price)} rows with no price")
            logger.warning(no_price)
        # remove rows with no price
        df = df.dropna(subset=['my_price'])
        return df

    def convert_df_to_csv(self, df: pd.DataFrame):
        # Flip the mapping to be from current names TO desired names
        column_mapping = {
            'inventory_tcgplayer_id': 'TCGplayer Id',
            'inventory_product_line': 'Product Line',
            'inventory_set_name': 'Set Name',
            'inventory_product_name': 'Product Name',
            'inventory_title': 'Title',
            'inventory_number': 'Number',
            'inventory_rarity': 'Rarity',
            'inventory_condition': 'Condition',
            'pricing_tcg_market_price': 'TCG Market Price',
            'pricing_tcg_direct_low': 'TCG Direct Low',
            'pricing_tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
            'pricing_tcg_low_price': 'TCG Low Price',
            'inventory_total_quantity': 'Total Quantity',
            'inventory_add_to_quantity': 'Add to Quantity',
            'my_price': 'TCG Marketplace Price',
            'inventory_photo_url': 'Photo URL'
        }

        df['pricing_tcg_market_price'] = ""
        df['pricing_tcg_direct_low'] = ""
        df['pricing_tcg_low_price_with_shipping'] = ""
        df['pricing_tcg_low_price'] = ""
        df['inventory_total_quantity'] = ""
        df['inventory_add_to_quantity'] = 0
        df['inventory_photo_url'] = ""

        # First select the columns we want (using the keys of our mapping)
        # Then rename them to the desired names (the values in our mapping)
        df = df[column_mapping.keys()].rename(columns=column_mapping)

        return df.to_csv(index=False, quoting=1, quotechar='"')

    def convert_add_df_to_csv(self, df: pd.DataFrame):
        column_mapping = {
            'tcgproduct_tcgplayer_id': 'TCGplayer Id',
            'tcgproduct_product_line': 'Product Line',
            'tcgproduct_set_name': 'Set Name',
            'tcgproduct_product_name': 'Product Name',
            'tcgproduct_title': 'Title',
            'tcgproduct_number': 'Number',
            'tcgproduct_rarity': 'Rarity',
            'tcgproduct_condition': 'Condition',
            'pricing_tcg_market_price': 'TCG Market Price',
            'pricing_tcg_direct_low': 'TCG Direct Low',
            'pricing_tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
            'pricing_tcg_low_price': 'TCG Low Price',
            'tcgproduct_group_id': 'Total Quantity',
            'manabox_quantity': 'Add to Quantity',
            'my_price': 'TCG Marketplace Price',
            'tcgproduct_photo_url': 'Photo URL'
        }
        df['tcgproduct_group_id'] = ""
        df['pricing_tcg_market_price'] = ""
        df['pricing_tcg_direct_low'] = ""
        df['pricing_tcg_low_price_with_shipping'] = ""
        df['pricing_tcg_low_price'] = ""
        df['tcgproduct_photo_url'] = ""

        df = df[column_mapping.keys()].rename(columns=column_mapping)

        return df.to_csv(index=False, quoting=1, quotechar='"')

    def create_live_inventory_pricing_update_csv(self, algo: Callable = None) -> str:
        actual_algo = algo if algo is not None else self.default_pricing_algo
        df = actual_algo()
        csv = self.convert_df_to_csv(df)
        return csv

    def create_add_to_tcgplayer_csv(self, box_id: str = None, upload_id: str = None, algo: Callable = None) -> str:
        actual_algo = algo if algo is not None else self.default_pricing_algo
        if box_id and upload_id:
            raise ValueError("Cannot specify both box_id and upload_id")
        elif not box_id and not upload_id:
            raise ValueError("Must specify either box_id or upload_id")
        elif box_id:
            logger.debug("creating df")
            df = self.get_box_with_most_recent_prices(box_id)
        elif upload_id:
            raise NotImplementedError("Not yet implemented")
        df = actual_algo(df)
        csv = self.convert_add_df_to_csv(df)
        return csv
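For readability, the tiered rules in default_pricing_algo can be restated as a small standalone sketch; the thresholds and multipliers mirror the comments above, and the function name itself is purely illustrative:

def sketch_price(tcg_low: float, tcg_low_shipping: float) -> float:
    # Mirrors the commented rules in default_pricing_algo (illustrative only).
    if tcg_low < 0.35:
        return 0.35                                # floor very cheap cards at $0.35
    if tcg_low < 5 or tcg_low_shipping < 5:
        return round(tcg_low * 1.25, 2)            # cheap cards: 25% over TCG low
    if tcg_low_shipping > 25:
        return round(tcg_low_shipping * 1.025, 2)  # expensive cards: 2.5% over low-with-shipping
    return round(tcg_low_shipping * 1.10, 2)       # everything else: 10% over low-with-shipping

# e.g. sketch_price(0.20, 1.50) -> 0.35, sketch_price(12.00, 14.00) -> 15.40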
205 services/pricing_old.py Normal file
@@ -0,0 +1,205 @@
@@ -5,6 +5,7 @@ from io import StringIO
import pandas as pd
from services.file import FileService
from services.tcgplayer import TCGPlayerService
from services.storage import StorageService
from uuid import uuid4 as uuid
import logging
@@ -26,10 +27,11 @@ class ManaboxRow:
        self.quantity = row['quantity']

class ProductService:
    def __init__(self, db: Session, file_service: FileService, tcgplayer_service: TCGPlayerService):
    def __init__(self, db: Session, file_service: FileService, tcgplayer_service: TCGPlayerService, storage_service: StorageService):
        self.db = db
        self.file_service = file_service
        self.tcgplayer_service = tcgplayer_service
        self.storage_service = storage_service

    def _format_manabox_df(self, df: pd.DataFrame) -> pd.DataFrame:
        # format columns
@@ -140,7 +142,9 @@ class ProductService:
            df = self._manabox_file_to_df(file)
            for index, row in df.iterrows():
                manabox_row = ManaboxRow(row)
                # create card concepts - manabox, tcgplayer, card, product
                card_manabox = self.card_manabox_lookup_create_if_not_exist(manabox_row)
                # create staged inventory with quantity for processing down the marketplace pipeline
                staged_product = self.create_staged_product(file, card_manabox, row)
            # update file status
            with db_transaction(self.db):
@@ -148,4 +152,10 @@ class ProductService:
        except Exception as e:
            with db_transaction(self.db):
                file.status = 'error'
            raise e
        try:
            # create storage records for physically storing individual cards
            self.storage_service.store_staged_products_for_file(file.id)
        except Exception as e:
            logger.error(f"Error creating storage records: {str(e)}")
            raise e
176 services/storage.py Normal file
@@ -0,0 +1,176 @@
from sqlalchemy.orm import Session
from db.utils import db_transaction
from db.models import Warehouse, User, StagedFileProduct, StorageBlock, ProductBlock, File, Card
from uuid import uuid4 as uuid
from typing import List, TypedDict

class ProductAttributes(TypedDict):
    product_id: str
    card_number: str

class StorageService:
    def __init__(self, db: Session):
        self.db = db

    def get_or_create_user(self, username: str) -> User:
        user = self.db.query(User).filter(User.username == username).first()
        if user is None:
            user = User(
                id = str(uuid()),
                username = username
            )
            with db_transaction(self.db):
                self.db.add(user)
        return user

    def get_or_create_warehouse(self) -> Warehouse:
        warehouse = self.db.query(Warehouse).first()
        user = self.get_or_create_user('admin')
        if warehouse is None:
            warehouse = Warehouse(
                id = str(uuid()),
                user_id = user.id
            )
            with db_transaction(self.db):
                self.db.add(warehouse)
        return warehouse

    def get_staged_product(self, file_id: str) -> List[StagedFileProduct]:
        staged_product = self.db.query(StagedFileProduct).filter(StagedFileProduct.file_id == file_id).all()
        return staged_product

    def get_storage_block_name(self, warehouse: Warehouse, file_id: str) -> str:
        # Get file type from id
        current_file = self.db.query(File).filter(File.id == file_id).first()
        if not current_file:
            raise ValueError(f"No file found with id {file_id}")

        # Determine storage block type
        storage_block_type = 'rare' if 'rare' in current_file.type else 'common'
        prefix = storage_block_type[0]

        # Get most recent storage block with same type and warehouse id
        latest_block = (
            self.db.query(StorageBlock)
            .filter(
                StorageBlock.warehouse_id == warehouse.id,
                StorageBlock.type == storage_block_type
            )
            .order_by(StorageBlock.date_created.desc())
            .first()
        )

        # If no existing block, start with number 1
        if not latest_block:
            return f"{prefix}1"

        # Start with the next number after the latest block
        number = int(latest_block.name[1:])

        while True:
            number += 1
            new_name = f"{prefix}{number}"

            # Check if the new name already exists
            exists = (
                self.db.query(StorageBlock)
                .filter(
                    StorageBlock.warehouse_id == warehouse.id,
                    StorageBlock.name == new_name
                )
                .first()
            )

            if not exists:
                return new_name

    def create_storage_block(self, warehouse: Warehouse, file_id: str) -> StorageBlock:
        current_file = self.db.query(File).filter(File.id == file_id).first()
        if not current_file:
            raise ValueError(f"No file found with id {file_id}")

        storage_block_type = 'rare' if 'rare' in current_file.type else 'common'

        storage_block = StorageBlock(
            id = str(uuid()),
            warehouse_id = warehouse.id,
            name = self.get_storage_block_name(warehouse, file_id),
            type = storage_block_type
        )
        with db_transaction(self.db):
            self.db.add(storage_block)
        return storage_block

    def add_staged_product_to_product_block(
        self,
        staged_product: StagedFileProduct,
        storage_block: StorageBlock,
        product_attributes: ProductAttributes,
        block_index: int
    ) -> ProductBlock:
        """Create a new ProductBlock for a single unit of a staged product."""
        product_block = ProductBlock(
            id=str(uuid()),
            product_id=staged_product.product_id,
            block_id=storage_block.id,
            block_index=block_index
        )

        with db_transaction(self.db):
            self.db.add(product_block)

        return product_block

    def get_staged_product_attributes_for_storage(
        self,
        staged_product: StagedFileProduct
    ) -> List[ProductAttributes]:
        """Get attributes for each unit of a staged product."""
        result = (
            self.db.query(
                StagedFileProduct.product_id,
                StagedFileProduct.quantity,
                Card.number
            )
            .join(Card, Card.product_id == StagedFileProduct.product_id)
            .filter(StagedFileProduct.id == staged_product.id)
            .first()
        )

        if not result:
            return []

        return [
            ProductAttributes(
                product_id=result.product_id,
                card_number=result.number
            )
            for i in range(result.quantity)
        ]

    def store_staged_products_for_file(self, file_id: str) -> StorageBlock:
        """Store all staged products for a file in a new storage block."""
        warehouse = self.get_or_create_warehouse()
        storage_block = self.create_storage_block(warehouse, file_id)
        staged_products = self.get_staged_product(file_id)

        # Collect all product attributes first
        all_product_attributes = []
        for staged_product in staged_products:
            product_attributes_list = self.get_staged_product_attributes_for_storage(staged_product)
            for attrs in product_attributes_list:
                all_product_attributes.append((staged_product, attrs))

        # Sort by card number as integer to determine block indices
        sorted_attributes = sorted(all_product_attributes, key=lambda x: int(x[1]['card_number']))

        # Add products with correct block indices
        for block_index, (staged_product, product_attributes) in enumerate(sorted_attributes, 1):
            self.add_staged_product_to_product_block(
                staged_product=staged_product,
                storage_block=storage_block,
                product_attributes=product_attributes,
                block_index=block_index
            )

        return storage_block
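A rough usage sketch for the new StorageService (illustrative, not part of the commit); the session and file id are placeholders, while the call itself and the 'c1'/'r1' style block names follow the code above:

storage_service = StorageService(db=session)
# For a file whose type contains 'rare', blocks are named r1, r2, ...;
# otherwise they are treated as common and named c1, c2, ... (prefix + running number).
block = storage_service.store_staged_products_for_file(file_id=some_file_id)
print(block.name)  # e.g. "r1" for the first rare block in the warehouse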
@@ -3,16 +3,18 @@ import logging
from typing import Dict, Callable
from sqlalchemy.orm import Session
from services.product import ProductService
from services.tcgplayer import TCGPlayerService
from db.models import File


class TaskService:
    def __init__(self, db: Session, product_service: ProductService):
    def __init__(self, db: Session, product_service: ProductService, tcgplayer_service: TCGPlayerService):
        self.scheduler = BackgroundScheduler()
        self.logger = logging.getLogger(__name__)
        self.tasks: Dict[str, Callable] = {}
        self.db = db
        self.product_service = product_service
        self.tcgplayer_service = tcgplayer_service

    async def start(self):
        self.scheduler.start()
@@ -27,12 +29,21 @@ class TaskService:
            minute=0,
            id='daily_report'
        )

        self.scheduler.add_job(
            self.pricing_update,
            'cron',
            minute=28,
            id='pricing_update'
        )

    # Tasks that should be scheduled
    async def daily_report(self):
    def daily_report(self): # Removed async
        self.logger.info("Generating daily report")
        # Daily report logic


    def pricing_update(self): # Removed async
        self.logger.info("Hourly pricing update")
        self.tcgplayer_service.cron_load_prices()

    async def process_manabox_file(self, file: File):
        self.logger.info("Processing ManaBox file")
@@ -1,4 +1,4 @@
from db.models import ManaboxExportData, Box, TCGPlayerGroups, TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, TCGPlayerProduct, ManaboxTCGPlayerMapping, CardManabox, CardTCGPlayer
from db.models import ManaboxExportData, Box, TCGPlayerGroups, TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, TCGPlayerProduct, ManaboxTCGPlayerMapping, CardManabox, CardTCGPlayer, Price
import requests
from sqlalchemy.orm import Session
from db.utils import db_transaction
@@ -18,6 +18,8 @@ from typing import List, Dict, Optional
from io import StringIO, BytesIO
from services.pricing import PricingService
from sqlalchemy.sql import exists
import pandas as pd
from sqlalchemy.exc import SQLAlchemyError


logger = logging.getLogger(__name__)
@@ -559,6 +561,115 @@ class TCGPlayerService:
        return None
        return matching_product


    def get_pricing_export_for_all_products(self) -> bytes:
        """
        Retrieves pricing export data for all products in TCGPlayer format.

        Returns:
            bytes: Raw CSV data containing pricing information
        """
        try:
            all_group_ids = self.db.query(TCGPlayerGroups.group_id).all()
            all_group_ids = [str(group_id) for group_id, in all_group_ids]
            export_csv = self._get_export_csv(all_group_ids, convert=False)
            return export_csv
        except SQLAlchemyError as e:
            raise RuntimeError(f"Failed to retrieve group IDs: {str(e)}")

    def pricing_export_to_df(self, export_csv: bytes) -> pd.DataFrame:
        """
        Converts raw CSV pricing data to a pandas DataFrame.

        Args:
            export_csv (bytes): Raw CSV data in bytes format

        Returns:
            pd.DataFrame: Processed pricing data

        Raises:
            ValueError: If no CSV data is provided or if CSV parsing fails
        """
        if not export_csv:
            raise ValueError("No export CSV provided")

        csv_file = None
        try:
            text_content = export_csv.decode('utf-8')
            csv_file = StringIO(text_content)
            df = pd.read_csv(csv_file)

            if df.empty:
                raise ValueError("CSV data is empty")

            return df
        except UnicodeDecodeError as e:
            raise ValueError(f"Failed to decode CSV data: {str(e)}")
        except pd.errors.EmptyDataError:
            raise ValueError("CSV file is empty or malformed")
        finally:
            if csv_file:
                csv_file.close()

    def cron_load_prices(self) -> None:
        """
        Scheduled task to load and update product prices.
        Fetches current prices, processes them, and saves new price records to the database.
        """
        try:
            # Get and process price data
            price_csv = self.get_pricing_export_for_all_products()
            price_df = self.pricing_export_to_df(price_csv)

            # Clean column names
            price_df.columns = price_df.columns.str.lower().str.replace(' ', '_')

            # Get all products efficiently
            products_query = self.db.query(
                CardTCGPlayer.tcgplayer_id,
                CardTCGPlayer.product_id
            )
            product_df = pd.read_sql(products_query.statement, self.db.bind)

            # Merge dataframes
            merged_df = pd.merge(
                price_df,
                product_df,
                on='tcgplayer_id',
                how='inner'
            )

            # Define price columns to process
            price_columns = [
                'tcg_market_price',
                'tcg_direct_low',
                'tcg_low_price_with_shipping',
                'tcg_low_price',
                'tcg_marketplace_price'
            ]

            # Process in batches to avoid memory issues
            BATCH_SIZE = 1000
            for price_column in price_columns:
                records = []

                for start_idx in range(0, len(merged_df), BATCH_SIZE):
                    batch_df = merged_df.iloc[start_idx:start_idx + BATCH_SIZE]

                    batch_records = [
                        Price(
                            id=str(uuid.uuid4()),
                            product_id=row['product_id'],
                            type=price_column,
                            price=row[price_column]
                        )
                        for _, row in batch_df.iterrows()
                        if pd.notna(row[price_column]) # Skip rows with NaN prices
                    ]

                    with db_transaction(self.db):
                        self.db.bulk_save_objects(batch_records)
                        self.db.flush()

        except Exception as e:
            logger.error(f"Failed to load prices: {str(e)}")
            raise