I think most of this works, lol.

This commit is contained in:
2025-04-24 23:34:13 -04:00
parent 210a033695
commit 56ba750aad
50 changed files with 154001 additions and 2606 deletions

View File

@ -1,23 +1,26 @@
import os
import json
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any, Union, Generator, Callable, AsyncGenerator
from datetime import datetime, timezone
from typing import Optional, List, Dict, Any, Union
from sqlalchemy.orm import Session
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_product import TCGPlayerProduct
from app.models.tcgplayer_category import TCGPlayerCategory
from app.models.tcgplayer_products import TCGPlayerProduct, TCGPlayerCategory, TCGPlayerGroup
from app.models.inventory_management import SealedExpectedValue
from app.services.base_service import BaseService
from app.schemas.file import FileInDB
from app.db.database import transaction
from app.db.database import transaction as db_transaction
from app.schemas.transaction import PurchaseTransactionCreate, PurchaseItem
from app.contexts.inventory_item import InventoryItemContextFactory
from app.models.tcgplayer_products import MTGJSONSKU, MTGJSONCard
from app.models.tcgplayer_products import TCGPlayerPriceHistory
import csv
import io
import logging
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory
from sqlalchemy import and_, bindparam, update, insert
import py7zr
import shutil
import py7zr
logger = logging.getLogger(__name__)
class DataInitializationService(BaseService):
def __init__(self):
super().__init__(None)
@ -54,7 +57,8 @@ class DataInitializationService(BaseService):
file_record = await self.file_service.get_file_by_filename(db, filename)
if file_record:
# Check if cache is expired (7 days)
cache_age = datetime.now() - file_record.created_at
# Ensure both datetimes are timezone-aware
cache_age = datetime.now(timezone.utc) - file_record.created_at
if cache_age.days < 7:
with open(file_record.path, 'r') as f:
return json.load(f)
@ -70,7 +74,7 @@ class DataInitializationService(BaseService):
batch_size = 1000 # Process in batches of 1000
total_categories = len(categories)
with transaction(db):
with db_transaction(db):
for i in range(0, total_categories, batch_size):
batch = categories[i:i + batch_size]
for category_data in batch:
@ -150,7 +154,7 @@ class DataInitializationService(BaseService):
batch_size = 1000 # Process in batches of 1000
total_groups = len(groups)
with transaction(db):
with db_transaction(db):
for i in range(0, total_groups, batch_size):
batch = groups[i:i + batch_size]
for group_data in batch:
@ -214,8 +218,6 @@ class DataInitializationService(BaseService):
async def sync_products(self, db: Session, products_data: str):
"""Sync products data to the database using streaming for large datasets"""
import csv
import io
# Parse CSV data
csv_reader = csv.DictReader(io.StringIO(products_data))
@ -223,36 +225,46 @@ class DataInitializationService(BaseService):
batch_size = 1000 # Process in batches of 1000
total_products = len(products_list)
with transaction(db):
with db_transaction(db):
for i in range(0, total_products, batch_size):
batch = products_list[i:i + batch_size]
for product_data in batch:
existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == product_data["productId"]).first()
sub_type_name = product_data.get("subTypeName") if product_data.get("subTypeName") else "other"
existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.tcgplayer_product_id == product_data["productId"]).filter(TCGPlayerProduct.sub_type_name == sub_type_name).first()
if existing_product:
# Update existing product
for key, value in {
"name": product_data["name"],
"clean_name": product_data.get("cleanName"),
"image_url": product_data.get("imageUrl"),
"sub_type_name": product_data.get("subTypeName") if product_data.get("subTypeName") else "other",
"normalized_sub_type_name": product_data.get("subTypeName").lower().replace(" ", "_") if product_data.get("subTypeName") else "other",
"category_id": product_data.get("categoryId"),
"group_id": product_data.get("groupId"),
"url": product_data.get("url"),
"modified_on": datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None,
"image_count": product_data.get("imageCount", 0),
"ext_rarity": product_data.get("extRarity"),
"ext_subtype": product_data.get("extSubtype"),
"ext_oracle_text": product_data.get("extOracleText"),
"ext_number": product_data.get("extNumber"),
"low_price": float(product_data.get("lowPrice")) if product_data.get("lowPrice") else None,
"mid_price": float(product_data.get("midPrice")) if product_data.get("midPrice") else None,
"high_price": float(product_data.get("highPrice")) if product_data.get("highPrice") else None,
"market_price": float(product_data.get("marketPrice")) if product_data.get("marketPrice") else None,
"direct_low_price": float(product_data.get("directLowPrice")) if product_data.get("directLowPrice") else None,
"sub_type_name": product_data.get("subTypeName")
"ext_flavor_text": product_data.get("extFlavorText"),
"ext_power": product_data.get("extPower"),
"ext_toughness": product_data.get("extToughness"),
"ext_flavor_text": product_data.get("extFlavorText")
}.items():
setattr(existing_product, key, value)
else:
logger.debug(f"Creating new product: {product_data['productId']} product name: {product_data['name']}")
new_product = TCGPlayerProduct(
product_id=product_data["productId"],
tcgplayer_product_id=product_data["productId"],
name=product_data["name"],
normalized_sub_type_name=product_data.get("subTypeName").lower().replace(" ", "_") if product_data.get("subTypeName") else "other",
clean_name=product_data.get("cleanName"),
image_url=product_data.get("imageUrl"),
category_id=product_data.get("categoryId"),
@ -269,7 +281,7 @@ class DataInitializationService(BaseService):
high_price=float(product_data.get("highPrice")) if product_data.get("highPrice") else None,
market_price=float(product_data.get("marketPrice")) if product_data.get("marketPrice") else None,
direct_low_price=float(product_data.get("directLowPrice")) if product_data.get("directLowPrice") else None,
sub_type_name=product_data.get("subTypeName"),
sub_type_name=product_data.get("subTypeName") if product_data.get("subTypeName") else "other",
ext_power=product_data.get("extPower"),
ext_toughness=product_data.get("extToughness"),
ext_flavor_text=product_data.get("extFlavorText")
@ -319,50 +331,81 @@ class DataInitializationService(BaseService):
async def sync_archived_prices(self, db: Session, archived_prices_data: dict, date: datetime):
"""Sync archived prices data to the database using bulk operations.
Note: Historical prices are never updated, only new records are inserted."""
from sqlalchemy import insert
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory
# Prepare data for bulk operations
price_records = []
for price_data in archived_prices_data.get("results", []):
record = {
"product_id": price_data["productId"],
"date": date,
"sub_type_name": price_data["subTypeName"],
"low_price": price_data.get("lowPrice"),
"mid_price": price_data.get("midPrice"),
"high_price": price_data.get("highPrice"),
"market_price": price_data.get("marketPrice"),
"direct_low_price": price_data.get("directLowPrice")
}
price_records.append(record)
if not price_records:
if not archived_prices_data.get("success"):
logger.error("Price data sync failed - success flag is false")
return
# Get existing records in bulk to avoid duplicates
product_ids = [r["product_id"] for r in price_records]
sub_type_names = [r["sub_type_name"] for r in price_records]
# Get existing records in bulk to avoid duplicates using a composite key
existing_records = db.query(TCGPlayerPriceHistory).filter(
TCGPlayerPriceHistory.product_id.in_(product_ids),
TCGPlayerPriceHistory.date == date,
TCGPlayerPriceHistory.sub_type_name.in_(sub_type_names)
TCGPlayerPriceHistory.date == date
).all()
# Filter out existing records
existing_keys = {(r.product_id, r.date, r.sub_type_name) for r in existing_records}
to_insert = [
record for record in price_records
if (record["product_id"], record["date"], record["sub_type_name"]) not in existing_keys
]
# Perform bulk insert for new records only
if to_insert:
stmt = insert(TCGPlayerPriceHistory)
db.execute(stmt, to_insert)
db.commit()
# Prepare batch insert data
price_history_batch = []
# Process price data in batches
for price_data in archived_prices_data.get("results", []):
try:
# Get the subtype name from the price data
sub_type_name = price_data.get("subTypeName", "other")
# First try to find product with the requested subtype
product = db.query(TCGPlayerProduct).filter(
TCGPlayerProduct.tcgplayer_product_id == price_data["productId"],
TCGPlayerProduct.sub_type_name == sub_type_name
).first()
# If not found and subtype isn't "other", try with "other" subtype
if not product and sub_type_name != "other":
product = db.query(TCGPlayerProduct).filter(
TCGPlayerProduct.tcgplayer_product_id == price_data["productId"],
TCGPlayerProduct.sub_type_name == "other"
).first()
if product:
sub_type_name = "other"
#logger.info(f"Found product {price_data['productId']} with 'other' subtype as fallback for {sub_type_name}")
if not product:
logger.warning(f"No product found for {price_data['productId']} with subtype {sub_type_name} or 'other'")
continue
# Skip if record already exists
if (product.tcgplayer_product_id, date, sub_type_name) in existing_keys:
continue
# Validate and convert price data
try:
price_history = TCGPlayerPriceHistory(
product_id=product.tcgplayer_product_id,
sub_type_name=sub_type_name,
date=date,
low_price=float(price_data.get("lowPrice")) if price_data.get("lowPrice") else None,
mid_price=float(price_data.get("midPrice")) if price_data.get("midPrice") else None,
high_price=float(price_data.get("highPrice")) if price_data.get("highPrice") else None,
market_price=float(price_data.get("marketPrice")) if price_data.get("marketPrice") else None,
direct_low_price=float(price_data.get("directLowPrice")) if price_data.get("directLowPrice") else None
)
price_history_batch.append(price_history)
except (ValueError, TypeError) as e:
logger.error(f"Invalid price data for product {price_data['productId']}: {str(e)}")
continue
# Process in batches of 1000
if len(price_history_batch) >= 1000:
with db_transaction(db):
db.bulk_save_objects(price_history_batch)
price_history_batch = []
except Exception as e:
logger.error(f"Error processing price data for product {price_data['productId']}: {str(e)}")
continue
# Process any remaining records
if price_history_batch:
with db_transaction(db):
db.bulk_save_objects(price_history_batch)
async def init_archived_prices(self, db: Session, start_date: datetime, end_date: datetime, use_cache: bool = True, game_ids: List[int] = None) -> bool:
"""Initialize archived prices data"""
@ -470,7 +513,7 @@ class DataInitializationService(BaseService):
# Get SKUs data
skus_data = await mtgjson_service.get_skus(db, use_cache)
if skus_data and "data" in skus_data:
skus_count = await self.sync_mtgjson_skus(db, list(skus_data["data"].values()))
skus_count = await self.sync_mtgjson_skus(db, skus_data)
return {
"identifiers_processed": identifiers_count,
@ -479,27 +522,20 @@ class DataInitializationService(BaseService):
async def sync_mtgjson_identifiers(self, db: Session, identifiers_data: List[dict]) -> int:
"""Sync MTGJSON identifiers data to the database"""
from app.models.mtgjson_card import MTGJSONCard
count = 0
with transaction(db):
with db_transaction(db):
for card_data in identifiers_data:
if not isinstance(card_data, dict):
logger.debug(f"Skipping non-dict item: {card_data}")
continue
card_id = card_data.get("uuid")
if not card_id:
logger.debug(f"Skipping item without UUID: {card_data}")
continue
existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.card_id == card_id).first()
existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.mtgjson_uuid == card_data.get("uuid")).first()
if existing_card:
# Update existing card
for key, value in {
"name": card_data.get("name"),
"set_code": card_data.get("setCode"),
"uuid": card_data.get("uuid"),
"abu_id": card_data.get("identifiers", {}).get("abuId"),
"card_kingdom_etched_id": card_data.get("identifiers", {}).get("cardKingdomEtchedId"),
"card_kingdom_foil_id": card_data.get("identifiers", {}).get("cardKingdomFoilId"),
@ -530,10 +566,9 @@ class DataInitializationService(BaseService):
setattr(existing_card, key, value)
else:
new_card = MTGJSONCard(
card_id=card_id,
mtgjson_uuid=card_data.get("uuid"),
name=card_data.get("name"),
set_code=card_data.get("setCode"),
uuid=card_data.get("uuid"),
abu_id=card_data.get("identifiers", {}).get("abuId"),
card_kingdom_etched_id=card_data.get("identifiers", {}).get("cardKingdomEtchedId"),
card_kingdom_foil_id=card_data.get("identifiers", {}).get("cardKingdomFoilId"),
@ -566,38 +601,35 @@ class DataInitializationService(BaseService):
return count
async def sync_mtgjson_skus(self, db: Session, skus_data: List[List[dict]]) -> int:
async def sync_mtgjson_skus(self, db: Session, skus_data: dict) -> int:
"""Sync MTGJSON SKUs data to the database"""
from app.models.mtgjson_sku import MTGJSONSKU
count = 0
with transaction(db):
for product_data in skus_data:
for sku_data in product_data:
sku_id = sku_data.get("skuId")
if not sku_id:
logger.debug(f"Skipping item without SKU ID: {sku_data}")
continue
existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == str(sku_id)).first()
if existing_sku:
with db_transaction(db):
for mtgjson_uuid, product_data in skus_data['data'].items():
for sku_data in product_data:
existing_record = db.query(MTGJSONSKU).filter(MTGJSONSKU.mtgjson_uuid == mtgjson_uuid).filter(MTGJSONSKU.tcgplayer_sku_id == sku_data.get("skuId")).first()
if existing_record:
# Update existing SKU
for key, value in {
"product_id": sku_data.get("productId"),
"tcgplayer_product_id": sku_data.get("productId"),
"condition": sku_data.get("condition"),
"finish": sku_data.get("finish"),
"language": sku_data.get("language"),
"printing": sku_data.get("printing"),
"normalized_printing": sku_data.get("printing").lower().replace(" ", "_") if sku_data.get("printing") else None
}.items():
setattr(existing_sku, key, value)
setattr(existing_record, key, value)
else:
new_sku = MTGJSONSKU(
sku_id=sku_id,
product_id=sku_data.get("productId"),
mtgjson_uuid=mtgjson_uuid,
tcgplayer_sku_id=sku_data.get("skuId"),
tcgplayer_product_id=sku_data.get("productId"),
condition=sku_data.get("condition"),
finish=sku_data.get("finish"),
language=sku_data.get("language"),
printing=sku_data.get("printing"),
normalized_printing=sku_data.get("printing").lower().replace(" ", "_") if sku_data.get("printing") else None
)
db.add(new_sku)
count += 1
@ -654,4 +686,62 @@ class DataInitializationService(BaseService):
if file.path.startswith(subdir):
await self.file_service.delete_file(db, file.id)
await self.mtgjson_service.clear_cache()
print("Cache cleared")
print("Cache cleared")
async def initialize_inventory_data(self, db: Session) -> None:
    """Seed the database with test inventory data.

    Creates expected values for two sealed products, a test customer,
    vendor and marketplace, records a purchase transaction (1 box +
    2 cases), then opens the purchased box (via a Manabox CSV import)
    and the first case via the relevant services.

    Args:
        db: Database session

    Raises:
        ValueError: If one of the seed TCGPlayer products is missing.
    """
    with db_transaction(db):
        logger.info("Initializing inventory data...")

        def _product_id_for_sku(sku: str) -> int:
            # Fail with a clear error instead of AttributeError when the
            # seed product is absent (`.first()` may return None).
            product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.tcgplayer_sku == sku).first()
            if product is None:
                raise ValueError(f"TCGPlayer product with SKU {sku} not found")
            return product.id

        # set expected value
        product_id1 = _product_id_for_sku("562118")
        expected_value_box = SealedExpectedValue(
            product_id=product_id1,
            expected_value=120.69
        )
        db.add(expected_value_box)
        db.flush()

        product_id2 = _product_id_for_sku("562119")
        expected_value_case = SealedExpectedValue(
            product_id=product_id2,
            expected_value=820.69
        )
        db.add(expected_value_case)
        db.flush()

        inventory_service = self.get_service("inventory")
        customer = await inventory_service.create_customer(db, "Bob Smith")
        vendor = await inventory_service.create_vendor(db, "Joe Blow")
        marketplace = await inventory_service.create_marketplace(db, "Tcgplayer")
        transaction = await inventory_service.create_purchase_transaction(db, PurchaseTransactionCreate(
            vendor_id=vendor.id,
            transaction_date=datetime.now(),
            items=[PurchaseItem(product_id=product_id1, unit_price=100.69, quantity=1, is_case=False),
                   PurchaseItem(product_id=product_id2, unit_price=800.01, quantity=2, is_case=True, num_boxes=6)],
            transaction_notes="Test Transaction: 1 case and 2 boxes of foundations"
        ))
        logger.info(f"Transaction created: {transaction}")

        case_num = 0
        for txn_item in transaction.transaction_items:
            # Use a distinct name for the wrapped context instead of
            # rebinding the loop variable (as the original did).
            item = InventoryItemContextFactory(db).get_context(txn_item.physical_item.inventory_item)
            logger.info(f"Item: {item}")
            if item.physical_item.item_type == "sealed_box":
                manabox_service = self.get_service("manabox")
                file_path = 'app/data/test_data/manabox_test_file.csv'
                # Context-managed read so the handle is always closed
                # (the original leaked an open file object).
                with open(file_path, 'rb') as f:
                    file_bytes = f.read()
                manabox_file = await manabox_service.process_manabox_csv(db, file_bytes, {"source": "test", "description": "test"}, wait=True)
                # Ensure manabox_file is a list before passing it
                if not isinstance(manabox_file, list):
                    manabox_file = [manabox_file]
                sealed_box_service = self.get_service("sealed_box")
                sealed_box = sealed_box_service.get(db, item.physical_item.inventory_item.id)
                success = await inventory_service.process_manabox_import_staging(db, manabox_file, sealed_box)
                logger.info(f"sealed box opening success: {success}")
            elif item.physical_item.item_type == "sealed_case":
                # Only the first case is opened; later cases stay sealed.
                if case_num == 0:
                    logger.info(f"sealed case {case_num} opening...")
                    sealed_case_service = self.get_service("sealed_case")
                    success = await sealed_case_service.open_sealed_case(db, item.physical_item)
                    logger.info(f"sealed case {case_num} opening success: {success}")
                case_num += 1

        logger.info("Inventory data initialized")

View File

@ -93,8 +93,10 @@ class MTGJSONService(BaseExternalService):
subdir="identifiers"
)
with open(file_record.path, 'r') as f:
logger.debug(f"Loaded identifiers from MTGJSON: {file_record.path}")
json_file = await self._unzip_file(file_record, "identifiers", db)
with open(json_file.path, 'r') as f:
logger.debug(f"Loaded identifiers from MTGJSON: {json_file.path}")
return json.load(f)
async def get_skus(self, db: Session, use_cache: bool = True) -> Dict[str, Any]:
@ -115,8 +117,10 @@ class MTGJSONService(BaseExternalService):
subdir="skus"
)
with open(file_record.path, 'r') as f:
logger.debug(f"Loaded SKUs from MTGJSON: {file_record.path}")
json_file = await self._unzip_file(file_record, "skus", db)
with open(json_file.path, 'r') as f:
logger.debug(f"Loaded SKUs from MTGJSON: {json_file.path}")
return json.load(f)
async def clear_cache(self, db: Session) -> None:

View File

@ -16,6 +16,7 @@ from app.models.tcgplayer_order import (
TCGPlayerOrderProduct,
TCGPlayerOrderRefund
)
from app.models.tcgplayer_products import TCGPlayerProduct
from sqlalchemy.orm import Session
from app.db.database import transaction
import os
@ -190,8 +191,7 @@ class OrderManagementService(BaseTCGPlayerService):
direct_fee_amount=api_order.transaction.directFeeAmount,
taxes=[{"code": t.code, "amount": t.amount} for t in api_order.transaction.taxes]
)
# Create products
# Create products
db_products = [
TCGPlayerOrderProductCreate(
order_number=api_order.orderNumber,
@ -376,8 +376,8 @@ class OrderManagementService(BaseTCGPlayerService):
('extended_price', 'extendedPrice'),
('quantity', 'quantity'),
('url', 'url'),
('product_id', 'productId'),
('sku_id', 'skuId')
('tcgplayer_product_id', 'productId'),
('tcgplayer_sku_id', 'skuId')
]
for db_field, api_field in product_fields_to_compare:

View File

@ -6,8 +6,6 @@ import json
from datetime import datetime
from sqlalchemy.orm import Session
from app.db.database import transaction
from app.models.tcgplayer_inventory import TCGPlayerInventory
from app.models.tcgplayer_product import TCGPlayerProduct
from app.services.inventory_service import InventoryService
class FileProcessingService:

View File

@ -120,7 +120,7 @@ class FileService:
"""List files with optional filtering"""
query = db.query(File)
if file_type:
query = query.filter(File.type == file_type)
query = query.filter(File.type == file_type).order_by(File.created_at.desc())
files = query.offset(skip).limit(limit).all()
return [FileInDB.model_validate(file) for file in files]

View File

@ -1,63 +1,418 @@
from typing import List, Optional, Dict
from typing import List, Optional, Dict, TypedDict
from sqlalchemy.orm import Session
from app.models.tcgplayer_inventory import TCGPlayerInventory
from app.services.base_service import BaseService
from app.models.manabox_import_staging import ManaboxImportStaging
from app.contexts.inventory_item import InventoryItemContextFactory
from app.models.inventory_management import (
SealedBox, OpenEvent, OpenBox, OpenCard, InventoryItem, SealedCase,
Transaction, TransactionItem, Customer, Vendor, Marketplace
)
from app.schemas.file import FileInDB
from app.schemas.transaction import PurchaseTransactionCreate, SaleTransactionCreate, TransactionResponse
from app.db.database import transaction as db_transaction
from datetime import datetime
from typing import Any
import logging
class InventoryService(BaseService[TCGPlayerInventory]):
logger = logging.getLogger(__name__)
class InventoryService(BaseService):
def __init__(self):
super().__init__(TCGPlayerInventory)
super().__init__(None)
async def process_manabox_import_staging(self, db: Session, manabox_file_uploads: List[FileInDB], sealed_box: SealedBox) -> bool:
try:
with db_transaction(db):
# Check if box is already opened
existing_open_event = db.query(OpenEvent).filter(
OpenEvent.sealed_box_id == sealed_box.id,
OpenEvent.deleted_at.is_(None)
).first()
def create(self, db: Session, obj_in: Dict) -> TCGPlayerInventory:
"""
Create a new inventory item in the database.
Args:
db: Database session
obj_in: Dictionary containing inventory data
Returns:
Inventory: The created inventory object
"""
return super().create(db, obj_in)
if existing_open_event:
raise ValueError(f"Box {sealed_box.id} has already been opened")
def update(self, db: Session, db_obj: TCGPlayerInventory, obj_in: Dict) -> TCGPlayerInventory:
"""
Update an existing inventory item in the database.
Args:
db: Database session
db_obj: The inventory object to update
obj_in: Dictionary containing updated inventory data
Returns:
Inventory: The updated inventory object
"""
return super().update(db, db_obj, obj_in)
# 1. Get the InventoryItemContext for the sealed box
inventory_item_context = InventoryItemContextFactory(db).get_context(sealed_box.inventory_item)
# 2. Create the OpenEvent
open_event = OpenEvent(
sealed_box_id=sealed_box.id,
open_date=datetime.now(),
created_at=datetime.now(),
updated_at=datetime.now()
)
db.add(open_event)
db.flush() # Get the ID for relationships
# 3. Create the OpenBox from the SealedBox
open_box = OpenBox(
open_event_id=open_event.id,
product_id=sealed_box.product_id,
sealed_box_id=sealed_box.id,
created_at=datetime.now(),
updated_at=datetime.now()
)
db.add(open_box)
# 4. Process each card from the CSV
total_market_value = 0
cards = []
manabox_file_upload_ids = [manabox_file_upload.id for manabox_file_upload in manabox_file_uploads]
def get_by_tcgplayer_id(self, db: Session, tcgplayer_id: str) -> Optional[TCGPlayerInventory]:
"""
Get an inventory item by its TCGPlayer ID.
Args:
db: Database session
tcgplayer_id: The TCGPlayer ID to find
Returns:
Optional[TCGPlayerInventory]: The inventory item if found, None otherwise
"""
return db.query(self.model).filter(self.model.tcgplayer_id == tcgplayer_id).first()
staging_data = db.query(ManaboxImportStaging).filter(ManaboxImportStaging.file_id.in_(manabox_file_upload_ids)).all()
for record in staging_data:
for i in range(record.quantity):
# Create the OpenCard
open_card = OpenCard(
product_id=record.product_id,
open_event_id=open_event.id,
box_id=open_box.id,
created_at=datetime.now(),
updated_at=datetime.now()
)
db.add(open_card)
# Create the InventoryItem for the card
card_inventory_item = InventoryItem(
physical_item=open_card,
cost_basis=0, # Will be calculated later
created_at=datetime.now(),
updated_at=datetime.now()
)
db.add(card_inventory_item)
# Get the market value for cost basis distribution
card_context = InventoryItemContextFactory(db).get_context(card_inventory_item)
market_value = card_context.market_price
logger.debug(f"market_value: {market_value}")
total_market_value += market_value
cards.append((open_card, card_inventory_item, market_value))
# 5. Distribute the cost basis
original_cost_basis = inventory_item_context.cost_basis
for open_card, card_inventory_item, market_value in cards:
# Calculate this card's share of the cost basis
logger.debug(f"market_value: {market_value}, total_market_value: {total_market_value}, original_cost_basis: {original_cost_basis}")
cost_basis_share = (market_value / total_market_value) * original_cost_basis
card_inventory_item.cost_basis = cost_basis_share
return True
except Exception as e:
raise e
def get_by_set(self, db: Session, set_name: str, skip: int = 0, limit: int = 100) -> List[TCGPlayerInventory]:
async def create_purchase_transaction(
    self,
    db: Session,
    transaction_data: PurchaseTransactionCreate
) -> Transaction:
    """
    Creates a purchase transaction from a vendor.
    For each item:
    1. Creates a PhysicalItem (SealedCase/SealedBox)
    2. Creates an InventoryItem with the purchase price as cost basis
    3. Creates TransactionItems linking the purchase to the items

    Args:
        db: Database session
        transaction_data: Vendor, date, notes and the purchased item lines

    Returns:
        Transaction: The persisted purchase transaction with its total set
    """
    try:
        with db_transaction(db):
            # Create the transaction shell first so items can reference its id
            transaction = Transaction(
                vendor_id=transaction_data.vendor_id,
                transaction_type='purchase',
                transaction_date=transaction_data.transaction_date,
                transaction_notes=transaction_data.transaction_notes,
                created_at=datetime.now(),
                updated_at=datetime.now()
            )
            db.add(transaction)
            db.flush()

            total_amount = 0
            # Track each physical item together with the unit price of the
            # purchase line that produced it.  (The original second loop
            # re-used the stale loop variable `item` from the first loop,
            # so every TransactionItem was priced from the LAST line.)
            physical_items = []
            for item in transaction_data.items:
                # Create the physical item based on type
                # TODO: remove is_case and num_boxes, should derive from product_id
                # TODO: add support for purchasing single cards
                if item.is_case:
                    for _ in range(item.quantity):
                        physical_item = await SealedCaseService().create_sealed_case(
                            db=db,
                            product_id=item.product_id,
                            cost_basis=item.unit_price,
                            num_boxes=item.num_boxes or 1
                        )
                        physical_items.append((physical_item, item.unit_price))
                else:
                    for _ in range(item.quantity):
                        physical_item = await SealedBoxService().create_sealed_box(
                            db=db,
                            product_id=item.product_id,
                            cost_basis=item.unit_price
                        )
                        physical_items.append((physical_item, item.unit_price))

            for physical_item, unit_price in physical_items:
                # Create transaction item with the correct per-line price
                transaction_item = TransactionItem(
                    transaction_id=transaction.id,
                    physical_item_id=physical_item.id,
                    unit_price=unit_price,
                    created_at=datetime.now(),
                    updated_at=datetime.now()
                )
                db.add(transaction_item)
                total_amount += unit_price

            # Update transaction total
            transaction.transaction_total_amount = total_amount
            return transaction
    except Exception as e:
        raise e
async def create_sale_transaction(
    self,
    db: Session,
    transaction_data: SaleTransactionCreate
) -> Transaction:
    """Record a sale to a customer (draft: essentially pseudocode, not yet fully implemented).

    Args:
        db: Database session
        transaction_data: Customer, marketplace, date, notes and sold items

    Returns:
        Transaction: The persisted sale transaction

    Raises:
        ValueError: If a referenced inventory item does not exist.
    """
    try:
        with db_transaction(db):
            sale = Transaction(
                customer_id=transaction_data.customer_id,
                marketplace_id=transaction_data.marketplace_id,
                transaction_type='sale',
                transaction_date=transaction_data.transaction_date,
                transaction_notes=transaction_data.transaction_notes,
                created_at=datetime.now(),
                updated_at=datetime.now()
            )
            db.add(sale)
            db.flush()

            running_total = 0
            for sold in transaction_data.items:
                # Look up the (non-deleted) inventory item being sold.
                inv = db.query(InventoryItem).filter(
                    InventoryItem.id == sold.inventory_item_id,
                    InventoryItem.deleted_at.is_(None)
                ).first()
                if not inv:
                    raise ValueError(f"Inventory item {sold.inventory_item_id} not found")

                db.add(TransactionItem(
                    transaction_id=sale.id,
                    physical_item_id=inv.physical_item_id,
                    unit_price=sold.unit_price,
                    created_at=datetime.now(),
                    updated_at=datetime.now()
                ))
                running_total += sold.unit_price

                # Close out any marketplace listing for the sold item.
                if transaction_data.marketplace_id and inv.marketplace_listings:
                    listing = inv.marketplace_listings
                    listing.delisting_date = transaction_data.transaction_date
                    listing.updated_at = datetime.now()

            sale.transaction_total_amount = running_total
            return sale
    except Exception as e:
        raise e
Args:
db: Database session
set_name: The name of the set to filter by
skip: Number of records to skip (for pagination)
limit: Maximum number of records to return
Returns:
List[TCGPlayerInventory]: List of inventory items from the specified set
"""
return db.query(self.model).filter(self.model.set_name == set_name).offset(skip).limit(limit).all()
async def create_customer(
    self,
    db: Session,
    customer_name: str
) -> Customer:
    """Get or create a Customer by name.

    Returns the existing customer when one with this name already exists;
    otherwise inserts and returns a new record.
    """
    try:
        # Reuse an existing customer with this name if present.
        found = db.query(Customer).filter(Customer.name == customer_name).first()
        if found:
            return found
        with db_transaction(db):
            new_customer = Customer(
                name=customer_name,
                created_at=datetime.now(),
                updated_at=datetime.now()
            )
            db.add(new_customer)
            db.flush()
            return new_customer
    except Exception as e:
        raise e
async def create_vendor(
    self,
    db: Session,
    vendor_name: str
) -> Vendor:
    """Get or create a Vendor by name.

    Returns the existing vendor when one with this name already exists;
    otherwise inserts and returns a new record.
    """
    try:
        # Reuse an existing vendor with this name if present.
        found = db.query(Vendor).filter(Vendor.name == vendor_name).first()
        if found:
            return found
        with db_transaction(db):
            new_vendor = Vendor(
                name=vendor_name,
                created_at=datetime.now(),
                updated_at=datetime.now()
            )
            db.add(new_vendor)
            db.flush()
            return new_vendor
    except Exception as e:
        raise e
async def create_marketplace(
    self,
    db: Session,
    marketplace_name: str
) -> Marketplace:
    """Get or create a Marketplace by name.

    Returns the existing marketplace when one with this name already
    exists; otherwise inserts and returns a new record.
    """
    try:
        # Reuse an existing marketplace with this name if present.
        found = db.query(Marketplace).filter(Marketplace.name == marketplace_name).first()
        if found:
            return found
        with db_transaction(db):
            new_marketplace = Marketplace(
                name=marketplace_name,
                created_at=datetime.now(),
                updated_at=datetime.now()
            )
            db.add(new_marketplace)
            db.flush()
            return new_marketplace
    except Exception as e:
        raise e
class SealedBoxService(BaseService[SealedBox]):
    """Creation helpers for sealed boxes and their inventory items."""

    def __init__(self):
        super().__init__(SealedBox)

    async def create_sealed_box(
        self,
        db: Session,
        product_id: int,
        cost_basis: float,
        case_id: Optional[int] = None
    ) -> SealedBox:
        """Create a SealedBox (optionally linked to a case) plus its InventoryItem.

        Args:
            db: Database session
            product_id: Product id of the box
            cost_basis: Purchase cost recorded on the inventory item
            case_id: Optional id of the SealedCase this box belongs to

        Returns:
            SealedBox: The newly created box

        Raises:
            ValueError: If case_id is given but no such case exists.
        """
        try:
            with db_transaction(db):
                box = SealedBox(
                    product_id=product_id,
                    created_at=datetime.now(),
                    updated_at=datetime.now()
                )
                db.add(box)
                db.flush()  # assign an id before wiring relationships

                if case_id:
                    # Validate the parent case before linking.
                    if db.query(SealedCase).filter(SealedCase.id == case_id).first() is None:
                        raise ValueError(f"Case {case_id} not found")
                    box.case_id = case_id

                # Inventory record carrying the purchase cost basis.
                db.add(InventoryItem(
                    physical_item=box,
                    cost_basis=cost_basis,
                    created_at=datetime.now(),
                    updated_at=datetime.now()
                ))
                return box
        except Exception as e:
            raise e
class SealedCaseService(BaseService[SealedCase]):
    """Creation and opening helpers for sealed cases."""

    def __init__(self):
        super().__init__(SealedCase)

    async def create_sealed_case(self, db: Session, product_id: int, cost_basis: float, num_boxes: int) -> SealedCase:
        """Create a SealedCase plus its backing InventoryItem.

        Args:
            db: Database session
            product_id: Product id of the case
            cost_basis: Purchase cost recorded on the inventory item
            num_boxes: How many boxes the case contains

        Returns:
            SealedCase: The newly created case
        """
        try:
            with db_transaction(db):
                case = SealedCase(
                    product_id=product_id,
                    num_boxes=num_boxes,
                    created_at=datetime.now(),
                    updated_at=datetime.now()
                )
                db.add(case)
                db.flush()  # assign an id before creating related rows

                # Inventory record carrying the purchase cost basis.
                db.add(InventoryItem(
                    physical_item=case,
                    cost_basis=cost_basis,
                    created_at=datetime.now(),
                    updated_at=datetime.now()
                ))
                return case
        except Exception as e:
            raise e

    async def open_sealed_case(self, db: Session, sealed_case: SealedCase) -> bool:
        """Open a sealed case: record an OpenEvent and create one SealedBox
        (with its own InventoryItem) per box in the case.

        Returns:
            bool: True when the open completed.
        """
        try:
            ctx = InventoryItemContextFactory(db).get_context(sealed_case.inventory_item)
            with db_transaction(db):
                event = OpenEvent(
                    sealed_case_id=ctx.physical_item.id,
                    open_date=datetime.now(),
                    created_at=datetime.now(),
                    updated_at=datetime.now()
                )
                db.add(event)
                db.flush()  # assign an id before creating boxes

                for _ in range(sealed_case.num_boxes):
                    box = SealedBox(
                        product_id=ctx.physical_item.product_id,
                        created_at=datetime.now(),
                        updated_at=datetime.now()
                    )
                    db.add(box)
                    db.flush()
                    # NOTE(review): every box inherits the case's ENTIRE
                    # cost basis — presumably it should be split across
                    # boxes; original behavior preserved as-is, confirm.
                    db.add(InventoryItem(
                        physical_item=box,
                        cost_basis=ctx.cost_basis,
                        created_at=datetime.now(),
                        updated_at=datetime.now()
                    ))
                return True
        except Exception as e:
            raise e

View File

@ -2,23 +2,22 @@ from app.services.base_service import BaseService
from sqlalchemy.orm import Session
from app.db.database import transaction
from app.schemas.file import FileInDB
from app.models.mtgjson_card import MTGJSONCard
from app.models.mtgjson_sku import MTGJSONSKU
from app.models.tcgplayer_inventory import TCGPlayerInventory
from app.models.tcgplayer_product import TCGPlayerProduct
from app.models.tcgplayer_category import TCGPlayerCategory
from app.models.tcgplayer_group import TCGPlayerGroup
from typing import Dict, Any
from app.models.tcgplayer_products import TCGPlayerProduct, MTGJSONCard, MTGJSONSKU
from app.models.critical_error_log import CriticalErrorLog
from app.models.manabox_import_staging import ManaboxImportStaging
from typing import Dict, Any, Union, List
import csv
import logging
from datetime import datetime
import asyncio
logger = logging.getLogger(__name__)
class ManaboxService(BaseService):
    """Service that ingests Manabox CSV exports and stages them for inventory import."""

    def __init__(self):
        # No backing model: this service orchestrates file saving and staging rows
        # rather than wrapping a single table.
        super().__init__(None)
# NOTE(review): this span is a diff rendering, not runnable Python — it interleaves
# the OLD and NEW versions of process_manabox_csv (both signatures appear below,
# and a stray hunk header "@ -26,77 +25,88 @@" sits mid-call). The comments added
# here annotate intent only; resolve the diff before treating this as source.
async def process_manabox_csv(self, db: Session, bytes: bytes) -> bool:
# New signature: also takes upload metadata and a wait flag; when wait=True the
# caller gets the processed FileInDB records back instead of a bare bool.
async def process_manabox_csv(self, db: Session, bytes: bytes, metadata: Dict[str, Any], wait: bool = False) -> Union[bool, List[FileInDB]]:
# save file
file = await self.file_service.save_file(
db=db,
@ -26,77 +25,88 @@ class ManaboxService(BaseService):
filename=f"manabox_{datetime.now().strftime('%Y%m%d%H%M%S')}.csv",
subdir="manabox",
file_type="manabox",
content_type="text/csv"
content_type="text/csv",
metadata=metadata
)
# --- OLD (removed) synchronous body begins here ---
# Read the CSV file
with open(file.path, 'r') as file:
reader = csv.DictReader(file)
# validate headers
if reader.fieldnames != ['Name', 'Set code', 'Set name', 'Collector number', 'Foil', 'Rarity', 'Quantity', 'ManaBox ID', 'Scryfall ID', 'Purchase price', 'Misprint', 'Altered', 'Condition', 'Language', 'Purchase price currency']:
logger.error("Invalid headers")
return False
# skip header row
# NOTE(review): DictReader has already consumed the header line, so this
# next() discards the FIRST DATA ROW — a bug in the old version.
next(reader)
for row in reader:
# match scryfall id to mtgjson scryfall id, make sure only one distinct tcgplayer id
mtg_json = db.query(MTGJSONCard).filter(MTGJSONCard.scryfall_id == row['Scryfall ID']).all()
# count distinct tcgplayer ids
cd_tcgplayer_ids = db.query(MTGJSONCard.tcgplayer_product_id).filter(MTGJSONCard.scryfall_id == row['Scryfall ID']).distinct().count()
if cd_tcgplayer_ids != 1:
logger.error(f"Error: {cd_tcgplayer_ids} TCGplayer IDs found for {row['Scryfall ID']}")
return False
else:
mtg_json = mtg_json[0]
# get tcgplayer sku id from mtgjson skus
language = 'ENGLISH' if row['Language'] == 'en' else 'JAPANESE' if row['Language'] == 'ja' else None
if row['Foil'].lower() == 'etched':
printing = 'FOIL'
# NOTE(review): `.all().distinct()` would raise AttributeError at runtime —
# distinct() must be called on the Query BEFORE .all(). (The new version of
# this logic, further below in the file, has the call order corrected.)
tcgplayer_sku = db.query(MTGJSONSKU.sku_id).filter(MTGJSONSKU.product_id == mtg_json.tcgplayer_etched_product_id).filter(MTGJSONSKU.condition == row['Condition'].replace('_', ' ').upper()).filter(MTGJSONSKU.printing == printing).filter(MTGJSONSKU.language == language).all().distinct()
else:
printing = 'FOIL' if row['Foil'].lower() == 'foil' else 'NON FOIL'
tcgplayer_sku = db.query(MTGJSONSKU.sku_id).filter(MTGJSONSKU.product_id == mtg_json.tcgplayer_product_id).filter(MTGJSONSKU.condition == row['Condition'].replace('_', ' ').upper()).filter(MTGJSONSKU.printing == printing).filter(MTGJSONSKU.language == language).all().distinct()
# count distinct tcgplayer skus
if len(tcgplayer_sku) == 0:
logger.error(f"Error: No TCGplayer SKU found for {mtg_json.name} {row['Condition']}")
logger.debug(row)
logger.debug(language)
logger.debug(row['Condition'].replace('_', ' ').upper())
logger.debug(mtg_json.tcgplayer_product_id)
logger.debug(printing)
return False
elif len(tcgplayer_sku) > 1:
logger.error(f"Error: {len(tcgplayer_sku)} TCGplayer SKUs found for {mtg_json.name} {row['Condition']}")
return False
else:
tcgplayer_sku = tcgplayer_sku[0]
# look up tcgplayer product data for sku
tcgplayer_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == tcgplayer_sku.product_id).first()
# temp just dump into tcgplayer inventory
condition = f'{tcgplayer_sku.condition.title()} Foil' if 'Foil' in tcgplayer_product.sub_type_name else f'{tcgplayer_sku.condition.title()}'
# join tcgplayer product on tcgplayer category on category_id and get name
product_line = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == tcgplayer_product.category_id).first().name
# join tcgplayer product on tcgplayer group on group_id and get name
set_name = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == tcgplayer_product.group_id).first().name
with transaction(db):
tcgplayer_inventory = TCGPlayerInventory(
tcgplayer_id=tcgplayer_sku.sku_id,
product_line=product_line,
set_name=set_name,
product_name=tcgplayer_product.name,
title=None,
number=tcgplayer_product.ext_number,
rarity=tcgplayer_product.ext_rarity,
condition=condition,
tcg_market_price=tcgplayer_product.market_price,
tcg_direct_low=tcgplayer_product.direct_low_price,
tcg_low_price_with_shipping=tcgplayer_product.low_price,
tcg_low_price=tcgplayer_product.low_price,
total_quantity=row['Quantity'],
add_to_quantity=row['Quantity']
)
db.add(tcgplayer_inventory)
# --- NEW (added) asynchronous tail begins here ---
# Create the background task
task = asyncio.create_task(self._process_file_background(db, file))
# If wait is True, wait for the task to complete and return the file
if wait:
await task
return_value = await self.file_service.get_file(db, file.id)
return [return_value] if return_value else []
return True
# Name,Set code,Set name,Collector number,Foil,Rarity,Quantity,ManaBox ID,Scryfall ID,Purchase price,Misprint,Altered,Condition,Language,Purchase price currency
async def _process_file_background(self, db: Session, file: FileInDB):
    """Parse a saved Manabox CSV and stage each row for inventory import.

    For every row, the Scryfall id is resolved to exactly one MTGJSON card, then
    to exactly one TCGPlayer SKU (condition/printing/language must all match),
    then to a TCGPlayer product, and a ManaboxImportStaging record is written.
    Rows that do not resolve uniquely are skipped and recorded in
    CriticalErrorLog; an unexpected exception aborts the file and is recorded
    the same way.

    Args:
        db: Active SQLAlchemy session.
        file: FileInDB record pointing at the CSV on disk.
    """
    try:
        with open(file.path, 'r') as csv_file:
            reader = csv.DictReader(csv_file)
            # BUGFIX: csv.DictReader already consumes the header line; the old
            # explicit `next(reader)` here silently discarded the first DATA
            # row of every file, so it has been removed.
            for row in reader:
                # Require exactly one distinct TCGPlayer id for this Scryfall id.
                mtg_json = db.query(MTGJSONCard).filter(MTGJSONCard.scryfall_id == row['Scryfall ID']).all()
                cd_tcgplayer_ids = db.query(MTGJSONCard.tcgplayer_sku_id).filter(MTGJSONCard.scryfall_id == row['Scryfall ID']).distinct().count()
                if cd_tcgplayer_ids != 1:
                    self._log_critical(db, f"Error: multiple TCGplayer IDs found for scryfall id: {row['Scryfall ID']} found {cd_tcgplayer_ids} ids expected 1")
                    continue
                mtg_json = mtg_json[0]

                # Map Manabox language codes onto MTGJSON SKU language values.
                # NOTE(review): any language other than en/ja maps to None —
                # confirm downstream filters are meant to match NULL there.
                language = 'ENGLISH' if row['Language'] == 'en' else 'JAPANESE' if row['Language'] == 'ja' else None
                condition = row['Condition'].replace('_', ' ').upper()

                # Etched foils live under a separate SKU id on the MTGJSON card.
                if row['Foil'].lower() == 'etched':
                    printing = 'FOIL'
                    sku_id = mtg_json.tcgplayer_etched_sku_id
                else:
                    printing = 'FOIL' if row['Foil'].lower() == 'foil' else 'NON FOIL'
                    sku_id = mtg_json.tcgplayer_sku_id
                tcgplayer_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.tcgplayer_sku_id == sku_id).filter(MTGJSONSKU.condition == condition).filter(MTGJSONSKU.printing == printing).filter(MTGJSONSKU.language == language).distinct().all()

                # Require exactly one matching SKU; anything else is unrecoverable for this row.
                if len(tcgplayer_sku) == 0:
                    self._log_critical(db, f"Error: No TCGplayer SKU found for mtgjson name: {mtg_json.name} condition: {row['Condition']} language: {language} printing: {printing}")
                    continue
                if len(tcgplayer_sku) > 1:
                    self._log_critical(db, f"Error: {len(tcgplayer_sku)} TCGplayer SKUs found for mtgjson name: {mtg_json.name} condition: {row['Condition']} language: {language} printing: {printing}")
                    continue
                tcgplayer_sku = tcgplayer_sku[0]

                # Resolve the concrete TCGPlayer product row for this SKU/condition/printing.
                tcgplayer_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.tcgplayer_product_id == tcgplayer_sku.tcgplayer_product_id).filter(TCGPlayerProduct.condition == condition).filter(TCGPlayerProduct.language == language).filter(TCGPlayerProduct.printing == printing).first()
                quantity = int(row['Quantity'])
                with transaction(db):
                    manabox_import_staging = ManaboxImportStaging(
                        file_id=file.id,
                        product_id=tcgplayer_product.id,
                        quantity=quantity,
                        created_at=datetime.now(),
                        updated_at=datetime.now()
                    )
                    db.add(manabox_import_staging)
    except Exception as e:
        self._log_critical(db, f"Error processing file: {str(e)}")

def _log_critical(self, db: Session, message: str) -> None:
    """Log an error and persist it to CriticalErrorLog in its own transaction."""
    logger.error(message)
    with transaction(db):
        critical_error_log = CriticalErrorLog(
            error_message=message
        )
        db.add(critical_error_log)

View File

@ -29,7 +29,11 @@ class ServiceManager:
'file': 'app.services.file_service.FileService',
'tcgcsv': 'app.services.external_api.tcgcsv.tcgcsv_service.TCGCSVService',
'mtgjson': 'app.services.external_api.mtgjson.mtgjson_service.MTGJSONService',
'manabox': 'app.services.manabox_service.ManaboxService'
'manabox': 'app.services.manabox_service.ManaboxService',
'inventory': 'app.services.inventory_service.InventoryService',
'sealed_box': 'app.services.inventory_service.SealedBoxService',
'sealed_case': 'app.services.inventory_service.SealedCaseService'
}
self._service_configs = {
'label_printer': {'printer_api_url': "http://192.168.1.110:8000"},

View File

@ -10,7 +10,7 @@ import aiohttp
import jinja2
from weasyprint import HTML
from app.services.base_service import BaseService
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_products import TCGPlayerProduct
log = logging.getLogger(__name__)