2025-11-26 17:21:20 -05:00
parent 7b17684423
commit e06dcc3dee
15 changed files with 602 additions and 54 deletions


@@ -0,0 +1,128 @@
"""make_tcgplayer_and_mtgjson_constraints_deferrable
Revision ID: 49535e0cd103
Revises: 236605bcac6e
Create Date: 2025-11-25 16:36:13.333042
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '49535e0cd103'
down_revision: Union[str, None] = '236605bcac6e'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
"""Upgrade schema."""
# Drop existing foreign key constraints
op.drop_constraint('mtgjson_cards_mtgjson_uuid_fkey', 'mtgjson_cards', type_='foreignkey')
op.drop_constraint('tcgplayer_groups_category_id_fkey', 'tcgplayer_groups', type_='foreignkey')
op.drop_constraint('tcgplayer_products_category_id_fkey', 'tcgplayer_products', type_='foreignkey')
op.drop_constraint('tcgplayer_products_group_id_fkey', 'tcgplayer_products', type_='foreignkey')
op.drop_constraint('fk_sku_to_product_composite', 'mtgjson_skus', type_='foreignkey')
op.drop_constraint('mtgjson_skus_mtgjson_uuid_fkey', 'mtgjson_skus', type_='foreignkey')
# Recreate foreign key constraints as DEFERRABLE INITIALLY DEFERRED
op.create_foreign_key(
'mtgjson_cards_mtgjson_uuid_fkey',
'mtgjson_cards', 'mtgjson_cards',
['mtgjson_uuid'], ['mtgjson_uuid'],
deferrable=True, initially='DEFERRED'
)
op.create_foreign_key(
'tcgplayer_groups_category_id_fkey',
'tcgplayer_groups', 'tcgplayer_categories',
['category_id'], ['category_id'],
deferrable=True, initially='DEFERRED'
)
op.create_foreign_key(
'tcgplayer_products_category_id_fkey',
'tcgplayer_products', 'tcgplayer_categories',
['category_id'], ['category_id'],
deferrable=True, initially='DEFERRED'
)
op.create_foreign_key(
'tcgplayer_products_group_id_fkey',
'tcgplayer_products', 'tcgplayer_groups',
['group_id'], ['group_id'],
deferrable=True, initially='DEFERRED'
)
# Composite foreign key
op.create_foreign_key(
'fk_sku_to_product_composite',
'mtgjson_skus', 'tcgplayer_products',
['tcgplayer_product_id', 'normalized_printing'],
['tcgplayer_product_id', 'normalized_sub_type_name'],
deferrable=True, initially='DEFERRED'
)
op.create_foreign_key(
'mtgjson_skus_mtgjson_uuid_fkey',
'mtgjson_skus', 'mtgjson_cards',
['mtgjson_uuid'], ['mtgjson_uuid'],
deferrable=True, initially='DEFERRED'
)

def downgrade() -> None:
"""Downgrade schema."""
# Drop deferrable constraints
op.drop_constraint('mtgjson_cards_mtgjson_uuid_fkey', 'mtgjson_cards', type_='foreignkey')
op.drop_constraint('tcgplayer_groups_category_id_fkey', 'tcgplayer_groups', type_='foreignkey')
op.drop_constraint('tcgplayer_products_category_id_fkey', 'tcgplayer_products', type_='foreignkey')
op.drop_constraint('tcgplayer_products_group_id_fkey', 'tcgplayer_products', type_='foreignkey')
op.drop_constraint('fk_sku_to_product_composite', 'mtgjson_skus', type_='foreignkey')
op.drop_constraint('mtgjson_skus_mtgjson_uuid_fkey', 'mtgjson_skus', type_='foreignkey')
# Recreate original constraints (some were DEFERRABLE, some weren't)
op.create_foreign_key(
'mtgjson_cards_mtgjson_uuid_fkey',
'mtgjson_cards', 'mtgjson_cards',
['mtgjson_uuid'], ['mtgjson_uuid'],
deferrable=True
)
op.create_foreign_key(
'tcgplayer_groups_category_id_fkey',
'tcgplayer_groups', 'tcgplayer_categories',
['category_id'], ['category_id'],
deferrable=True
)
op.create_foreign_key(
'tcgplayer_products_category_id_fkey',
'tcgplayer_products', 'tcgplayer_categories',
['category_id'], ['category_id'],
deferrable=True
)
op.create_foreign_key(
'tcgplayer_products_group_id_fkey',
'tcgplayer_products', 'tcgplayer_groups',
['group_id'], ['group_id'],
deferrable=True
)
# Composite foreign key - originally NOT deferrable
op.create_foreign_key(
'fk_sku_to_product_composite',
'mtgjson_skus', 'tcgplayer_products',
['tcgplayer_product_id', 'normalized_printing'],
['tcgplayer_product_id', 'normalized_sub_type_name']
)
op.create_foreign_key(
'mtgjson_skus_mtgjson_uuid_fkey',
'mtgjson_skus', 'mtgjson_cards',
['mtgjson_uuid'], ['mtgjson_uuid']
)
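
For context on what this migration buys: with DEFERRABLE INITIALLY DEFERRED, PostgreSQL validates these foreign keys at COMMIT rather than per statement, so a bulk load inside one transaction can insert rows in any order. A minimal sketch of the effect (the DSN and column lists are illustrative assumptions, not part of this commit; the referenced tcgplayer_categories row is assumed to already exist):

from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg2://user:pass@localhost/cards")  # hypothetical DSN

with engine.begin() as conn:
    # Child row first: the FK to tcgplayer_groups is not checked yet,
    # because the constraint is INITIALLY DEFERRED.
    conn.execute(
        text(
            "INSERT INTO tcgplayer_products (tcgplayer_product_id, category_id, group_id) "
            "VALUES (:pid, :cat, :grp)"
        ),
        {"pid": 1, "cat": 1, "grp": 100},
    )
    # Parent row second; the deferred check passes when the transaction commits.
    conn.execute(
        text("INSERT INTO tcgplayer_groups (group_id, category_id) VALUES (:grp, :cat)"),
        {"grp": 100, "cat": 1},
    )
# engine.begin() commits here, which is when the deferred FKs are validated.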


@@ -0,0 +1,42 @@
"""add inventory locations table
Revision ID: add_inventory_locations
Revises: c4a9e19ce012
Create Date: 2025-11-26 13:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'add_inventory_locations'
down_revision: Union[str, None] = 'c4a9e19ce012'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
"""Upgrade schema."""
op.create_table('inventory_locations',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('sku_id', sa.Integer(), nullable=False),
sa.Column('quantity', sa.Integer(), nullable=False, server_default='0'),
sa.Column('location', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_inventory_locations_id'), 'inventory_locations', ['id'], unique=False)
op.create_index(op.f('ix_inventory_locations_sku_id'), 'inventory_locations', ['sku_id'], unique=False)
op.create_index('ix_inventory_locations_sku_location', 'inventory_locations', ['sku_id', 'location'], unique=False)

def downgrade() -> None:
"""Downgrade schema."""
op.drop_index('ix_inventory_locations_sku_location', table_name='inventory_locations')
op.drop_index(op.f('ix_inventory_locations_sku_id'), table_name='inventory_locations')
op.drop_index(op.f('ix_inventory_locations_id'), table_name='inventory_locations')
op.drop_table('inventory_locations')
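
A short usage sketch for the new table (values invented; raw SQL through SQLAlchemy is an assumption here, the app itself goes through the ORM model added below). The composite (sku_id, location) index is what backs the per-SKU location lookups the pull sheet service performs:

from sqlalchemy import create_engine, text

engine = create_engine("postgresql+psycopg2://user:pass@localhost/cards")  # hypothetical DSN

with engine.begin() as conn:
    # Seed one on-hand row for a SKU.
    conn.execute(
        text(
            "INSERT INTO inventory_locations (sku_id, quantity, location) "
            "VALUES (:sku, :qty, :loc)"
        ),
        {"sku": 12345, "qty": 4, "loc": "box-A3"},
    )
    # This lookup pattern is served by the composite index.
    rows = conn.execute(
        text(
            "SELECT location, quantity FROM inventory_locations "
            "WHERE sku_id = :sku AND quantity > 0"
        ),
        {"sku": 12345},
    ).fetchall()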


@@ -0,0 +1,45 @@
"""make_physical_items_fk_deferrable
Revision ID: c4a9e19ce012
Revises: 49535e0cd103
Create Date: 2025-11-26 10:49:31.015075
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'c4a9e19ce012'
down_revision: Union[str, None] = '49535e0cd103'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None

def upgrade() -> None:
"""Upgrade schema."""
# Drop existing foreign key constraint from physical_items to mtgjson_skus
op.drop_constraint('physical_items_tcgplayer_sku_id_fkey', 'physical_items', type_='foreignkey')
# Recreate the foreign key constraint as DEFERRABLE INITIALLY DEFERRED
op.create_foreign_key(
'physical_items_tcgplayer_sku_id_fkey',
'physical_items', 'mtgjson_skus',
['tcgplayer_sku_id'], ['tcgplayer_sku_id'],
deferrable=True, initially='DEFERRED'
)

def downgrade() -> None:
"""Downgrade schema."""
# Drop deferrable constraint
op.drop_constraint('physical_items_tcgplayer_sku_id_fkey', 'physical_items', type_='foreignkey')
# Recreate original constraint (not deferrable)
op.create_foreign_key(
'physical_items_tcgplayer_sku_id_fkey',
'physical_items', 'mtgjson_skus',
['tcgplayer_sku_id'], ['tcgplayer_sku_id']
)


@@ -94,6 +94,10 @@ tr:hover {
width: 200px;
}
.location {
width: 120px;
}
/* Add alternating row colors */
tbody tr:nth-child(even) {
background-color: #f0f0f0;
@@ -119,6 +123,7 @@ tbody tr:hover {
<th class="set">Set</th>
<th class="rarity">Rarity</th>
<th class="card-number">Card #</th>
<th class="location">Location</th>
</tr>
</thead>
<tbody>
@@ -129,6 +134,7 @@ tbody tr:hover {
<td class="set">{{ item.set }}</td>
<td class="rarity">{{ item.rarity }}</td>
<td class="card-number">{{ item.card_number }}</td>
<td class="location">{{ item.location }}</td>
</tr>
{% endfor %}
</tbody>


@@ -59,11 +59,11 @@ async def lifespan(app: FastAPI):
db = SessionLocal()
try:
#data_init_service = service_manager.get_service('data_initialization')
#data_init = await data_init_service.initialize_data(db, game_ids=[1], use_cache=False, init_categories=True, init_products=True, init_groups=True, init_archived_prices=True, init_mtgjson=False, archived_prices_start_date="2025-05-22", archived_prices_end_date="2025-05-23")
#data_init = await data_init_service.initialize_data(db, game_ids=[1,3,62,71,86,89], use_cache=False, init_categories=True, init_products=True, init_groups=True, init_archived_prices=True, init_mtgjson=True, archived_prices_start_date="2025-11-20", archived_prices_end_date="2025-11-21")
#logger.info(f"Data initialization results: {data_init}")
# Update most recent prices
#MostRecentTCGPlayerPrice.update_most_recent_prices(db)
-logger.info("Most recent prices updated successfully")
+#logger.info("Most recent prices updated successfully")
# Create default customer, vendor, and marketplace
#inv_data_init = await data_init_service.initialize_inventory_data(db)


@@ -30,6 +30,7 @@ from app.models.tcgplayer_order import (
from app.models.tcgplayer_inventory import TCGPlayerInventory
from app.models.manabox_import_staging import ManaboxImportStaging
from app.models.pricing import PricingEvent
from app.models.inventory_location import InventoryLocation
__all__ = [
@@ -58,5 +59,6 @@ __all__ = [
'TCGPlayerOrderRefund',
'TCGPlayerPriceHistory',
'MostRecentTCGPlayerPrice',
-'PricingEvent'
+'PricingEvent',
+'InventoryLocation'
]


@@ -0,0 +1,19 @@
from sqlalchemy import Column, Integer, String, DateTime, Index
from sqlalchemy.sql import func
from app.db.database import Base

class InventoryLocation(Base):
__tablename__ = "inventory_locations"
id = Column(Integer, primary_key=True, index=True)
sku_id = Column(Integer, nullable=False, index=True)
quantity = Column(Integer, nullable=False, default=0)
location = Column(String, nullable=True)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())
__table_args__ = (
# Create a composite index on sku_id and location for efficient querying
Index('ix_inventory_locations_sku_location', 'sku_id', 'location'),
)
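
One way the inventory_adjustment CSV generated by the pull sheet service could be applied back to this model; a sketch only, apply_adjustment is a hypothetical helper, and the negative-delta convention matches what the service writes:

from sqlalchemy.orm import Session

def apply_adjustment(db: Session, sku_id: int, location: str, delta: int) -> None:
    # Hypothetical helper: apply one adjustment row to inventory_locations.
    row = (
        db.query(InventoryLocation)
        .filter(
            InventoryLocation.sku_id == sku_id,
            InventoryLocation.location == location,
        )
        .first()
    )
    if row is not None:
        # delta is negative for pulls, so this decrements on-hand quantity;
        # clamp at zero to avoid negative stock.
        row.quantity = max(0, row.quantity + delta)
        db.commit()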


@@ -257,6 +257,7 @@ class MostRecentTCGPlayerPrice(Base):
# Delete all existing records
db.query(cls).delete()
EXCLUDED_CATEGORY_IDS = [62]
# Get the most recent price for each product and sub_type_name
subquery = db.query(
@@ -269,14 +270,21 @@ class MostRecentTCGPlayerPrice(Base):
).subquery()
# Join with price history to get the full records
-latest_prices = db.query(TCGPlayerPriceHistory).join(
-    subquery,
-    and_(
-        TCGPlayerPriceHistory.product_id == subquery.c.product_id,
-        TCGPlayerPriceHistory.sub_type_name == subquery.c.sub_type_name,
-        TCGPlayerPriceHistory.date == subquery.c.max_date
+# filter excluded categories
+latest_prices = (
+    db.query(TCGPlayerPriceHistory)
+    .join(
+        subquery,
+        and_(
+            TCGPlayerPriceHistory.product_id == subquery.c.product_id,
+            TCGPlayerPriceHistory.sub_type_name == subquery.c.sub_type_name,
+            TCGPlayerPriceHistory.date == subquery.c.max_date,
+        ),
+    )
-).all()
+    .join(TCGPlayerProduct, TCGPlayerProduct.tcgplayer_product_id == TCGPlayerPriceHistory.product_id)
+    .filter(TCGPlayerProduct.category_id.notin_(EXCLUDED_CATEGORY_IDS))
+    .all()
+)
# Create new MostRecentTCGPlayerPrice records
for price in latest_prices:
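
For reference, the query above is the usual greatest-n-per-group pattern with a category filter joined on; a rough SQL equivalent (table names are inferred from the ORM models, so treat them as assumptions):

# Illustrative SQL only; real table names come from the ORM models.
#
#   SELECT ph.*
#   FROM tcgplayer_price_history ph
#   JOIN (
#       SELECT product_id, sub_type_name, MAX(date) AS max_date
#       FROM tcgplayer_price_history
#       GROUP BY product_id, sub_type_name
#   ) latest
#     ON  ph.product_id    = latest.product_id
#     AND ph.sub_type_name = latest.sub_type_name
#     AND ph.date          = latest.max_date
#   JOIN tcgplayer_products p
#     ON  p.tcgplayer_product_id = ph.product_id
#   WHERE p.category_id NOT IN (62)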


@@ -80,24 +80,28 @@ async def generate_pull_sheets(
) -> GenerateResponse:
"""
Generate and print pull sheets for the specified orders.
Args:
request: Dictionary containing:
- order_ids: List of TCGPlayer order numbers
Returns:
-Success status of the operation
+Success status and CSV file path for inventory adjustment
"""
try:
pull_sheet_service = service_manager.get_service('pull_sheet')
order_ids = request.order_ids
-pull_sheet = await pull_sheet_service.get_or_create_rendered_pull_sheet(db, order_ids)
+pull_sheet, inventory_csv = await pull_sheet_service.get_or_create_rendered_pull_sheet(db, order_ids)
regular_printer = service_manager.get_service('regular_printer')
success = await regular_printer.print_file(pull_sheet.path)
# TODO: change to accept file instead of path
return {"success": success, "message": "Pull sheets generated and printed successfully"}
return {
"success": success,
"message": "Pull sheets generated and printed successfully",
"csv_file_path": inventory_csv.path
}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to generate pull sheet: {str(e)}")
@@ -243,3 +247,4 @@ async def process_manabox_csv(
return {"success": success, "message": "Manabox CSV processed successfully"}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to process Manabox CSV: {str(e)}")
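
End to end, the updated route can be exercised like this (client sketch; the base URL is an assumption, while the /api/orders/generate-pull-sheets and /api/download-file paths come from this commit's router and frontend code):

import requests  # client sketch only

BASE = "http://localhost:8000/api"  # hypothetical host

resp = requests.post(
    f"{BASE}/orders/generate-pull-sheets",
    json={"order_ids": ["TCG-1234567"]},  # invented order number
    timeout=120,
)
resp.raise_for_status()
data = resp.json()

# Fetch the inventory adjustment CSV through the download route added below.
if data.get("csv_file_path"):
    csv_resp = requests.get(
        f"{BASE}/download-file",
        params={"file_path": data["csv_file_path"]},
        timeout=30,
    )
    with open("inventory_adjustment.csv", "wb") as fh:
        fh.write(csv_resp.content)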


@@ -1,4 +1,5 @@
from fastapi import APIRouter, Depends, HTTPException
from fastapi.responses import FileResponse
from sqlalchemy.orm import Session
from app.db.database import get_db
from app.models.file import File as FileModel
@@ -7,6 +8,10 @@ from app.routes.set_label_routes import router as set_label_router
from app.routes.order_routes import router as order_router
from app.routes.manabox_routes import router as manabox_router
from app.routes.inventory_management_routes import router as inventory_management_router
from pathlib import Path
import logging
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api")
# Include set label routes
@@ -49,4 +54,36 @@ async def update_file(file_id: int, file: FileUpdate):
@router.delete("/files/{file_id}", response_model=FileDelete)
async def delete_file(file_id: int):
return {"message": "File deleted successfully"}
return {"message": "File deleted successfully"}
@router.get("/download-file")
async def download_file(file_path: str):
"""
Download a file by its path.
Args:
file_path: The file path to download
Returns:
FileResponse with the file
"""
try:
# Resolve to absolute path
path = Path(file_path).resolve()
logger.info(f"Attempting to download file: {path}")
if not path.exists():
logger.error(f"File not found: {path}")
raise HTTPException(status_code=404, detail=f"File not found: {path}")
return FileResponse(
path=str(path),
filename=path.name,
media_type='text/csv'
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Failed to download file: {str(e)}")
raise HTTPException(status_code=500, detail=f"Failed to download file: {str(e)}")
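
Since download_file serves any path the caller supplies, one possible hardening (a sketch, not part of this commit; FILES_ROOT is a hypothetical setting) is to require the resolved path to live under the app's storage root before returning it:

from pathlib import Path

FILES_ROOT = Path("/app/data/files").resolve()  # hypothetical storage root

def is_allowed(path: Path) -> bool:
    # Reject anything that resolves outside the storage root.
    try:
        path.resolve().relative_to(FILES_ROOT)
        return True
    except ValueError:
        return False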


@@ -1,4 +1,5 @@
from pydantic import BaseModel
from typing import Optional
class GenerateRequest(BaseModel):
"""
@@ -27,6 +28,7 @@ class GeneratePullSheetsRequest(GenerateRequest):
class GenerateResponse(BaseModel):
message: str
success: bool
csv_file_path: Optional[str] = None
class GenerateReturnLabelsRequest(BaseModel):
number_of_labels: int


@@ -302,6 +302,8 @@ class DataInitializationService(BaseService):
groups = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.category_id == game_id).all()
logger.info(f"Processing {len(groups)} groups for game ID {game_id}")
for group in groups:
if group.group_id == 24359:
continue
if use_cache:
products_data = await self._load_cached_data(db, f"products_{game_id}_{group.group_id}.json")
if products_data:


@@ -96,9 +96,9 @@ class OrderManagementService(BaseTCGPlayerService):
if file:
return file
payload = {
"sortingType": "byRelease",
"format": "default",
"timezoneOffset": -4,
"sortingType": "ByRelease",
"format": "Default",
"timezoneOffset": -5,
"orderNumbers": order_ids
}
response = await self._make_request("POST", self.packing_slip_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)


@@ -1,4 +1,4 @@
-from typing import List, Dict
+from typing import List, Dict, Tuple
import pandas as pd
from datetime import datetime
from pathlib import Path
@@ -6,9 +6,12 @@ from jinja2 import Environment, FileSystemLoader
from weasyprint import HTML
import logging
import asyncio
import csv
import io
from app.schemas.file import FileInDB
from app.services.base_service import BaseService
from sqlalchemy.orm import Session
from app.models.inventory_location import InventoryLocation
logger = logging.getLogger(__name__)
@@ -20,35 +23,89 @@ class PullSheetService(BaseService):
self.env = Environment(loader=FileSystemLoader(str(self.template_dir)))
self.template = self.env.get_template("pull_sheet.html")
-async def get_or_create_rendered_pull_sheet(self, db: Session, order_ids: list[str]) -> FileInDB:
+async def get_or_create_rendered_pull_sheet(self, db: Session, order_ids: list[str]) -> Tuple[FileInDB, FileInDB]:
# get file service
file_service = self.get_service('file')
-# check if rendered pull sheet exists
+# check if both rendered pull sheet and inventory adjustment CSV exist
rendered_pull_sheet = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "rendered_pull_sheet", "application/pdf")
-if rendered_pull_sheet:
-    return rendered_pull_sheet
+inventory_adjustment_csv = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "inventory_adjustment", "text/csv")
+if rendered_pull_sheet and inventory_adjustment_csv:
+    return rendered_pull_sheet, inventory_adjustment_csv
# check if pull sheet data file exists
pull_sheet_data_file = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "pull_sheet", "text/csv")
-if pull_sheet_data_file:
-    # generate pdf from pull sheet data file
-    return await self.generate_pull_sheet_pdf(db, pull_sheet_data_file)
-# if no pull sheet data file exists, get it from order management service
-order_service = self.get_service('order_management')
-pull_sheet_data_file = await order_service.get_pull_sheet(db, order_ids)
-return await self.generate_pull_sheet_pdf(db, pull_sheet_data_file)
+if not pull_sheet_data_file:
+    # if no pull sheet data file exists, get it from order management service
+    order_service = self.get_service('order_management')
+    pull_sheet_data_file = await order_service.get_pull_sheet(db, order_ids)
+# generate both PDF and inventory adjustment CSV
+return await self.generate_pull_sheet_files(db, pull_sheet_data_file)
async def generate_pull_sheet_files(self, db: Session, file: FileInDB) -> Tuple[FileInDB, FileInDB]:
"""Generate both PDF pull sheet and inventory adjustment CSV from a CSV file.
Args:
db: Database session
file: FileInDB object containing the pull sheet data
Returns:
Tuple of (rendered PDF FileInDB, inventory adjustment CSV FileInDB)
"""
try:
# Read and process CSV data
items, inventory_adjustments = await self._read_and_process_csv(db, file.path)
# Generate PDF
template_data = {
'items': items,
'generation_date': datetime.now().strftime("%Y-%m-%d %H:%M:%S")
}
html_content = self.template.render(**template_data)
# Ensure metadata is properly formatted
metadata = file.file_metadata.copy() if file.file_metadata else {}
if 'order_ids' in metadata:
metadata['order_ids'] = sorted(metadata['order_ids'])
file_service = self.get_service('file')
# Save PDF
pdf_file = await file_service.save_file(
db=db,
file_data=html_content,
filename=f"rendered_pull_sheet_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf",
subdir="tcgplayer/pull_sheets/rendered",
file_type="rendered_pull_sheet",
content_type="application/pdf",
metadata=metadata,
html_content=True
)
# Generate inventory adjustment CSV
csv_file = await self._generate_inventory_adjustment_csv(db, inventory_adjustments, metadata)
return pdf_file, csv_file
except Exception as e:
logger.error(f"Error generating pull sheet files: {str(e)}")
raise
async def generate_pull_sheet_pdf(self, db: Session, file: FileInDB) -> FileInDB:
"""Generate a PDF pull sheet from a CSV file.
Args:
file: FileInDB object containing the pull sheet data
Returns:
FileInDB record for the generated PDF file
"""
try:
# Read and process CSV data
-items = await self._read_and_process_csv(file.path)
+items = await self._read_and_process_csv(db, file.path)
# Prepare template data
template_data = {
@@ -80,40 +137,222 @@ class PullSheetService(BaseService):
logger.error(f"Error generating pull sheet PDF: {str(e)}")
raise
-async def _read_and_process_csv(self, csv_path: str) -> List[Dict]:
+async def _read_and_process_csv(self, db: Session, csv_path: str) -> Tuple[List[Dict], List[Dict]]:
"""Read and process CSV data using pandas.
Args:
db: Database session
csv_path: Path to the CSV file
Returns:
-List of processed items
+Tuple of (items list for PDF, inventory adjustments list for CSV)
"""
# Read CSV into pandas DataFrame in a separate thread to avoid blocking
df = await asyncio.get_event_loop().run_in_executor(
None,
lambda: pd.read_csv(csv_path)
)
# Filter out the "Orders Contained in Pull Sheet" row
df = df[df['Product Line'] != 'Orders Contained in Pull Sheet:'].copy()
# Convert Set Release Date to datetime
df['Set Release Date'] = pd.to_datetime(df['Set Release Date'], format='%m/%d/%Y %H:%M:%S')
# Sort by Set Release Date (descending) and then Product Name (ascending)
df = df.sort_values(['Set Release Date', 'Set', 'Product Name'], ascending=[False, True, True])
-# Convert to list of dictionaries
+# Convert to list of dictionaries with location allocation
items = []
inventory_adjustments = []
for _, row in df.iterrows():
-items.append({
sku_id = int(row['SkuId']) if pd.notna(row['SkuId']) else None
quantity_needed = int(row['Quantity'])
# Handle card number with try/except for int conversion
card_number = ''
if 'Number' in row and pd.notna(row['Number']):
number_str = str(row['Number'])
if '/' in number_str:
# Keep as-is for numbers like "123/456"
card_number = number_str
else:
# Try to convert to int to strip decimals (123.0 -> 123)
try:
card_number = str(int(float(row['Number'])))
except (ValueError, TypeError):
# If conversion fails (e.g., "SEA123"), use as-is
card_number = number_str
# Base item data - include set_release_date for final sorting
item_data = {
'product_name': row['Product Name'],
'condition': row['Condition'],
'quantity': str(int(row['Quantity'])), # Convert to string for template
'set': row['Set'],
'rarity': row['Rarity'],
-'card_number': str(int(row['Number'])) if 'Number' in row and pd.notna(row['Number']) and '/' not in str(row['Number']) else str(row['Number']) if 'Number' in row and pd.notna(row['Number']) and '/' in str(row['Number']) else ''
+'card_number': card_number,
+'set_release_date': row['Set Release Date']
}
if sku_id is not None:
# Query inventory locations for this SKU
locations = db.query(InventoryLocation).filter(
InventoryLocation.sku_id == sku_id,
InventoryLocation.quantity > 0
).all()
if locations:
# Allocate quantity across locations
allocated_items, adjustments = self._allocate_quantity_to_locations(
item_data, quantity_needed, locations, sku_id
)
items.extend(allocated_items)
inventory_adjustments.extend(adjustments)
else:
# No location data found
items.append({
**item_data,
'quantity': str(quantity_needed),
'location': 'LOCATION UNKNOWN'
})
# Still record the adjustment even without location
inventory_adjustments.append({
'sku_id': sku_id,
'location': 'LOCATION UNKNOWN',
'quantity': -quantity_needed
})
else:
# No SKU ID in CSV
items.append({
**item_data,
'quantity': str(quantity_needed),
'location': 'LOCATION UNKNOWN'
})
# Can't create adjustment without SKU ID
# Final sort: by location (tcgplayer_inventory first, then alpha),
# then by set_release_date (desc), set (asc), product_name (asc)
items.sort(key=lambda item: (
0 if item['location'] == 'tcgplayer_inventory' else 1, # tcgplayer_inventory first
item['location'] or 'zzz', # Then alphabetically (null locations at end)
-item['set_release_date'].timestamp(), # Set Release Date descending
item['set'], # Set ascending
item['product_name'] # Product Name ascending
))
return items, inventory_adjustments
def _allocate_quantity_to_locations(
self, item_data: Dict, quantity_needed: int, locations: List[InventoryLocation], sku_id: int
) -> Tuple[List[Dict], List[Dict]]:
"""Allocate quantity across available locations.
Args:
item_data: Base item data (name, set, etc.)
quantity_needed: Total quantity needed
locations: List of available locations with quantities
sku_id: The SKU ID for inventory adjustment tracking
Returns:
Tuple of (items list for display, inventory adjustments list)
"""
# Sort locations: prioritize "tcgplayer_inventory" location if exists
# But only if we don't need to split across multiple locations
total_available = sum(loc.quantity for loc in locations)
# Check if we have a "tcgplayer_inventory" location with enough quantity
tcgplayer_location = next(
(loc for loc in locations if loc.location == 'tcgplayer_inventory'),
None
)
if tcgplayer_location and tcgplayer_location.quantity >= quantity_needed:
# Use tcgplayer_inventory exclusively if it has enough
return (
[{
**item_data,
'quantity': str(quantity_needed),
'location': tcgplayer_location.location or 'LOCATION UNKNOWN'
}],
[{
'sku_id': sku_id,
'location': tcgplayer_location.location or 'LOCATION UNKNOWN',
'quantity': -quantity_needed
}]
)
# Otherwise, allocate across all available locations
allocated_items = []
inventory_adjustments = []
remaining = quantity_needed
# Sort locations: tcgplayer_inventory first, then others
sorted_locations = sorted(
locations,
key=lambda loc: (0 if loc.location == 'tcgplayer_inventory' else 1, loc.location or 'zzz')
)
for location in sorted_locations:
if remaining <= 0:
break
allocated_qty = min(remaining, location.quantity)
if allocated_qty > 0:
allocated_items.append({
**item_data,
'quantity': str(allocated_qty),
'location': location.location or 'LOCATION UNKNOWN'
})
inventory_adjustments.append({
'sku_id': sku_id,
'location': location.location or 'LOCATION UNKNOWN',
'quantity': -allocated_qty
})
remaining -= allocated_qty
# If we still need more quantity (not enough in inventory)
if remaining > 0:
allocated_items.append({
**item_data,
'quantity': str(remaining),
'location': 'INSUFFICIENT INVENTORY'
})
-return items
inventory_adjustments.append({
'sku_id': sku_id,
'location': 'INSUFFICIENT INVENTORY',
'quantity': -remaining
})
return allocated_items, inventory_adjustments
async def _generate_inventory_adjustment_csv(
self, db: Session, inventory_adjustments: List[Dict], metadata: Dict
) -> FileInDB:
"""Generate a CSV file for inventory adjustments.
Args:
db: Database session
inventory_adjustments: List of inventory adjustments with sku_id, location, quantity
metadata: Metadata for the file
Returns:
FileInDB object containing the CSV file
"""
# Create CSV content
output = io.StringIO()
writer = csv.DictWriter(output, fieldnames=['sku_id', 'location', 'quantity'])
writer.writeheader()
writer.writerows(inventory_adjustments)
csv_content = output.getvalue()
# Save CSV file
file_service = self.get_service('file')
return await file_service.save_file(
db=db,
file_data=csv_content,
filename=f"inventory_adjustment_{datetime.now().strftime('%Y%m%d_%H%M%S')}.csv",
subdir="tcgplayer/pull_sheets/inventory_adjustments",
file_type="inventory_adjustment",
content_type="text/csv",
metadata=metadata
)
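
To make the allocation rules concrete, a worked example with invented values: a pull of 5 copies of one SKU, with 2 on hand in tcgplayer_inventory and 4 in box-A.

# Expected behaviour of _allocate_quantity_to_locations (illustrative values):
#
#   locations       = [("tcgplayer_inventory", 2), ("box-A", 4)]
#   quantity_needed = 5
#
# tcgplayer_inventory cannot cover all 5 on its own, so allocation splits:
# tcgplayer_inventory first, then remaining locations alphabetically.
#
#   items       -> [(qty 2, "tcgplayer_inventory"), (qty 3, "box-A")]
#   adjustments -> [(-2, "tcgplayer_inventory"), (-3, "box-A")]
#
# If only 4 were available in total, the shortfall of 1 would surface in both
# lists as an "INSUFFICIENT INVENTORY" row.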


@@ -134,7 +134,7 @@ async function generatePullSheets() {
showToast('Please select at least one order', 'error');
return;
}
setLoading(true);
const response = await fetch(`${API_BASE_URL}/orders/generate-pull-sheets`, {
method: 'POST',
@@ -145,12 +145,25 @@ async function generatePullSheets() {
order_ids: orderIds
})
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.detail || 'Failed to generate pull sheets');
}
const data = await response.json();
// Download the inventory adjustment CSV if available
if (data.csv_file_path) {
const csvUrl = `${API_BASE_URL}/download-file?file_path=${encodeURIComponent(data.csv_file_path)}`;
const link = document.createElement('a');
link.href = csvUrl;
link.download = 'inventory_adjustment.csv';
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
}
showToast('Pull sheets generated successfully');
} catch (error) {
showToast('Error generating pull sheets: ' + error.message, 'error');