kind of a mess lol but file caching and front end

zman 2025-04-17 13:28:49 -04:00
parent 21408af48c
commit 8f35cedb4a
45 changed files with 1435 additions and 1316 deletions

View File

@@ -5,14 +5,6 @@ from sqlalchemy import pool
from alembic import context
# Import your models here
from app.db.database import Base
from app.models.inventory import Inventory
from app.models.card import Card
from app.models.box import Box, OpenBox
from app.models.game import Game
from app.models.file import File
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
@@ -24,6 +16,10 @@ if config.config_file_name is not None:
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
from app.db.database import Base
from app.models import *
target_metadata = Base.metadata
# other values from the config, defined by the needs of env.py,
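The env.py change above drops the explicit per-model imports in favor of a wildcard import and points target_metadata at Base.metadata, which is what Alembic's autogenerate compares the live database against. For that to work, the models package has to import its submodules at package level so every table registers itself on Base.metadata. A minimal sketch of what app/models/__init__.py would need to contain; the package's real contents are not part of this diff, so the list below is an assumption based on the imports removed above:

# app/models/__init__.py -- hypothetical sketch, not shown in this commit.
# Importing each module registers its tables on Base.metadata, so the
# bare "from app.models import *" in env.py is enough for autogenerate.
from app.models.card import Card
from app.models.inventory import Inventory
from app.models.box import Box, OpenBox
from app.models.game import Game
from app.models.file import File

__all__ = ["Card", "Inventory", "Box", "OpenBox", "Game", "File"]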

View File

@@ -19,8 +19,10 @@ depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade schema."""
${downgrades if downgrades else "pass"}

View File

@@ -1,49 +0,0 @@
"""create tcgplayer categories table
Revision ID: 2025_04_09_create_tcgplayer_categories_table
Revises: remove_product_id_unique_constraint
Create Date: 2025-04-09 23:20:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '2025_04_09_create_tcgplayer_categories_table'
down_revision: str = 'remove_product_id_unique_constraint'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
op.create_table('tcgplayer_categories',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('category_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('display_name', sa.String(), nullable=True),
sa.Column('seo_category_name', sa.String(), nullable=True),
sa.Column('category_description', sa.String(), nullable=True),
sa.Column('category_page_title', sa.String(), nullable=True),
sa.Column('sealed_label', sa.String(), nullable=True),
sa.Column('non_sealed_label', sa.String(), nullable=True),
sa.Column('condition_guide_url', sa.String(), nullable=True),
sa.Column('is_scannable', sa.Boolean(), nullable=True, default=False),
sa.Column('popularity', sa.Integer(), nullable=True, default=0),
sa.Column('is_direct', sa.Boolean(), nullable=True, default=False),
sa.Column('modified_on', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('category_id')
)
op.create_index('ix_tcgplayer_categories_id', 'tcgplayer_categories', ['id'], unique=False)
op.create_index('ix_tcgplayer_categories_category_id', 'tcgplayer_categories', ['category_id'], unique=True)
def downgrade() -> None:
op.drop_index('ix_tcgplayer_categories_category_id', table_name='tcgplayer_categories')
op.drop_index('ix_tcgplayer_categories_id', table_name='tcgplayer_categories')
op.drop_table('tcgplayer_categories')

View File

@@ -1,95 +0,0 @@
"""create mtgjson tables
Revision ID: 2025_04_13_create_mtgjson_tables
Revises: 2025_04_09_create_tcgplayer_categories_table
Create Date: 2025-04-13 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2025_04_13_create_mtgjson_tables'
down_revision = '2025_04_09_create_tcgplayer_categories_table'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Create mtgjson_cards table
op.create_table(
'mtgjson_cards',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('card_id', sa.String(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('set_code', sa.String(), nullable=True),
sa.Column('uuid', sa.String(), nullable=True),
# Identifiers
sa.Column('abu_id', sa.String(), nullable=True),
sa.Column('card_kingdom_etched_id', sa.String(), nullable=True),
sa.Column('card_kingdom_foil_id', sa.String(), nullable=True),
sa.Column('card_kingdom_id', sa.String(), nullable=True),
sa.Column('cardsphere_id', sa.String(), nullable=True),
sa.Column('cardsphere_foil_id', sa.String(), nullable=True),
sa.Column('cardtrader_id', sa.String(), nullable=True),
sa.Column('csi_id', sa.String(), nullable=True),
sa.Column('mcm_id', sa.String(), nullable=True),
sa.Column('mcm_meta_id', sa.String(), nullable=True),
sa.Column('miniaturemarket_id', sa.String(), nullable=True),
sa.Column('mtg_arena_id', sa.String(), nullable=True),
sa.Column('mtgjson_foil_version_id', sa.String(), nullable=True),
sa.Column('mtgjson_non_foil_version_id', sa.String(), nullable=True),
sa.Column('mtgjson_v4_id', sa.String(), nullable=True),
sa.Column('mtgo_foil_id', sa.String(), nullable=True),
sa.Column('mtgo_id', sa.String(), nullable=True),
sa.Column('multiverse_id', sa.String(), nullable=True),
sa.Column('scg_id', sa.String(), nullable=True),
sa.Column('scryfall_id', sa.String(), nullable=True),
sa.Column('scryfall_card_back_id', sa.String(), nullable=True),
sa.Column('scryfall_oracle_id', sa.String(), nullable=True),
sa.Column('scryfall_illustration_id', sa.String(), nullable=True),
sa.Column('tcgplayer_product_id', sa.String(), nullable=True),
sa.Column('tcgplayer_etched_product_id', sa.String(), nullable=True),
sa.Column('tnt_id', sa.String(), nullable=True),
sa.Column('data', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_mtgjson_cards_card_id'), 'mtgjson_cards', ['card_id'], unique=True)
op.create_index(op.f('ix_mtgjson_cards_id'), 'mtgjson_cards', ['id'], unique=False)
# Create mtgjson_skus table
op.create_table(
'mtgjson_skus',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('sku_id', sa.String(), nullable=False),
sa.Column('product_id', sa.String(), nullable=True),
sa.Column('condition', sa.String(), nullable=True),
sa.Column('finish', sa.String(), nullable=True),
sa.Column('language', sa.String(), nullable=True),
sa.Column('printing', sa.String(), nullable=True),
sa.Column('card_id', sa.String(), nullable=True),
sa.Column('data', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=False),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_mtgjson_skus_card_id'), 'mtgjson_skus', ['card_id'], unique=False)
op.create_index(op.f('ix_mtgjson_skus_id'), 'mtgjson_skus', ['id'], unique=False)
op.create_index(op.f('ix_mtgjson_skus_product_id'), 'mtgjson_skus', ['product_id'], unique=False)
op.create_index(op.f('ix_mtgjson_skus_sku_id'), 'mtgjson_skus', ['sku_id'], unique=False)
def downgrade() -> None:
# Drop mtgjson_skus table first due to foreign key constraint
op.drop_index(op.f('ix_mtgjson_skus_sku_id'), table_name='mtgjson_skus')
op.drop_index(op.f('ix_mtgjson_skus_product_id'), table_name='mtgjson_skus')
op.drop_index(op.f('ix_mtgjson_skus_id'), table_name='mtgjson_skus')
op.drop_index(op.f('ix_mtgjson_skus_card_id'), table_name='mtgjson_skus')
op.drop_table('mtgjson_skus')
# Drop mtgjson_cards table
op.drop_index(op.f('ix_mtgjson_cards_id'), table_name='mtgjson_cards')
op.drop_index(op.f('ix_mtgjson_cards_card_id'), table_name='mtgjson_cards')
op.drop_table('mtgjson_cards')

View File

@@ -1,29 +0,0 @@
"""fix alembic version table
Revision ID: 2025_04_14_fix_alembic_version
Revises: 4dbeb89dd33a
Create Date: 2025-04-14 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2025_04_14_fix_alembic_version'
down_revision = '4dbeb89dd33a'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Increase the size of the version_num column in the alembic_version table; the long descriptive revision IDs in this history (e.g. '2025_04_09_create_tcgplayer_categories_table') exceed the default VARCHAR(32)
op.alter_column('alembic_version', 'version_num',
existing_type=sa.String(32),
type_=sa.String(255))
def downgrade() -> None:
# Revert the column size back to 32
op.alter_column('alembic_version', 'version_num',
existing_type=sa.String(255),
type_=sa.String(32))

View File

@@ -1,32 +0,0 @@
"""fix foreign key issue
Revision ID: fix_foreign_key_issue
Revises: 5bf5f87793d7
Create Date: 2025-04-14 04:15:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'fix_foreign_key_issue'
down_revision: Union[str, None] = '5bf5f87793d7'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Drop the foreign key constraint if it exists
op.execute('ALTER TABLE mtgjson_skus DROP CONSTRAINT IF EXISTS mtgjson_skus_card_id_fkey')
# Make the column nullable
op.alter_column('mtgjson_skus', 'card_id',
existing_type=sa.String(),
nullable=True)
def downgrade() -> None:
# No downgrade - we don't want to recreate the constraint
pass

View File

@@ -1,33 +0,0 @@
"""fix mtgjson final
Revision ID: 2025_04_14_fix_mtgjson_final
Revises: d1628d8feb57
Create Date: 2025-04-14 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2025_04_14_fix_mtgjson_final'
down_revision = 'd1628d8feb57'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Drop the foreign key constraint and make card_id nullable
op.drop_constraint('mtgjson_skus_card_id_fkey', 'mtgjson_skus', type_='foreignkey')
op.alter_column('mtgjson_skus', 'card_id',
existing_type=sa.String(),
nullable=True)
def downgrade() -> None:
# Make card_id not nullable and recreate foreign key
op.alter_column('mtgjson_skus', 'card_id',
existing_type=sa.String(),
nullable=False)
op.create_foreign_key('mtgjson_skus_card_id_fkey',
'mtgjson_skus', 'mtgjson_cards',
['card_id'], ['card_id'])

View File

@@ -1,33 +0,0 @@
"""fix mtgjson foreign key
Revision ID: 2025_04_14_fix_mtgjson_foreign_key
Revises: 4ad81b486caf
Create Date: 2025-04-14 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2025_04_14_fix_mtgjson_foreign_key'
down_revision = '4ad81b486caf'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Drop the foreign key constraint and make card_id nullable
op.execute('ALTER TABLE mtgjson_skus DROP CONSTRAINT IF EXISTS mtgjson_skus_card_id_fkey')
op.alter_column('mtgjson_skus', 'card_id',
existing_type=sa.String(),
nullable=True)
def downgrade() -> None:
# Make card_id not nullable and recreate foreign key
op.alter_column('mtgjson_skus', 'card_id',
existing_type=sa.String(),
nullable=False)
op.create_foreign_key('mtgjson_skus_card_id_fkey',
'mtgjson_skus', 'mtgjson_cards',
['card_id'], ['card_id'])

View File

@@ -1,35 +0,0 @@
"""remove mtgjson data columns
Revision ID: 2025_04_14_remove_mtgjson_data_columns
Revises: 2025_04_13_create_mtgjson_tables
Create Date: 2025-04-14 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2025_04_14_remove_mtgjson_data_columns'
down_revision = '2025_04_13_create_mtgjson_tables'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Remove data column from mtgjson_skus table
op.drop_column('mtgjson_skus', 'data')
# Remove data column from mtgjson_cards table
op.drop_column('mtgjson_cards', 'data')
def downgrade() -> None:
# Add data column back to mtgjson_cards table
op.add_column('mtgjson_cards',
sa.Column('data', sa.JSON(), nullable=True)
)
# Add data column back to mtgjson_skus table
op.add_column('mtgjson_skus',
sa.Column('data', sa.JSON(), nullable=True)
)

View File

@@ -1,31 +0,0 @@
"""remove mtgjson foreign key constraint
Revision ID: 2025_04_14_remove_mtgjson_foreign_key
Revises: 2025_04_14_remove_mtgjson_data_columns
Create Date: 2025-04-14 00:00:00.000000
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '2025_04_14_remove_mtgjson_foreign_key'
down_revision = '2025_04_14_remove_mtgjson_data_columns'
branch_labels = None
depends_on = None
def upgrade() -> None:
# Drop the foreign key constraint from mtgjson_skus table
op.drop_constraint('mtgjson_skus_card_id_fkey', 'mtgjson_skus', type_='foreignkey')
def downgrade() -> None:
# Recreate the foreign key constraint
op.create_foreign_key(
'mtgjson_skus_card_id_fkey',
'mtgjson_skus',
'mtgjson_cards',
['card_id'],
['card_id']
)

View File

@@ -0,0 +1,369 @@
"""i hate alembic so goddamn much
Revision ID: 479003fbead7
Revises:
Create Date: 2025-04-17 12:08:13.714276
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '479003fbead7'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
"""Upgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('cards',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('rarity', sa.String(), nullable=True),
sa.Column('set_name', sa.String(), nullable=True),
sa.Column('price', sa.Float(), nullable=True),
sa.Column('quantity', sa.Integer(), nullable=True),
sa.Column('tcgplayer_sku', sa.String(), nullable=True),
sa.Column('product_line', sa.String(), nullable=True),
sa.Column('product_name', sa.String(), nullable=True),
sa.Column('title', sa.String(), nullable=True),
sa.Column('number', sa.String(), nullable=True),
sa.Column('condition', sa.String(), nullable=True),
sa.Column('tcg_market_price', sa.Float(), nullable=True),
sa.Column('tcg_direct_low', sa.Float(), nullable=True),
sa.Column('tcg_low_price_with_shipping', sa.Float(), nullable=True),
sa.Column('tcg_low_price', sa.Float(), nullable=True),
sa.Column('total_quantity', sa.Integer(), nullable=True),
sa.Column('add_to_quantity', sa.Integer(), nullable=True),
sa.Column('tcg_marketplace_price', sa.Float(), nullable=True),
sa.Column('photo_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_cards_id'), 'cards', ['id'], unique=False)
op.create_index(op.f('ix_cards_name'), 'cards', ['name'], unique=False)
op.create_index(op.f('ix_cards_set_name'), 'cards', ['set_name'], unique=False)
op.create_index(op.f('ix_cards_tcgplayer_sku'), 'cards', ['tcgplayer_sku'], unique=True)
op.create_table('files',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('file_type', sa.String(), nullable=True),
sa.Column('content_type', sa.String(), nullable=True),
sa.Column('path', sa.String(), nullable=True),
sa.Column('size', sa.Integer(), nullable=True),
sa.Column('file_metadata', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_files_id'), 'files', ['id'], unique=False)
op.create_table('games',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_games_id'), 'games', ['id'], unique=False)
op.create_table('inventory',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tcgplayer_id', sa.String(), nullable=True),
sa.Column('product_line', sa.String(), nullable=True),
sa.Column('set_name', sa.String(), nullable=True),
sa.Column('product_name', sa.String(), nullable=True),
sa.Column('title', sa.String(), nullable=True),
sa.Column('number', sa.String(), nullable=True),
sa.Column('rarity', sa.String(), nullable=True),
sa.Column('condition', sa.String(), nullable=True),
sa.Column('tcg_market_price', sa.Float(), nullable=True),
sa.Column('tcg_direct_low', sa.Float(), nullable=True),
sa.Column('tcg_low_price_with_shipping', sa.Float(), nullable=True),
sa.Column('tcg_low_price', sa.Float(), nullable=True),
sa.Column('total_quantity', sa.Integer(), nullable=True),
sa.Column('add_to_quantity', sa.Integer(), nullable=True),
sa.Column('tcg_marketplace_price', sa.Float(), nullable=True),
sa.Column('photo_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_inventory_id'), 'inventory', ['id'], unique=False)
op.create_index(op.f('ix_inventory_tcgplayer_id'), 'inventory', ['tcgplayer_id'], unique=True)
op.create_table('mtgjson_cards',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('card_id', sa.String(), nullable=True),
sa.Column('name', sa.String(), nullable=True),
sa.Column('set_code', sa.String(), nullable=True),
sa.Column('uuid', sa.String(), nullable=True),
sa.Column('abu_id', sa.String(), nullable=True),
sa.Column('card_kingdom_etched_id', sa.String(), nullable=True),
sa.Column('card_kingdom_foil_id', sa.String(), nullable=True),
sa.Column('card_kingdom_id', sa.String(), nullable=True),
sa.Column('cardsphere_id', sa.String(), nullable=True),
sa.Column('cardsphere_foil_id', sa.String(), nullable=True),
sa.Column('cardtrader_id', sa.String(), nullable=True),
sa.Column('csi_id', sa.String(), nullable=True),
sa.Column('mcm_id', sa.String(), nullable=True),
sa.Column('mcm_meta_id', sa.String(), nullable=True),
sa.Column('miniaturemarket_id', sa.String(), nullable=True),
sa.Column('mtg_arena_id', sa.String(), nullable=True),
sa.Column('mtgjson_foil_version_id', sa.String(), nullable=True),
sa.Column('mtgjson_non_foil_version_id', sa.String(), nullable=True),
sa.Column('mtgjson_v4_id', sa.String(), nullable=True),
sa.Column('mtgo_foil_id', sa.String(), nullable=True),
sa.Column('mtgo_id', sa.String(), nullable=True),
sa.Column('multiverse_id', sa.String(), nullable=True),
sa.Column('scg_id', sa.String(), nullable=True),
sa.Column('scryfall_id', sa.String(), nullable=True),
sa.Column('scryfall_card_back_id', sa.String(), nullable=True),
sa.Column('scryfall_oracle_id', sa.String(), nullable=True),
sa.Column('scryfall_illustration_id', sa.String(), nullable=True),
sa.Column('tcgplayer_product_id', sa.String(), nullable=True),
sa.Column('tcgplayer_etched_product_id', sa.String(), nullable=True),
sa.Column('tnt_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_mtgjson_cards_card_id'), 'mtgjson_cards', ['card_id'], unique=True)
op.create_index(op.f('ix_mtgjson_cards_id'), 'mtgjson_cards', ['id'], unique=False)
op.create_table('mtgjson_skus',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('sku_id', sa.String(), nullable=True),
sa.Column('product_id', sa.String(), nullable=True),
sa.Column('condition', sa.String(), nullable=True),
sa.Column('finish', sa.String(), nullable=True),
sa.Column('language', sa.String(), nullable=True),
sa.Column('printing', sa.String(), nullable=True),
sa.Column('card_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_mtgjson_skus_id'), 'mtgjson_skus', ['id'], unique=False)
op.create_index(op.f('ix_mtgjson_skus_product_id'), 'mtgjson_skus', ['product_id'], unique=False)
op.create_index(op.f('ix_mtgjson_skus_sku_id'), 'mtgjson_skus', ['sku_id'], unique=False)
op.create_table('tcgplayer_categories',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(), nullable=False),
sa.Column('display_name', sa.String(), nullable=True),
sa.Column('seo_category_name', sa.String(), nullable=True),
sa.Column('category_description', sa.String(), nullable=True),
sa.Column('category_page_title', sa.String(), nullable=True),
sa.Column('sealed_label', sa.String(), nullable=True),
sa.Column('non_sealed_label', sa.String(), nullable=True),
sa.Column('condition_guide_url', sa.String(), nullable=True),
sa.Column('is_scannable', sa.Boolean(), nullable=True),
sa.Column('popularity', sa.Integer(), nullable=True),
sa.Column('is_direct', sa.Boolean(), nullable=True),
sa.Column('modified_on', sa.DateTime(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_categories_category_id'), 'tcgplayer_categories', ['category_id'], unique=True)
op.create_index(op.f('ix_tcgplayer_categories_id'), 'tcgplayer_categories', ['id'], unique=False)
op.create_table('tcgplayer_groups',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('group_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(), nullable=False),
sa.Column('abbreviation', sa.String(), nullable=True),
sa.Column('is_supplemental', sa.Boolean(), nullable=True),
sa.Column('published_on', sa.DateTime(), nullable=True),
sa.Column('modified_on', sa.DateTime(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_groups_group_id'), 'tcgplayer_groups', ['group_id'], unique=True)
op.create_index(op.f('ix_tcgplayer_groups_id'), 'tcgplayer_groups', ['id'], unique=False)
op.create_table('tcgplayer_order_products',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('product_name', sa.String(), nullable=True),
sa.Column('unit_price', sa.Float(), nullable=True),
sa.Column('extended_price', sa.Float(), nullable=True),
sa.Column('quantity', sa.Integer(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('product_id', sa.String(), nullable=True),
sa.Column('sku_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_order_products_id'), 'tcgplayer_order_products', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_order_products_order_number'), 'tcgplayer_order_products', ['order_number'], unique=False)
op.create_table('tcgplayer_order_refunds',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('refund_created_at', sa.DateTime(), nullable=True),
sa.Column('type', sa.String(), nullable=True),
sa.Column('amount', sa.Float(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('origin', sa.String(), nullable=True),
sa.Column('shipping_amount', sa.Float(), nullable=True),
sa.Column('products', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_order_refunds_id'), 'tcgplayer_order_refunds', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_order_refunds_order_number'), 'tcgplayer_order_refunds', ['order_number'], unique=False)
op.create_table('tcgplayer_order_transactions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('product_amount', sa.Float(), nullable=True),
sa.Column('shipping_amount', sa.Float(), nullable=True),
sa.Column('gross_amount', sa.Float(), nullable=True),
sa.Column('fee_amount', sa.Float(), nullable=True),
sa.Column('net_amount', sa.Float(), nullable=True),
sa.Column('direct_fee_amount', sa.Float(), nullable=True),
sa.Column('taxes', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_order_transactions_id'), 'tcgplayer_order_transactions', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_order_transactions_order_number'), 'tcgplayer_order_transactions', ['order_number'], unique=False)
op.create_table('tcgplayer_orders',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('order_created_at', sa.DateTime(), nullable=True),
sa.Column('status', sa.String(), nullable=True),
sa.Column('channel', sa.String(), nullable=True),
sa.Column('fulfillment', sa.String(), nullable=True),
sa.Column('seller_name', sa.String(), nullable=True),
sa.Column('buyer_name', sa.String(), nullable=True),
sa.Column('payment_type', sa.String(), nullable=True),
sa.Column('pickup_status', sa.String(), nullable=True),
sa.Column('shipping_type', sa.String(), nullable=True),
sa.Column('estimated_delivery_date', sa.DateTime(), nullable=True),
sa.Column('recipient_name', sa.String(), nullable=True),
sa.Column('address_line_1', sa.String(), nullable=True),
sa.Column('address_line_2', sa.String(), nullable=True),
sa.Column('city', sa.String(), nullable=True),
sa.Column('state', sa.String(), nullable=True),
sa.Column('zip_code', sa.String(), nullable=True),
sa.Column('country', sa.String(), nullable=True),
sa.Column('tracking_numbers', sa.JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), nullable=True),
sa.Column('updated_at', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_orders_id'), 'tcgplayer_orders', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_orders_order_number'), 'tcgplayer_orders', ['order_number'], unique=False)
op.create_table('boxes',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=True),
sa.Column('type', sa.String(), nullable=True),
sa.Column('set_code', sa.String(), nullable=True),
sa.Column('sku', sa.Integer(), nullable=True),
sa.Column('name', sa.String(), nullable=True),
sa.Column('game_id', sa.Integer(), nullable=True),
sa.Column('expected_number_of_cards', sa.Integer(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['game_id'], ['games.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_boxes_id'), 'boxes', ['id'], unique=False)
op.create_table('tcgplayer_products',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(), nullable=False),
sa.Column('clean_name', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('group_id', sa.Integer(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('modified_on', sa.DateTime(), nullable=True),
sa.Column('image_count', sa.Integer(), nullable=True),
sa.Column('ext_rarity', sa.String(), nullable=True),
sa.Column('ext_number', sa.String(), nullable=True),
sa.Column('low_price', sa.Float(), nullable=True),
sa.Column('mid_price', sa.Float(), nullable=True),
sa.Column('high_price', sa.Float(), nullable=True),
sa.Column('market_price', sa.Float(), nullable=True),
sa.Column('direct_low_price', sa.Float(), nullable=True),
sa.Column('sub_type_name', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['group_id'], ['tcgplayer_groups.group_id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_products_id'), 'tcgplayer_products', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_products_product_id'), 'tcgplayer_products', ['product_id'], unique=False)
op.create_table('open_boxes',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('box_id', sa.Integer(), nullable=True),
sa.Column('number_of_cards', sa.Integer(), nullable=True),
sa.Column('date_opened', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['box_id'], ['boxes.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_open_boxes_id'), 'open_boxes', ['id'], unique=False)
# ### end Alembic commands ###
def downgrade() -> None:
"""Downgrade schema."""
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_open_boxes_id'), table_name='open_boxes')
op.drop_table('open_boxes')
op.drop_index(op.f('ix_tcgplayer_products_product_id'), table_name='tcgplayer_products')
op.drop_index(op.f('ix_tcgplayer_products_id'), table_name='tcgplayer_products')
op.drop_table('tcgplayer_products')
op.drop_index(op.f('ix_boxes_id'), table_name='boxes')
op.drop_table('boxes')
op.drop_index(op.f('ix_tcgplayer_orders_order_number'), table_name='tcgplayer_orders')
op.drop_index(op.f('ix_tcgplayer_orders_id'), table_name='tcgplayer_orders')
op.drop_table('tcgplayer_orders')
op.drop_index(op.f('ix_tcgplayer_order_transactions_order_number'), table_name='tcgplayer_order_transactions')
op.drop_index(op.f('ix_tcgplayer_order_transactions_id'), table_name='tcgplayer_order_transactions')
op.drop_table('tcgplayer_order_transactions')
op.drop_index(op.f('ix_tcgplayer_order_refunds_order_number'), table_name='tcgplayer_order_refunds')
op.drop_index(op.f('ix_tcgplayer_order_refunds_id'), table_name='tcgplayer_order_refunds')
op.drop_table('tcgplayer_order_refunds')
op.drop_index(op.f('ix_tcgplayer_order_products_order_number'), table_name='tcgplayer_order_products')
op.drop_index(op.f('ix_tcgplayer_order_products_id'), table_name='tcgplayer_order_products')
op.drop_table('tcgplayer_order_products')
op.drop_index(op.f('ix_tcgplayer_groups_id'), table_name='tcgplayer_groups')
op.drop_index(op.f('ix_tcgplayer_groups_group_id'), table_name='tcgplayer_groups')
op.drop_table('tcgplayer_groups')
op.drop_index(op.f('ix_tcgplayer_categories_id'), table_name='tcgplayer_categories')
op.drop_index(op.f('ix_tcgplayer_categories_category_id'), table_name='tcgplayer_categories')
op.drop_table('tcgplayer_categories')
op.drop_index(op.f('ix_mtgjson_skus_sku_id'), table_name='mtgjson_skus')
op.drop_index(op.f('ix_mtgjson_skus_product_id'), table_name='mtgjson_skus')
op.drop_index(op.f('ix_mtgjson_skus_id'), table_name='mtgjson_skus')
op.drop_table('mtgjson_skus')
op.drop_index(op.f('ix_mtgjson_cards_id'), table_name='mtgjson_cards')
op.drop_index(op.f('ix_mtgjson_cards_card_id'), table_name='mtgjson_cards')
op.drop_table('mtgjson_cards')
op.drop_index(op.f('ix_inventory_tcgplayer_id'), table_name='inventory')
op.drop_index(op.f('ix_inventory_id'), table_name='inventory')
op.drop_table('inventory')
op.drop_index(op.f('ix_games_id'), table_name='games')
op.drop_table('games')
op.drop_index(op.f('ix_files_id'), table_name='files')
op.drop_table('files')
op.drop_index(op.f('ix_cards_tcgplayer_sku'), table_name='cards')
op.drop_index(op.f('ix_cards_set_name'), table_name='cards')
op.drop_index(op.f('ix_cards_name'), table_name='cards')
op.drop_index(op.f('ix_cards_id'), table_name='cards')
op.drop_table('cards')
# ### end Alembic commands ###
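This migration collapses the entire incremental history deleted above into the single baseline revision 479003fbead7. A database that was already migrated under the old chain presumably should not re-run this schema creation; instead its alembic_version row would be pointed at the new baseline. A minimal sketch using Alembic's command API, assuming an alembic.ini at the project root (the config location is not shown in this diff):

# Hypothetical one-off stamp script; the alembic.ini path is an assumption.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")
command.stamp(cfg, "479003fbead7")  # record the baseline without running any DDL

A fresh database, by contrast, would simply run alembic upgrade head against the new single-revision history.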

View File

@@ -1,26 +0,0 @@
"""merge heads
Revision ID: 4ad81b486caf
Revises: 2025_04_14_remove_mtgjson_data_columns, 8764850e4e35
Create Date: 2025-04-12 23:38:27.257987
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '4ad81b486caf'
down_revision: Union[str, None] = ('2025_04_14_remove_mtgjson_data_columns', '8764850e4e35')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,171 +0,0 @@
"""create inventory table
Revision ID: 4dbeb89dd33a
Revises:
Create Date: 2025-04-09 21:56:49.068087
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '4dbeb89dd33a'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('boxes',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=True),
sa.Column('type', sa.String(), nullable=True),
sa.Column('set_code', sa.String(), nullable=True),
sa.Column('sku', sa.Integer(), nullable=True),
sa.Column('name', sa.String(), nullable=True),
sa.Column('expected_number_of_cards', sa.Integer(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_boxes_id'), 'boxes', ['id'], unique=False)
op.create_table('cards',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('rarity', sa.String(), nullable=True),
sa.Column('set_name', sa.String(), nullable=True),
sa.Column('price', sa.Float(), nullable=True),
sa.Column('quantity', sa.Integer(), nullable=True),
sa.Column('tcgplayer_sku', sa.String(), nullable=True),
sa.Column('product_line', sa.String(), nullable=True),
sa.Column('product_name', sa.String(), nullable=True),
sa.Column('title', sa.String(), nullable=True),
sa.Column('number', sa.String(), nullable=True),
sa.Column('condition', sa.String(), nullable=True),
sa.Column('tcg_market_price', sa.Float(), nullable=True),
sa.Column('tcg_direct_low', sa.Float(), nullable=True),
sa.Column('tcg_low_price_with_shipping', sa.Float(), nullable=True),
sa.Column('tcg_low_price', sa.Float(), nullable=True),
sa.Column('total_quantity', sa.Integer(), nullable=True),
sa.Column('add_to_quantity', sa.Integer(), nullable=True),
sa.Column('tcg_marketplace_price', sa.Float(), nullable=True),
sa.Column('photo_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_cards_id'), 'cards', ['id'], unique=False)
op.create_index(op.f('ix_cards_name'), 'cards', ['name'], unique=False)
op.create_index(op.f('ix_cards_set_name'), 'cards', ['set_name'], unique=False)
op.create_index(op.f('ix_cards_tcgplayer_sku'), 'cards', ['tcgplayer_sku'], unique=True)
op.create_table('files',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('type', sa.String(), nullable=True),
sa.Column('path', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_files_id'), 'files', ['id'], unique=False)
op.create_table('games',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_games_id'), 'games', ['id'], unique=False)
op.create_table('inventory',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('tcgplayer_id', sa.String(), nullable=True),
sa.Column('product_line', sa.String(), nullable=True),
sa.Column('set_name', sa.String(), nullable=True),
sa.Column('product_name', sa.String(), nullable=True),
sa.Column('title', sa.String(), nullable=True),
sa.Column('number', sa.String(), nullable=True),
sa.Column('rarity', sa.String(), nullable=True),
sa.Column('condition', sa.String(), nullable=True),
sa.Column('tcg_market_price', sa.Float(), nullable=True),
sa.Column('tcg_direct_low', sa.Float(), nullable=True),
sa.Column('tcg_low_price_with_shipping', sa.Float(), nullable=True),
sa.Column('tcg_low_price', sa.Float(), nullable=True),
sa.Column('total_quantity', sa.Integer(), nullable=True),
sa.Column('add_to_quantity', sa.Integer(), nullable=True),
sa.Column('tcg_marketplace_price', sa.Float(), nullable=True),
sa.Column('photo_url', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_inventory_id'), 'inventory', ['id'], unique=False)
op.create_index(op.f('ix_inventory_tcgplayer_id'), 'inventory', ['tcgplayer_id'], unique=True)
op.create_table('open_boxes',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('box_id', sa.Integer(), nullable=True),
sa.Column('number_of_cards', sa.Integer(), nullable=True),
sa.Column('date_opened', sa.DateTime(timezone=True), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.ForeignKeyConstraint(['box_id'], ['boxes.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_open_boxes_id'), 'open_boxes', ['id'], unique=False)
op.create_table('tcgplayer_products',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=False),
sa.Column('name', sa.String(), nullable=False),
sa.Column('clean_name', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('group_id', sa.Integer(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('modified_on', sa.DateTime(), nullable=True),
sa.Column('image_count', sa.Integer(), nullable=True),
sa.Column('ext_rarity', sa.String(), nullable=True),
sa.Column('ext_number', sa.String(), nullable=True),
sa.Column('low_price', sa.Float(), nullable=True),
sa.Column('mid_price', sa.Float(), nullable=True),
sa.Column('high_price', sa.Float(), nullable=True),
sa.Column('market_price', sa.Float(), nullable=True),
sa.Column('direct_low_price', sa.Float(), nullable=True),
sa.Column('sub_type_name', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('(CURRENT_TIMESTAMP)'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('product_id')
)
op.create_index(op.f('ix_tcgplayer_products_id'), 'tcgplayer_products', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_products_product_id'), 'tcgplayer_products', ['product_id'], unique=True)
# ### end Alembic commands ###
def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index(op.f('ix_tcgplayer_products_product_id'), table_name='tcgplayer_products')
op.drop_index(op.f('ix_tcgplayer_products_id'), table_name='tcgplayer_products')
op.drop_index(op.f('ix_open_boxes_id'), table_name='open_boxes')
op.drop_table('open_boxes')
op.drop_index(op.f('ix_inventory_tcgplayer_id'), table_name='inventory')
op.drop_index(op.f('ix_inventory_id'), table_name='inventory')
op.drop_table('inventory')
op.drop_index(op.f('ix_games_id'), table_name='games')
op.drop_table('games')
op.drop_index(op.f('ix_files_id'), table_name='files')
op.drop_table('files')
op.drop_index(op.f('ix_cards_tcgplayer_sku'), table_name='cards')
op.drop_index(op.f('ix_cards_set_name'), table_name='cards')
op.drop_index(op.f('ix_cards_name'), table_name='cards')
op.drop_index(op.f('ix_cards_id'), table_name='cards')
op.drop_table('cards')
op.drop_index(op.f('ix_boxes_id'), table_name='boxes')
op.drop_table('boxes')
# ### end Alembic commands ###

View File

@@ -1,26 +0,0 @@
"""merge all heads
Revision ID: 5bf5f87793d7
Revises: 2025_04_14_fix_alembic_version, 2025_04_14_fix_mtgjson_final
Create Date: 2025-04-13 00:12:47.613416
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '5bf5f87793d7'
down_revision: Union[str, None] = ('2025_04_14_fix_alembic_version', '2025_04_14_fix_mtgjson_final')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,126 +0,0 @@
"""create_tcgplayer_order_tables
Revision ID: 6f2b3f870fdf
Revises: fix_foreign_key_issue
Create Date: 2025-04-16 20:19:01.698636
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects.postgresql import JSON
# revision identifiers, used by Alembic.
revision: str = '6f2b3f870fdf'
down_revision: Union[str, None] = 'fix_foreign_key_issue'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create tcgplayer_orders table
op.create_table(
'tcgplayer_orders',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('order_created_at', sa.DateTime(), nullable=True),
sa.Column('status', sa.String(), nullable=True),
sa.Column('channel', sa.String(), nullable=True),
sa.Column('fulfillment', sa.String(), nullable=True),
sa.Column('seller_name', sa.String(), nullable=True),
sa.Column('buyer_name', sa.String(), nullable=True),
sa.Column('payment_type', sa.String(), nullable=True),
sa.Column('pickup_status', sa.String(), nullable=True),
sa.Column('shipping_type', sa.String(), nullable=True),
sa.Column('estimated_delivery_date', sa.DateTime(), nullable=True),
sa.Column('recipient_name', sa.String(), nullable=True),
sa.Column('address_line_1', sa.String(), nullable=True),
sa.Column('address_line_2', sa.String(), nullable=True),
sa.Column('city', sa.String(), nullable=True),
sa.Column('state', sa.String(), nullable=True),
sa.Column('zip_code', sa.String(), nullable=True),
sa.Column('country', sa.String(), nullable=True),
sa.Column('tracking_numbers', JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_orders_id'), 'tcgplayer_orders', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_orders_order_number'), 'tcgplayer_orders', ['order_number'], unique=False)
# Create tcgplayer_order_transactions table
op.create_table(
'tcgplayer_order_transactions',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('product_amount', sa.Float(), nullable=True),
sa.Column('shipping_amount', sa.Float(), nullable=True),
sa.Column('gross_amount', sa.Float(), nullable=True),
sa.Column('fee_amount', sa.Float(), nullable=True),
sa.Column('net_amount', sa.Float(), nullable=True),
sa.Column('direct_fee_amount', sa.Float(), nullable=True),
sa.Column('taxes', JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_order_transactions_id'), 'tcgplayer_order_transactions', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_order_transactions_order_number'), 'tcgplayer_order_transactions', ['order_number'], unique=False)
# Create tcgplayer_order_products table
op.create_table(
'tcgplayer_order_products',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('product_name', sa.String(), nullable=True),
sa.Column('unit_price', sa.Float(), nullable=True),
sa.Column('extended_price', sa.Float(), nullable=True),
sa.Column('quantity', sa.Integer(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('product_id', sa.String(), nullable=True),
sa.Column('sku_id', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_order_products_id'), 'tcgplayer_order_products', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_order_products_order_number'), 'tcgplayer_order_products', ['order_number'], unique=False)
# Create tcgplayer_order_refunds table
op.create_table(
'tcgplayer_order_refunds',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('order_number', sa.String(), nullable=True),
sa.Column('refund_created_at', sa.DateTime(), nullable=True),
sa.Column('type', sa.String(), nullable=True),
sa.Column('amount', sa.Float(), nullable=True),
sa.Column('description', sa.String(), nullable=True),
sa.Column('origin', sa.String(), nullable=True),
sa.Column('shipping_amount', sa.Float(), nullable=True),
sa.Column('products', JSON(), nullable=True),
sa.Column('created_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.Column('updated_at', sa.DateTime(), server_default=sa.text('now()'), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_tcgplayer_order_refunds_id'), 'tcgplayer_order_refunds', ['id'], unique=False)
op.create_index(op.f('ix_tcgplayer_order_refunds_order_number'), 'tcgplayer_order_refunds', ['order_number'], unique=False)
def downgrade() -> None:
op.drop_index(op.f('ix_tcgplayer_order_refunds_order_number'), table_name='tcgplayer_order_refunds')
op.drop_index(op.f('ix_tcgplayer_order_refunds_id'), table_name='tcgplayer_order_refunds')
op.drop_table('tcgplayer_order_refunds')
op.drop_index(op.f('ix_tcgplayer_order_products_order_number'), table_name='tcgplayer_order_products')
op.drop_index(op.f('ix_tcgplayer_order_products_id'), table_name='tcgplayer_order_products')
op.drop_table('tcgplayer_order_products')
op.drop_index(op.f('ix_tcgplayer_order_transactions_order_number'), table_name='tcgplayer_order_transactions')
op.drop_index(op.f('ix_tcgplayer_order_transactions_id'), table_name='tcgplayer_order_transactions')
op.drop_table('tcgplayer_order_transactions')
op.drop_index(op.f('ix_tcgplayer_orders_order_number'), table_name='tcgplayer_orders')
op.drop_index(op.f('ix_tcgplayer_orders_id'), table_name='tcgplayer_orders')
op.drop_table('tcgplayer_orders')

View File

@@ -1,26 +0,0 @@
"""merge heads
Revision ID: 8764850e4e35
Revises: 2025_04_13_create_mtgjson_tables, 2025_04_09_create_tcgplayer_categories_table
Create Date: 2025-04-12 23:16:47.846723
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = '8764850e4e35'
down_revision: Union[str, None] = ('2025_04_13_create_mtgjson_tables', '2025_04_09_create_tcgplayer_categories_table')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,26 +0,0 @@
"""merge heads
Revision ID: d1628d8feb57
Revises: 2025_04_14_fix_mtgjson_foreign_key, 2025_04_14_remove_mtgjson_foreign_key
Create Date: 2025-04-13 00:11:03.312552
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'd1628d8feb57'
down_revision: Union[str, None] = ('2025_04_14_fix_mtgjson_foreign_key', '2025_04_14_remove_mtgjson_foreign_key')
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
pass
def downgrade() -> None:
pass

View File

@@ -1,101 +0,0 @@
"""remove product_id unique constraint
Revision ID: remove_product_id_unique_constraint
Revises: 4dbeb89dd33a
Create Date: 2025-04-09 23:10:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = 'remove_product_id_unique_constraint'
down_revision: str = '4dbeb89dd33a'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
# Create a new table without the unique constraint
op.create_table('tcgplayer_products_new',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(), nullable=False),
sa.Column('clean_name', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('group_id', sa.Integer(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('modified_on', sa.DateTime(), nullable=True),
sa.Column('image_count', sa.Integer(), nullable=True),
sa.Column('ext_rarity', sa.String(), nullable=True),
sa.Column('ext_number', sa.String(), nullable=True),
sa.Column('low_price', sa.Float(), nullable=True),
sa.Column('mid_price', sa.Float(), nullable=True),
sa.Column('high_price', sa.Float(), nullable=True),
sa.Column('market_price', sa.Float(), nullable=True),
sa.Column('direct_low_price', sa.Float(), nullable=True),
sa.Column('sub_type_name', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.ForeignKeyConstraint(['group_id'], ['tcgplayer_groups.group_id'])
)
# Copy data from old table to new table
op.execute('INSERT INTO tcgplayer_products_new SELECT * FROM tcgplayer_products')
# Drop old table
op.drop_table('tcgplayer_products')
# Rename new table to old table name
op.rename_table('tcgplayer_products_new', 'tcgplayer_products')
# Create indexes
op.create_index('ix_tcgplayer_products_id', 'tcgplayer_products', ['id'], unique=False)
op.create_index('ix_tcgplayer_products_product_id', 'tcgplayer_products', ['product_id'], unique=False)
def downgrade() -> None:
# Create a new table with the unique constraint
op.create_table('tcgplayer_products_new',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('product_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(), nullable=False),
sa.Column('clean_name', sa.String(), nullable=True),
sa.Column('image_url', sa.String(), nullable=True),
sa.Column('category_id', sa.Integer(), nullable=True),
sa.Column('group_id', sa.Integer(), nullable=True),
sa.Column('url', sa.String(), nullable=True),
sa.Column('modified_on', sa.DateTime(), nullable=True),
sa.Column('image_count', sa.Integer(), nullable=True),
sa.Column('ext_rarity', sa.String(), nullable=True),
sa.Column('ext_number', sa.String(), nullable=True),
sa.Column('low_price', sa.Float(), nullable=True),
sa.Column('mid_price', sa.Float(), nullable=True),
sa.Column('high_price', sa.Float(), nullable=True),
sa.Column('market_price', sa.Float(), nullable=True),
sa.Column('direct_low_price', sa.Float(), nullable=True),
sa.Column('sub_type_name', sa.String(), nullable=True),
sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('product_id'),
sa.ForeignKeyConstraint(['group_id'], ['tcgplayer_groups.group_id'])
)
# Copy data from old table to new table
op.execute('INSERT INTO tcgplayer_products_new SELECT * FROM tcgplayer_products')
# Drop old table
op.drop_table('tcgplayer_products')
# Rename new table to old table name
op.rename_table('tcgplayer_products_new', 'tcgplayer_products')
# Create indexes
op.create_index('ix_tcgplayer_products_id', 'tcgplayer_products', ['id'], unique=False)
op.create_index('ix_tcgplayer_products_product_id', 'tcgplayer_products', ['product_id'], unique=True)
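The upgrade in this (now deleted) migration removes the unique constraint by rebuilding the table: create tcgplayer_products_new without the constraint, copy the rows, drop the old table, and rename. That copy-and-swap is the usual workaround on backends such as SQLite that cannot drop a constraint in place; Alembic's batch mode wraps the same pattern. A roughly equivalent sketch, with the caveat that the constraint name below is a guess, since the original migration never names it:

# Hypothetical batch-mode equivalent; the constraint name is an assumption.
def upgrade() -> None:
    with op.batch_alter_table('tcgplayer_products') as batch_op:
        batch_op.drop_constraint('uq_tcgplayer_products_product_id', type_='unique')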

66
app.log
View File

@@ -1,33 +1,33 @@
2025-04-16 23:58:58,575 - INFO - app.main - Application starting up...
2025-04-16 23:58:58,622 - INFO - app.main - Database initialized successfully
2025-04-16 23:58:58,622 - INFO - app.services.service_manager - Service OrderManagementService registered
2025-04-16 23:58:58,622 - INFO - app.services.service_manager - Service TCGPlayerInventoryService registered
2025-04-16 23:58:58,622 - INFO - app.services.service_manager - Service LabelPrinterService registered
2025-04-16 23:58:58,622 - INFO - app.services.service_manager - Service RegularPrinterService registered
2025-04-16 23:58:58,625 - INFO - app.services.service_manager - Service AddressLabelService registered
2025-04-16 23:58:58,853 - INFO - app.services.service_manager - Service PullSheetService registered
2025-04-16 23:58:58,854 - INFO - app.services.service_manager - Service SetLabelService registered
2025-04-16 23:58:58,897 - INFO - app.services.service_manager - Service DataInitializationService registered
2025-04-16 23:58:58,914 - INFO - app.services.service_manager - Service SchedulerService registered
2025-04-16 23:58:58,914 - INFO - app.services.service_manager - All services initialized successfully
2025-04-16 23:58:58,914 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-16 23:58:58,914 - INFO - app.services.scheduler.base_scheduler - Scheduled task process_tcgplayer_export to run every 86400 seconds
2025-04-16 23:58:58,914 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-16 23:58:58,915 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_open_orders_hourly to run every 3600 seconds
2025-04-16 23:58:58,915 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-16 23:58:58,915 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_all_orders_daily to run every 86400 seconds
2025-04-16 23:58:58,915 - INFO - apscheduler.scheduler - Added job "SchedulerService.process_tcgplayer_export" to job store "default"
2025-04-16 23:58:58,915 - INFO - apscheduler.scheduler - Added job "SchedulerService.update_open_orders_hourly" to job store "default"
2025-04-16 23:58:58,915 - INFO - apscheduler.scheduler - Added job "SchedulerService.update_all_orders_daily" to job store "default"
2025-04-16 23:58:58,915 - INFO - apscheduler.scheduler - Scheduler started
2025-04-16 23:58:58,915 - INFO - app.services.scheduler.base_scheduler - Scheduler started
2025-04-16 23:58:58,915 - INFO - app.services.scheduler.scheduler_service - All scheduled tasks started
2025-04-16 23:58:58,915 - INFO - app.main - Scheduler started successfully
2025-04-16 23:59:00,078 - INFO - app.services.external_api.tcgplayer.order_management_service - Getting orders from 0 to 25
2025-04-16 23:59:00,385 - INFO - app.services.external_api.base_external_service - Making request to https://order-management-api.tcgplayer.com/orders/search?api-version=2.0
2025-04-16 23:59:00,386 - INFO - app.services.external_api.tcgplayer.order_management_service - Got 25 orders
2025-04-16 23:59:00,386 - INFO - app.services.external_api.tcgplayer.order_management_service - Getting orders from 25 to 50
2025-04-16 23:59:00,494 - INFO - app.services.external_api.base_external_service - Making request to https://order-management-api.tcgplayer.com/orders/search?api-version=2.0
2025-04-16 23:59:00,494 - INFO - app.services.external_api.tcgplayer.order_management_service - Got 0 orders
2025-04-16 23:59:00,969 - INFO - app.services.external_api.base_external_service - Making request to https://order-management-api.tcgplayer.com/orders/pull-sheets/export?api-version=2.0
2025-04-16 23:59:01,208 - INFO - app.services.regular_printer_service - Print job 75 submitted to printer MFCL2750DW-3
2025-04-17 13:27:20,268 - INFO - app.main - Application starting up...
2025-04-17 13:27:20,319 - INFO - app.main - Database initialized successfully
2025-04-17 13:27:20,366 - INFO - app.services.service_manager - Service OrderManagementService registered
2025-04-17 13:27:20,366 - INFO - app.services.service_manager - Service TCGPlayerInventoryService registered
2025-04-17 13:27:20,369 - INFO - app.services.service_manager - Service LabelPrinterService registered
2025-04-17 13:27:20,372 - INFO - app.services.service_manager - Service RegularPrinterService registered
2025-04-17 13:27:20,387 - INFO - app.services.service_manager - Service AddressLabelService registered
2025-04-17 13:27:20,592 - INFO - app.services.service_manager - Service PullSheetService registered
2025-04-17 13:27:20,592 - INFO - app.services.service_manager - Service SetLabelService registered
2025-04-17 13:27:20,634 - INFO - app.services.service_manager - Service DataInitializationService registered
2025-04-17 13:27:20,652 - INFO - app.services.service_manager - Service SchedulerService registered
2025-04-17 13:27:20,652 - INFO - app.services.service_manager - Service FileService registered
2025-04-17 13:27:20,653 - INFO - app.services.service_manager - All services initialized successfully
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_open_orders_hourly to run every 3600 seconds
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_all_orders_daily to run every 86400 seconds
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Added job "SchedulerService.update_open_orders_hourly" to job store "default"
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Added job "SchedulerService.update_all_orders_daily" to job store "default"
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Scheduler started
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.base_scheduler - Scheduler started
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.scheduler_service - All scheduled tasks started
2025-04-17 13:27:20,653 - INFO - app.main - Scheduler started successfully
2025-04-17 13:27:24,285 - INFO - app.services.regular_printer_service - Print job 85 submitted to printer MFCL2750DW-3
2025-04-17 13:28:05,282 - INFO - app.services.external_api.base_external_service - Making request to https://order-management-api.tcgplayer.com/orders/packing-slips/export?api-version=2.0
2025-04-17 13:28:05,417 - INFO - app.services.label_printer_service - Converting PDF app/data/cache/tcgplayer/packing_slips/pdf/packing_slip_2025-04-17_13-28-05.pdf to images
2025-04-17 13:28:05,489 - INFO - app.services.label_printer_service - Successfully converted PDF to 2 images
2025-04-17 13:28:05,489 - INFO - app.services.label_printer_service - Processing page 1 with dimensions (1700, 2200)
2025-04-17 13:28:09,731 - INFO - app.services.label_printer_service - Processing page 2 with dimensions (1700, 2200)
2025-04-17 13:28:15,097 - INFO - app.services.label_printer_service - Converting PDF app/data/cache/tcgplayer/packing_slips/pdf/packing_slip_2025-04-17_13-28-05.pdf to images
2025-04-17 13:28:15,167 - INFO - app.services.label_printer_service - Successfully converted PDF to 2 images
2025-04-17 13:28:15,167 - INFO - app.services.label_printer_service - Processing page 1 with dimensions (1700, 2200)
2025-04-17 13:28:19,411 - INFO - app.services.label_printer_service - Processing page 2 with dimensions (1700, 2200)


@ -1,5 +1,7 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
from contextlib import asynccontextmanager
import uvicorn
import logging
@ -64,6 +66,19 @@ app = FastAPI(
lifespan=lifespan
)
# Mount static files directory
app.mount("/static", StaticFiles(directory="app/static"), name="static")
# Serve index.html at root
@app.get("/")
async def read_root():
return FileResponse('app/static/index.html')
# Serve app.js
@app.get("/app.js")
async def read_app_js():
return FileResponse('app/static/app.js')
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],

app/models/__init__.py (new file)

@ -0,0 +1,26 @@
from app.models.box import Box
from app.models.card import Card
from app.models.file import File
from app.models.game import Game
from app.models.inventory import Inventory
from app.models.mtgjson_card import MTGJSONCard
from app.models.mtgjson_sku import MTGJSONSKU
from app.models.tcgplayer_category import TCGPlayerCategory
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_order import TCGPlayerOrder
from app.models.tcgplayer_product import TCGPlayerProduct
# This makes all models available for Alembic to discover
__all__ = [
'Box',
'Card',
'File',
'Game',
'Inventory',
'MTGJSONCard',
'MTGJSONSKU',
'TCGPlayerCategory',
'TCGPlayerGroup',
'TCGPlayerOrder',
'TCGPlayerProduct'
]
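The point of the re-export above is that a single `import app.models` registers every mapped class on the shared metadata. A minimal sketch of checking that, assuming `Base` is the declarative base from `app.db.database`:

from app.db.database import Base
import app.models  # noqa: F401 - imported for its side effect of registering tables

def registered_tables() -> list[str]:
    """Return the table names currently attached to Base.metadata."""
    return sorted(Base.metadata.tables.keys())

if __name__ == "__main__":
    print(registered_tables())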


@ -1,7 +1,7 @@
from pydantic import BaseModel, ConfigDict
from typing import List, Optional
from datetime import datetime
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy import Column, Integer, String, DateTime, JSON
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.db.database import Base
@ -12,7 +12,10 @@ class File(Base):
id = Column(Integer, primary_key=True, index=True)
name = Column(String)
type = Column(String)
file_type = Column(String)
content_type = Column(String)
path = Column(String)
size = Column(Integer) # File size in bytes
file_metadata = Column(JSON)
created_at = Column(DateTime(timezone=True), server_default=func.now())
updated_at = Column(DateTime(timezone=True), onupdate=func.now())
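For reference, a sketch of what a row in the extended schema looks like when written directly with a session (column names are taken from the model above; the helper itself is hypothetical):

from sqlalchemy.orm import Session
from app.models.file import File

def record_cached_pdf(db: Session, path: str, size: int, order_ids: list[str]) -> File:
    # Hypothetical helper: persist a metadata row for a PDF that is already on disk.
    row = File(
        name=path.rsplit("/", 1)[-1],
        file_type="packing_slip",
        content_type="application/pdf",
        path=path,
        size=size,
        file_metadata={"order_ids": sorted(order_ids)},
    )
    db.add(row)
    db.commit()
    db.refresh(row)
    return row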


@ -1,15 +1,12 @@
from fastapi import APIRouter, HTTPException, Depends, Query
from app.services.external_api.tcgplayer.order_management_service import OrderManagementService
from app.services.label_printer_service import LabelPrinterService
from app.services.regular_printer_service import RegularPrinterService
from app.services.address_label_service import AddressLabelService
from typing import List, Optional, Literal
from typing import List
from datetime import datetime
from pydantic import BaseModel, Field
from enum import Enum
from app.schemas.tcgplayer import TCGPlayerAPIOrderSummary, TCGPlayerAPIOrder
from app.schemas.generate import GenerateRequest, GenerateAddressLabelsRequest, GeneratePackingSlipsRequest, GeneratePullSheetsRequest, GenerateResponse
from app.services.service_manager import ServiceManager
from sqlalchemy.orm import Session
from app.db.database import get_db
class SearchRange(str, Enum):
@ -73,142 +70,86 @@ async def get_order(order_id: str) -> TCGPlayerAPIOrder:
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to fetch order: {str(e)}")
@router.post("/generate-pull-sheets")
@router.post("/generate-pull-sheets", response_model=GenerateResponse)
async def generate_pull_sheets(
order_ids: Optional[List[str]] = None,
all_open_orders: bool = False
) -> dict:
request: GeneratePullSheetsRequest,
db: Session = Depends(get_db)
) -> GenerateResponse:
"""
Generate and print pull sheets for the specified orders.
Args:
order_ids: List of TCGPlayer order numbers (optional if all_open_orders is True)
all_open_orders: If True, generate pull sheets for all orders (ignores order_ids)
request: Request body containing:
- order_ids: List of TCGPlayer order numbers
Returns:
Success status of the operation
"""
try:
order_management = service_manager.get_service('order_management')
if not all_open_orders and not order_ids:
raise HTTPException(
status_code=400,
detail="Either order_ids must be provided or all_open_orders must be True"
)
if all_open_orders:
order_ids = await order_management.get_order_ids(search_range="LastWeek", open_only=True)
pull_sheet = await order_management.get_pull_sheet(order_ids)
pull_sheet_file = await order_management.save_file(
pull_sheet,
f"pull_sheet_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.csv"
)
pull_sheet_service = service_manager.get_service('pull_sheet')
pull_sheet_pdf = await pull_sheet_service.generate_pull_sheet_pdf(pull_sheet_file)
order_ids = request.order_ids
pull_sheet = await pull_sheet_service.get_or_create_rendered_pull_sheet(db, order_ids)
regular_printer = service_manager.get_service('regular_printer')
success = await regular_printer.print_file(pull_sheet_pdf)
return {"success": success}
success = await regular_printer.print_file(pull_sheet.path)
# TODO: change to accept file instead of path
return {"success": success, "message": "Pull sheets generated and printed successfully"}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to generate pull sheet: {str(e)}")
@router.post("/generate-packing-slips")
@router.post("/generate-packing-slips", response_model=GenerateResponse)
async def generate_packing_slips(
order_ids: Optional[List[str]] = None,
all_open_orders: bool = False
) -> dict:
request: GeneratePackingSlipsRequest,
db: Session = Depends(get_db)
) -> GenerateResponse:
"""
Generate and print packing slips for the specified orders.
Args:
order_ids: List of TCGPlayer order numbers (optional if all_open_orders is True)
all_open_orders: If True, generate packing slips for all orders (ignores order_ids)
Returns:
Success status of the operation
request: Request body containing:
- order_ids: List of TCGPlayer order numbers
"""
try:
if not all_open_orders and not order_ids:
raise HTTPException(
status_code=400,
detail="Either order_ids must be provided or all_open_orders must be True"
)
# TODO: Add logic to fetch all orders when all_open_orders is True
if all_open_orders:
order_management = service_manager.get_service('order_management')
order_ids = await order_management.get_order_ids(search_range="LastWeek", open_only=True)
packing_slip = await order_management.get_packing_slip(order_ids)
packing_slip_file = await order_management.save_file(
packing_slip,
f"packing_slip_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.pdf"
)
label_printer = service_manager.get_service('label_printer')
success = await label_printer.print_file(
packing_slip_file,
label_size="dk1241",
label_type="packing_slip"
)
return {"success": success}
order_management = service_manager.get_service('order_management')
# get or create packing slips
packing_slips = await order_management.get_or_create_packing_slip(db, request.order_ids)
if packing_slips:
# print packing slips
label_printer = service_manager.get_service('label_printer')
success = await label_printer.print_file(packing_slips.path, label_size="dk1241", label_type="packing_slip")
return {"success": success, "message": "Packing slips generated and printed successfully"}
return {"success": False, "message": "Packing slips not found"}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to generate packing slip: {str(e)}")
raise HTTPException(status_code=500, detail=f"Failed to generate packing slips: {str(e)}")
@router.post("/generate-address-labels")
async def generate_address_labels(
order_ids: Optional[List[str]] = None,
all_open_orders: bool = False,
label_type: LabelType = LabelType.DK1201
) -> dict:
request: GenerateAddressLabelsRequest,
db: Session = Depends(get_db)
) -> GenerateResponse:
"""
Generate and print address labels for the specified orders.
Args:
order_ids: List of TCGPlayer order numbers (optional if all_open_orders is True)
all_open_orders: If True, generate address labels for all orders (ignores order_ids)
label_type: Type of label to generate (dk1201 or dk1241)
request: Request body containing:
- order_ids: List of TCGPlayer order numbers
- label_type: Type of label to generate (dk1201 or dk1241)
Returns:
Success status of the operation
"""
try:
order_management = service_manager.get_service('order_management')
if not all_open_orders and not order_ids:
raise HTTPException(
status_code=400,
detail="Either order_ids must be provided or all_open_orders must be True"
)
if all_open_orders:
order_ids = await order_management.get_order_ids(search_range="LastWeek", open_only=True)
shipping_csv = await order_management.get_shipping_csv(order_ids)
shipping_csv_file = await order_management.save_file(
shipping_csv,
f"shipping_csv_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.csv"
)
# Generate PDFs
address_label = service_manager.get_service('address_label')
pdf_files = await address_label.generate_labels_from_csv(
shipping_csv_file,
label_type=label_type
)
# Print each PDF
label_printer = service_manager.get_service('label_printer')
for pdf_file in pdf_files:
success = await label_printer.print_file(
pdf_file,
label_size=label_type,
label_type="address_label"
)
if not success:
raise HTTPException(
status_code=500,
detail=f"Failed to print address label for file {pdf_file}"
)
return {"success": True, "message": "Address labels generated and printed successfully"}
address_label_service = service_manager.get_service('address_label')
address_labels = await address_label_service.get_or_create_address_labels(db, request.order_ids, request.label_type)
if address_labels:
# print address labels
label_printer = service_manager.get_service('label_printer')
for address_label in address_labels:
success = await label_printer.print_file(address_label.path, label_size=request.label_type, label_type="address_label")
if not success:
return {"success": False, "message": "Failed to print one or more address labels"}
return {"success": True, "message": "Address labels generated and printed successfully"}
return {"success": False, "message": "Address labels not found"}
except Exception as e:
raise HTTPException(status_code=500, detail=f"Failed to generate address labels: {str(e)}")
raise HTTPException(status_code=500, detail=f"Failed to generate address labels: {str(e)}")
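A sketch of how a client would call the reworked endpoints; the `/api/orders` prefix is inferred from the frontend code later in this commit, and httpx is used here purely as an example HTTP client, so treat both as assumptions:

import httpx

BASE = "http://localhost:8000/api/orders"  # host and port are assumptions

def print_pull_sheets(order_ids: list[str]) -> dict:
    resp = httpx.post(f"{BASE}/generate-pull-sheets", json={"order_ids": order_ids}, timeout=120)
    resp.raise_for_status()
    return resp.json()  # e.g. {"success": true, "message": "Pull sheets generated and printed successfully"}

def print_address_labels(order_ids: list[str], label_type: str = "dk1201") -> dict:
    resp = httpx.post(
        f"{BASE}/generate-address-labels",
        json={"order_ids": order_ids, "label_type": label_type},
        timeout=120,
    )
    resp.raise_for_status()
    return resp.json()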


@ -106,26 +106,3 @@ async def update_game(game_id: int, game: GameUpdate):
async def delete_game(game_id: int):
return {"message": "Game deleted successfully"}
@router.post("/tcgplayer/process-export")
async def process_tcgplayer_export(export_type: str, db: Session = Depends(get_db)):
"""
Download and process a TCGPlayer export file.
Args:
export_type: Type of export to process (staged, live, or pricing)
db: Database session
"""
try:
# Download the file
file_bytes = await tcgplayer_inventory_service.get_tcgplayer_export(export_type)
# Process the file and load into database
stats = await file_processing_service.process_tcgplayer_export(db, file_bytes)
return {
"message": "Export processed successfully",
"stats": stats
}
except Exception as e:
raise HTTPException(status_code=500, detail=str(e))


@ -1,6 +1,5 @@
from fastapi import APIRouter, HTTPException, Depends
from app.services.set_label_service import SetLabelService
from app.services.label_printer_service import LabelPrinterService
from app.services.service_manager import ServiceManager
from typing import List, Optional
import asyncio
from app.db.database import get_db
@ -10,23 +9,28 @@ from pydantic import BaseModel
class SetLabelRequest(BaseModel):
sets: List[str]
# Initialize service manager
service_manager = ServiceManager()
router = APIRouter(prefix="/set-labels")
set_label_service = SetLabelService()
label_printer_service = LabelPrinterService(printer_api_url="http://192.168.1.110:8000")
@router.post("/generate")
async def generate_set_labels(request: SetLabelRequest):
async def generate_set_labels(request: SetLabelRequest, db: Session = Depends(get_db)):
"""
Generate PDF labels for the specified MTG sets.
Args:
request: Request body containing list of set codes to generate labels for
db: Database session
Returns:
Message indicating success or failure
"""
try:
set_pdfs = await set_label_service.generate_labels(request.sets)
set_label_service = service_manager.get_service('set_label')
label_printer_service = service_manager.get_service('label_printer')
set_pdfs = await set_label_service.generate_labels(db, request.sets)
for set_pdf in set_pdfs:
success = await label_printer_service.print_file(set_pdf, label_size="dk1201", label_type="set_label")
if not success:
@ -46,6 +50,7 @@ async def get_available_sets(db: Session = Depends(get_db)):
List of set codes and their names
"""
try:
set_label_service = service_manager.get_service('set_label')
sets = await set_label_service.get_available_sets(db)
return sets
except Exception as e:


@ -9,7 +9,7 @@ class FileBase(BaseModel):
type: Optional[str] = None
size: Optional[int] = None
content_type: Optional[str] = None
metadata: Optional[dict] = None
file_metadata: Optional[dict] = None
# Schema for creating a new file
class FileCreate(FileBase):

app/schemas/generate.py (new file)

@ -0,0 +1,29 @@
from pydantic import BaseModel
class GenerateRequest(BaseModel):
"""
Base class for all generate requests.
Attributes:
order_ids: List of TCGPlayer order numbers
"""
order_ids: list[str]
class GenerateAddressLabelsRequest(GenerateRequest):
"""
Request for generating address labels.
Attributes:
label_type: Type of label to generate (dk1201 or dk1241)
"""
label_type: str
class GeneratePackingSlipsRequest(GenerateRequest):
pass
class GeneratePullSheetsRequest(GenerateRequest):
pass
class GenerateResponse(BaseModel):
message: str
success: bool
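As a quick illustration of the request/response shapes these models enforce (the values are made up):

from app.schemas.generate import GenerateAddressLabelsRequest, GenerateResponse

req = GenerateAddressLabelsRequest(order_ids=["ABC123", "DEF456"], label_type="dk1241")
print(req.model_dump())   # {'order_ids': ['ABC123', 'DEF456'], 'label_type': 'dk1241'}

resp = GenerateResponse(success=True, message="Address labels generated and printed successfully")
print(resp.model_dump_json())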


@ -2,10 +2,12 @@ from app.services.base_service import BaseService
from app.services.service_manager import ServiceManager
from app.services.file_processing_service import FileProcessingService
from app.services.inventory_service import InventoryService
from app.services.file_service import FileService
__all__ = [
'BaseService',
'ServiceManager',
'FileProcessingService',
'InventoryService'
'InventoryService',
'FileService'
]


@ -7,11 +7,15 @@ from weasyprint import HTML
import logging
import asyncio
from concurrent.futures import ThreadPoolExecutor
from sqlalchemy.orm import Session
from app.schemas.file import FileInDB
from app.services.base_service import BaseService
logger = logging.getLogger(__name__)
class AddressLabelService:
class AddressLabelService(BaseService):
def __init__(self):
super().__init__(None) # BaseService doesn't need a model for this service
self.template_dir = Path("app/data/assets/templates")
self.env = Environment(loader=FileSystemLoader(str(self.template_dir)))
self.templates = {
@ -19,19 +23,39 @@ class AddressLabelService:
"dk1201": self.env.get_template("address_label_dk1201.html")
}
self.return_address_path = "file://" + os.path.abspath("app/data/assets/images/ccrcardsaddress.png")
self.output_dir = "app/data/cache/tcgplayer/address_labels/"
os.makedirs(self.output_dir, exist_ok=True)
self.executor = ThreadPoolExecutor()
async def generate_labels_from_csv(self, csv_path: str, label_type: Literal["dk1201", "dk1241"]) -> List[str]:
async def get_or_create_address_labels(self, db: Session, order_ids: list[str], label_type: Literal["dk1201", "dk1241"]) -> List[FileInDB]:
"""Get or create address labels for the specified orders.
Args:
db: Database session
order_ids: List of TCGPlayer order numbers
label_type: Type of label to generate ("dk1201" or "dk1241")
Returns:
List of FileInDB objects for generated PDF files
"""
# check if address labels exist for the order ids
file_service = self.get_service('file')
# The label PDFs themselves are not cached; reuse the shipping CSV when it exists and regenerate labels from it
shipping_csv = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "shipping_csv", "text/csv")
if shipping_csv:
return await self.generate_labels_from_csv(db, shipping_csv.path, label_type)
else:
order_management = self.get_service('order_management')
shipping_csv = await order_management.get_shipping_csv(db, order_ids)
return await self.generate_labels_from_csv(db, shipping_csv.path, label_type)
async def generate_labels_from_csv(self, db: Session, csv_path: str, label_type: Literal["dk1201", "dk1241"]) -> List[FileInDB]:
"""Generate address labels from a CSV file and save them as PDFs.
Args:
db: Database session
csv_path: Path to the CSV file containing address data
label_type: Type of label to generate ("dk1201" or "dk1241")
Returns:
List of paths to generated PDF files
List of FileInDB objects for generated PDF files
"""
generated_files = []
@ -46,9 +70,9 @@ class AddressLabelService:
continue
# Generate label for each row
pdf_path = await self._generate_single_label(row, label_type)
if pdf_path:
generated_files.append(str(pdf_path))
file_record = await self._generate_single_label(db, row, label_type)
if file_record:
generated_files.append(file_record)
return generated_files
@ -58,15 +82,16 @@ class AddressLabelService:
reader = csv.DictReader(csvfile)
return list(reader)
async def _generate_single_label(self, row: Dict[str, str], label_type: Literal["dk1201", "dk1241"]) -> Optional[str]:
async def _generate_single_label(self, db: Session, row: Dict[str, str], label_type: Literal["dk1201", "dk1241"]) -> Optional[FileInDB]:
"""Generate a single address label PDF.
Args:
db: Database session
row: Dictionary containing address data
label_type: Type of label to generate ("dk1201" or "dk1241")
Returns:
Path to the generated PDF file or None if generation failed
FileInDB object for the generated PDF file or None if generation failed
"""
try:
# Prepare template data
@ -88,12 +113,30 @@ class AddressLabelService:
# Generate PDF in a thread pool
loop = asyncio.get_event_loop()
pdf_path = self.output_dir + f"{row['Order #']}_{label_type}.pdf"
await loop.run_in_executor(
pdf_content = await loop.run_in_executor(
self.executor,
lambda: HTML(string=html_content).write_pdf(str(pdf_path))
lambda: HTML(string=html_content).write_pdf()
)
return pdf_path
# Prepare metadata
metadata = {
"order_number": row.get('Order #'),
"label_type": label_type
}
# Save using FileService
filename = f"{row['Order #']}_{label_type}.pdf"
file_record = await self.file_service.save_file(
db=db,
file_data=pdf_content,
filename=filename,
subdir="address_labels",
file_type="address_label",
content_type="application/pdf",
metadata=metadata
)
return file_record
except Exception as e:
logger.error(f"Error generating label for order {row.get('Order #', 'unknown')}: {str(e)}")


@ -1,12 +1,15 @@
from typing import Type, TypeVar, Generic, List, Optional, Any
from sqlalchemy.orm import Session
from app.db.database import Base
from app.services.service_manager import ServiceManager
T = TypeVar('T')
class BaseService(Generic[T]):
def __init__(self, model: Type[T]):
self.model = model
self.service_manager = ServiceManager()
self._services = {}
def get(self, db: Session, id: int) -> Optional[T]:
return db.query(self.model).filter(self.model.id == id).first()
@ -36,4 +39,15 @@ class BaseService(Generic[T]):
db.delete(obj)
db.commit()
return True
return False
return False
def get_service(self, name: str) -> Any:
"""Get a service by name with lazy loading"""
if name not in self._services:
self._services[name] = self.service_manager.get_service(name)
return self._services[name]
@property
def file_service(self):
"""Convenience property for file service"""
return self.get_service('file')
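A minimal sketch of what a subclass gains from this change: services resolve each other lazily by name instead of importing concrete classes at module load time. The example service below is hypothetical:

from sqlalchemy.orm import Session
from app.services.base_service import BaseService
from app.models.file import File

class ExampleFileAuditService(BaseService[File]):
    def __init__(self):
        super().__init__(File)

    async def audit(self, db: Session, file_id: int) -> bool:
        # self.file_service is resolved on first use via ServiceManager.get_service('file')
        record = await self.file_service.get_file(db, file_id)
        return record is not None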


@ -2,6 +2,11 @@ from typing import Any, Dict, Optional, Union
import aiohttp
import logging
import json
import csv
import io
from app.services.service_manager import ServiceManager
from app.schemas.file import FileInDB
from sqlalchemy.orm import Session
logger = logging.getLogger(__name__)
@ -10,6 +15,8 @@ class BaseExternalService:
self.base_url = base_url
self.api_key = api_key
self.session = None
self.service_manager = ServiceManager()
self._services = {}
async def _get_session(self) -> aiohttp.ClientSession:
if self.session is None or self.session.closed:
@ -73,4 +80,36 @@ class BaseExternalService:
if self.session and not self.session.closed:
await self.session.close()
self.session = None
logger.info(f"Closed session for {self.__class__.__name__}")
logger.info(f"Closed session for {self.__class__.__name__}")
def get_service(self, name: str) -> Any:
"""Get a service by name with lazy loading"""
if name not in self._services:
self._services[name] = self.service_manager.get_service(name)
return self._services[name]
@property
def file_service(self):
"""Convenience property for file service"""
return self.get_service('file')
async def save_file(self, db: Session, file_data: Union[bytes, list[dict]], file_name: str, subdir: str, file_type: Optional[str] = None) -> FileInDB:
"""Save a file using the FileService"""
if isinstance(file_data, list):
# Convert list of dictionaries to CSV bytes
output = io.StringIO()
writer = csv.DictWriter(output, fieldnames=file_data[0].keys())
writer.writeheader()
writer.writerows(file_data)
file_data = output.getvalue().encode('utf-8')
file_type = file_type or 'text/csv'
# Use FileService to save the file
file_service = self.get_service('file')
return await file_service.save_file(
db=db,
file_data=file_data,
filename=file_name,
subdir=subdir,
file_type=file_type
)
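The list-of-dicts branch above means external services can hand tabular API responses straight to the cache layer. A sketch of that path, assuming the order_management service (which inherits save_file from this base class after this commit) and the filename are illustrative only:

import asyncio
from app.db.database import get_db
from app.services.service_manager import ServiceManager

async def snapshot_orders(rows: list[dict]) -> str:
    db = next(get_db())
    try:
        svc = ServiceManager().get_service("order_management")
        record = await svc.save_file(
            db=db,
            file_data=rows,                   # list of dicts is converted to CSV bytes above
            file_name="orders_snapshot.csv",  # hypothetical filename
            subdir="tcgplayer/orders",
        )
        return record.path
    finally:
        db.close()

if __name__ == "__main__":
    print(asyncio.run(snapshot_orders([{"orderNumber": "ABC123", "total": "12.34"}])))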


@ -11,9 +11,12 @@ from datetime import datetime
from app.models.mtgjson_card import MTGJSONCard
from app.models.mtgjson_sku import MTGJSONSKU
from app.db.database import get_db, transaction
from app.services.external_api.base_external_service import BaseExternalService
from app.schemas.file import FileInDB
class MTGJSONService:
class MTGJSONService(BaseExternalService):
def __init__(self, cache_dir: str = "app/data/cache/mtgjson", batch_size: int = 1000):
super().__init__(base_url="https://mtgjson.com/api/v5/")
self.cache_dir = cache_dir
self.identifiers_dir = os.path.join(cache_dir, "identifiers")
self.skus_dir = os.path.join(cache_dir, "skus")
@ -38,27 +41,22 @@ class MTGJSONService:
"""Print progress message with flush"""
print(message, end=end, flush=True)
async def _download_file(self, url: str, output_path: str) -> None:
"""Download a file from the given URL to the specified path using streaming"""
async def _download_file(self, db: Session, url: str, filename: str, subdir: str) -> FileInDB:
"""Download a file from the given URL and save it using FileService"""
print(f"Downloading {url}...")
start_time = time.time()
total_size = 0
async with aiohttp.ClientSession() as session:
async with session.get(url) as response:
if response.status == 200:
total_size = int(response.headers.get('content-length', 0))
with open(output_path, 'wb') as f:
downloaded = 0
async for chunk in response.content.iter_chunked(8192):
f.write(chunk)
downloaded += len(chunk)
if total_size > 0:
percent = (downloaded / total_size) * 100
elapsed = time.time() - start_time
speed = downloaded / elapsed / 1024 / 1024 # MB/s
print(f"\rDownloading: {percent:.1f}% ({downloaded/1024/1024:.1f}MB/{total_size/1024/1024:.1f}MB) at {speed:.1f}MB/s", end="")
print("\nDownload complete!")
file_data = await response.read()
return await self.save_file(
db=db,
file_data=file_data,
file_name=filename,
subdir=f"mtgjson/{subdir}",
file_type=response.headers.get('content-type', 'application/octet-stream')
)
else:
raise Exception(f"Failed to download file from {url}. Status: {response.status}")
@ -153,14 +151,16 @@ class MTGJSONService:
self._print_progress("Starting MTGJSON identifiers processing...")
start_time = time.time()
zip_path = os.path.join(self.identifiers_dir, "AllIdentifiers.json.zip")
await self._download_file(
"https://mtgjson.com/api/v5/AllIdentifiers.json.zip",
zip_path
# Download the file using FileService
file_record = await self._download_file(
db=db,
url="https://mtgjson.com/api/v5/AllIdentifiers.json.zip",
filename="AllIdentifiers.json.zip",
subdir="identifiers"
)
self._print_progress("Unzipping file...")
json_path = await self._unzip_file(zip_path, self.identifiers_dir)
# Get the file path from the database record
zip_path = file_record.path
cards_processed = 0
current_batch = []
@ -169,7 +169,7 @@ class MTGJSONService:
self._print_progress("Processing cards...")
try:
for item in self._stream_json_file(json_path):
for item in self._stream_json_file(zip_path):
if item["type"] == "meta":
self._print_progress(f"Processing MTGJSON data version {item['data'].get('version')} from {item['data'].get('date')}")
continue
@ -239,14 +239,16 @@ class MTGJSONService:
self._print_progress("Starting MTGJSON SKUs processing...")
start_time = time.time()
zip_path = os.path.join(self.skus_dir, "TcgplayerSkus.json.zip")
await self._download_file(
"https://mtgjson.com/api/v5/TcgplayerSkus.json.zip",
zip_path
# Download the file using FileService
file_record = await self._download_file(
db=db,
url="https://mtgjson.com/api/v5/TcgplayerSkus.json.zip",
filename="TcgplayerSkus.json.zip",
subdir="skus"
)
self._print_progress("Unzipping file...")
json_path = await self._unzip_file(zip_path, self.skus_dir)
# Get the file path from the database record
zip_path = file_record.path
skus_processed = 0
current_batch = []
@ -255,7 +257,7 @@ class MTGJSONService:
self._print_progress("Processing SKUs...")
try:
for item in self._stream_json_file(json_path):
for item in self._stream_json_file(zip_path):
if item["type"] == "meta":
self._print_progress(f"Processing MTGJSON SKUs version {item['data'].get('version')} from {item['data'].get('date')}")
continue


@ -10,6 +10,7 @@ from app.db.database import get_db, transaction
from sqlalchemy.orm import Session
import py7zr
import os
from app.schemas.file import FileInDB
class TCGCSVService(BaseExternalService):
def __init__(self):
@ -37,32 +38,28 @@ class TCGCSVService(BaseExternalService):
endpoint = "tcgplayer/categories"
return await self._make_request("GET", endpoint)
async def get_archived_prices_for_date(self, date_str: str):
async def get_archived_prices_for_date(self, db: Session, date_str: str) -> str:
"""Fetch archived prices from TCGCSV API"""
# Check if the date directory already exists
extract_path = f"app/data/cache/tcgcsv/prices/{date_str}"
if os.path.exists(extract_path):
print(f"Prices for date {date_str} already exist, skipping download")
return date_str
# Download the archive file
endpoint = f"archive/tcgplayer/prices-{date_str}.ppmd.7z"
response = await self._make_request("GET", endpoint, binary=True)
# Save the archive file
archive_path = f"app/data/cache/tcgcsv/prices/zip/prices-{date_str}.ppmd.7z"
os.makedirs(os.path.dirname(archive_path), exist_ok=True)
with open(archive_path, "wb") as f:
f.write(response)
# Save the archive file using FileService
file_record = await self.save_file(
db=db,
file_data=response,
file_name=f"prices-{date_str}.ppmd.7z",
subdir=f"tcgcsv/prices/zip",
file_type="application/x-7z-compressed"
)
# Extract the 7z file
with py7zr.SevenZipFile(archive_path, 'r') as archive:
with py7zr.SevenZipFile(file_record.path, 'r') as archive:
# Extract to a directory named after the date
extract_path = f"app/data/cache/tcgcsv/prices/{date_str}"
os.makedirs(extract_path, exist_ok=True)
archive.extractall(path=extract_path)
# The extracted files will be in a directory structure like:
# {date_str}/{game_id}/{group_id}/prices
return date_str
async def get_archived_prices_for_date_range(self, start_date: str, end_date: str):


@ -1,4 +1,4 @@
from typing import Any, Dict, Optional, Union
from typing import Any, Dict, Optional, Union, Literal
import logging
from app.services.external_api.tcgplayer.base_tcgplayer_service import BaseTCGPlayerService
from app.schemas.tcgplayer import (
@ -21,7 +21,8 @@ from app.db.database import transaction
import os
import csv
import io
from app.schemas.file import FileInDB
from datetime import datetime
logger = logging.getLogger(__name__)
class OrderManagementService(BaseTCGPlayerService):
@ -87,7 +88,12 @@ class OrderManagementService(BaseTCGPlayerService):
response = await self._make_request("GET", f"/{order_id}{self.API_VERSION}")
return response
async def get_packing_slip(self, order_ids: list[str]):
async def get_or_create_packing_slip(self, db: Session, order_ids: list[str]) -> FileInDB:
# check if the file already exists
file_service = self.get_service('file')
file = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "packing_slip", "application/pdf")
if file:
return file
payload = {
"sortingType": "byRelease",
"format": "default",
@ -95,40 +101,53 @@ class OrderManagementService(BaseTCGPlayerService):
"orderNumbers": order_ids
}
response = await self._make_request("POST", self.packing_slip_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
return response
return await file_service.save_file(
db=db,
file_data=response,
filename=f"packing_slip_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.pdf",
subdir='tcgplayer/packing_slips/pdf',
file_type='packing_slip',
content_type='application/pdf',
metadata={"order_ids": order_ids}
)
async def get_pull_sheet(self, order_ids: list[str]):
async def get_pull_sheet(self, db: Session, order_ids: list[str]) -> FileInDB:
payload = {
"orderNumbers": order_ids,
"timezoneOffset": -4
}
response = await self._make_request("POST", self.pull_sheet_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
return response
# get file service
file_service = self.get_service('file')
# save file
return await file_service.save_file(
db=db,
file_data=response,
filename=f"pull_sheet_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.csv",
subdir='tcgplayer/pull_sheets/csv',
file_type='pull_sheet',
content_type='text/csv',
metadata={"order_ids": order_ids}
)
async def get_shipping_csv(self, order_ids: list[str]):
async def get_shipping_csv(self, db: Session, order_ids: list[str]) -> FileInDB:
payload = {
"orderNumbers": order_ids,
"timezoneOffset": -4
}
response = await self._make_request("POST", self.shipping_endpoint, data=payload, headers=self._get_headers("POST", "application/json"), auth_required=True, download_file=True)
return response
async def save_file(self, file_data: Union[bytes, list[dict]], file_name: str) -> str:
if not os.path.exists("app/data/cache/tcgplayer/orders"):
os.makedirs("app/data/cache/tcgplayer/orders")
file_path = f"app/data/cache/tcgplayer/orders/{file_name}"
if isinstance(file_data, list):
# Convert list of dictionaries to CSV bytes
output = io.StringIO()
writer = csv.DictWriter(output, fieldnames=file_data[0].keys())
writer.writeheader()
writer.writerows(file_data)
file_data = output.getvalue().encode('utf-8')
with open(file_path, "wb") as f:
f.write(file_data)
return file_path
# get file service
file_service = self.get_service('file')
# save file
return await file_service.save_file(
db=db,
file_data=response,
filename=f"shipping_{datetime.now().strftime('%Y-%m-%d_%H-%M-%S')}.csv",
subdir='tcgplayer/shipping/csv',
file_type='shipping_csv',
content_type='text/csv',
metadata={"order_ids": order_ids}
)
async def save_order_to_db(self, order: dict, db: Session):
# Parse API response using our API schema


@ -1,21 +1,33 @@
from typing import Dict, List, Optional
from app.services.external_api.tcgplayer.base_tcgplayer_service import BaseTCGPlayerService
from sqlalchemy.orm import Session
from app.schemas.file import FileInDB
class TCGPlayerInventoryService(BaseTCGPlayerService):
def __init__(self):
super().__init__()
async def get_tcgplayer_export(self, export_type: str):
async def get_tcgplayer_export(self, db: Session, export_type: str) -> FileInDB:
"""
Get a TCGPlayer Staged Inventory Export, Live Inventory Export, or Pricing Export
"""
if export_type == "staged":
endpoint = self.staged_inventory_endpoint
file_type = "text/csv"
elif export_type == "live":
endpoint = self.live_inventory_endpoint
file_type = "text/csv"
elif export_type == "pricing":
endpoint = self.pricing_export_endpoint
file_type = "text/csv"
else:
raise ValueError(f"Invalid export type: {export_type}, must be 'staged', 'live', or 'pricing'")
file_bytes = await self._make_request("GET", endpoint, download_file=True)
return file_bytes
return await self.save_file(
db=db,
file_data=file_bytes,
file_name=f"tcgplayer_{export_type}_export.csv",
subdir="tcgplayer/inventory",
file_type=file_type
)
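A sketch of driving the export download from a one-off script, reusing the `next(get_db())` session pattern this commit introduces in the scheduler; the script itself is hypothetical:

import asyncio
from app.db.database import get_db
from app.services.service_manager import ServiceManager

async def download_live_export() -> str:
    db = next(get_db())
    try:
        inventory_service = ServiceManager().get_service("tcgplayer_inventory")
        file_record = await inventory_service.get_tcgplayer_export(db, "live")
        return file_record.path
    finally:
        db.close()

if __name__ == "__main__":
    print(asyncio.run(download_live_export()))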


@ -18,129 +18,4 @@ class FileProcessingService:
def _get_cache_path(self, filename: str) -> str:
return os.path.join(self.cache_dir, filename)
async def _cache_export(self, file_bytes: bytes, export_type: str):
cache_path = self._get_cache_path(f"{export_type}_export.csv")
with open(cache_path, 'wb') as f:
f.write(file_bytes)
async def _load_cached_export(self, export_type: str) -> Optional[bytes]:
cache_path = self._get_cache_path(f"{export_type}_export.csv")
if os.path.exists(cache_path):
with open(cache_path, 'rb') as f:
return f.read()
return None
async def process_tcgplayer_export(self, db: Session, file_bytes: bytes, export_type: str = "live", use_cache: bool = False) -> dict:
"""
Process a TCGPlayer export file and load it into the inventory table.
Args:
db: Database session
file_bytes: The downloaded file content as bytes
export_type: Type of export (staged, live, pricing)
use_cache: Whether to use cached export file for development
Returns:
dict: Processing statistics
"""
stats = {
"total_rows": 0,
"processed_rows": 0,
"errors": 0,
"error_messages": []
}
try:
# For development, use cached file if available
if use_cache:
cached_bytes = await self._load_cached_export(export_type)
if cached_bytes:
file_bytes = cached_bytes
else:
await self._cache_export(file_bytes, export_type)
# Convert bytes to string and create a file-like object
file_content = file_bytes.decode('utf-8')
file_like = io.StringIO(file_content)
# Read CSV file
csv_reader = csv.DictReader(file_like)
with transaction(db):
for row in csv_reader:
stats["total_rows"] += 1
try:
# Process each row and create/update inventory item in database
inventory_data = self._map_tcgplayer_row_to_inventory(row)
tcgplayer_id = inventory_data["tcgplayer_id"]
# Check if inventory item already exists
existing_item = self.inventory_service.get_by_tcgplayer_id(db, tcgplayer_id)
# Find matching TCGPlayer product
product_id = int(tcgplayer_id) if tcgplayer_id.isdigit() else None
if product_id:
tcg_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == product_id).first()
if tcg_product:
# Update inventory data with product information if available
inventory_data.update({
"product_name": tcg_product.name,
"photo_url": tcg_product.image_url,
"rarity": tcg_product.ext_rarity,
"number": tcg_product.ext_number
})
if existing_item:
# Update existing item
self.inventory_service.update(db, existing_item, inventory_data)
else:
# Create new item
self.inventory_service.create(db, inventory_data)
stats["processed_rows"] += 1
except Exception as e:
stats["errors"] += 1
stats["error_messages"].append(f"Error processing row {stats['total_rows']}: {str(e)}")
return stats
except Exception as e:
raise Exception(f"Failed to process TCGPlayer export: {str(e)}")
def _map_tcgplayer_row_to_inventory(self, row: dict) -> dict:
"""
Map TCGPlayer export row to inventory model fields.
"""
def safe_float(value: str) -> float:
"""Convert string to float, returning 0.0 for empty strings or invalid values"""
try:
return float(value) if value else 0.0
except ValueError:
return 0.0
def safe_int(value: str) -> int:
"""Convert string to int, returning 0 for empty strings or invalid values"""
try:
return int(value) if value else 0
except ValueError:
return 0
return {
"tcgplayer_id": row.get("TCGplayer Id", ""),
"product_line": row.get("Product Line", ""),
"set_name": row.get("Set Name", ""),
"product_name": row.get("Product Name", ""),
"title": row.get("Title", ""),
"number": row.get("Number", ""),
"rarity": row.get("Rarity", ""),
"condition": row.get("Condition", ""),
"tcg_market_price": safe_float(row.get("TCG Market Price", "")),
"tcg_direct_low": safe_float(row.get("TCG Direct Low", "")),
"tcg_low_price_with_shipping": safe_float(row.get("TCG Low Price With Shipping", "")),
"tcg_low_price": safe_float(row.get("TCG Low Price", "")),
"total_quantity": safe_int(row.get("Total Quantity", "")),
"add_to_quantity": safe_int(row.get("Add to Quantity", "")),
"tcg_marketplace_price": safe_float(row.get("TCG Marketplace Price", "")),
"photo_url": row.get("Photo URL", "")
}


@ -0,0 +1,152 @@
from typing import Optional, Union, List, Dict
import os
from pathlib import Path
from datetime import datetime
from sqlalchemy.orm import Session
from sqlalchemy import String
from app.models.file import File
from app.schemas.file import FileBase, FileCreate, FileInDB
from app.db.database import transaction
import logging
import asyncio
from weasyprint import HTML
import json
logger = logging.getLogger(__name__)
class FileService:
def __init__(self, base_cache_dir: str = "app/data/cache"):
self.base_cache_dir = Path(base_cache_dir)
self.base_cache_dir.mkdir(parents=True, exist_ok=True)
def _get_cache_path(self, subdir: str, filename: str) -> Path:
"""Get the full cache path for a file"""
cache_dir = self.base_cache_dir / subdir
cache_dir.mkdir(parents=True, exist_ok=True)
return cache_dir / filename
async def save_file(
self,
db: Session,
file_data: Union[bytes, str],
filename: str,
subdir: str,
file_type: Optional[str] = None,
content_type: Optional[str] = None,
metadata: Optional[Dict] = None,
html_content: Optional[bool] = False
) -> FileInDB:
"""Save a file to the cache directory and create a database record"""
try:
# Get the full cache path
cache_path = self._get_cache_path(subdir, filename)
if html_content and isinstance(file_data, str):
await asyncio.get_event_loop().run_in_executor(
None,
lambda: HTML(string=file_data).write_pdf(str(cache_path))
)
else:
# Write the file data
if isinstance(file_data, str):
mode = 'w'
encoding = 'utf-8'
else:
mode = 'wb'
encoding = None
with open(cache_path, mode, encoding=encoding) as f:
f.write(file_data)
# Create database record
file_record = File(
name=filename,
path=str(cache_path),
file_type=file_type,
content_type=content_type,
size=os.path.getsize(cache_path),
file_metadata=metadata
)
db.add(file_record)
db.commit()
db.refresh(file_record)
return FileInDB.model_validate(file_record)
except Exception as e:
logger.error(f"Error saving file {filename}: {str(e)}")
raise
async def get_file(self, db: Session, file_id: int) -> Optional[FileInDB]:
"""Get a file record from the database"""
file_record = db.query(File).filter(File.id == file_id).first()
if file_record:
return FileInDB.model_validate(file_record)
return None
async def get_file_path(self, db: Session, file_id: int) -> Optional[Path]:
"""Get the path to a file from its ID"""
file_record = await self.get_file(db, file_id)
if file_record and os.path.exists(file_record.path):
return Path(file_record.path)
return None
async def delete_file(self, db: Session, file_id: int) -> bool:
"""Delete a file and its database record"""
try:
file_record = db.query(File).filter(File.id == file_id).first()
if file_record:
# Delete the file if it exists
if os.path.exists(file_record.path):
os.remove(file_record.path)
# Delete the database record
db.delete(file_record)
db.commit()
return True
return False
except Exception as e:
logger.error(f"Error deleting file {file_id}: {str(e)}")
raise
async def list_files(
self,
db: Session,
skip: int = 0,
limit: int = 100,
file_type: Optional[str] = None
) -> List[FileInDB]:
"""List files with optional filtering"""
query = db.query(File)
if file_type:
query = query.filter(File.file_type == file_type)
files = query.offset(skip).limit(limit).all()
return [FileInDB.model_validate(file) for file in files]
async def get_file_by_metadata(
self,
db: Session,
metadata_key: str,
metadata_value: Union[str, List[str]],
file_type: Optional[str] = None,
content_type: Optional[str] = None) -> Optional[FileInDB]:
# Handle array comparison for order_ids
if metadata_key == "order_ids" and isinstance(metadata_value, list):
# Sort and convert to JSON string for consistent comparison
sorted_value = sorted(metadata_value)
query = db.query(File).filter(
File.file_metadata[metadata_key].cast(String) == json.dumps(sorted_value)
)
else:
query = db.query(File).filter(File.file_metadata[metadata_key].cast(String) == str(metadata_value))
if file_type:
query = query.filter(File.file_type == file_type)
if content_type:
query = query.filter(File.content_type == content_type)
file_record = query.first()
if file_record:
return FileInDB.model_validate(file_record)
else:
return None
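Taken together, get_file_by_metadata and save_file give the get-or-create pattern used by the packing slip and pull sheet services above. A condensed sketch of that flow, where the fetch callable stands in for any downstream generator and the filename is hypothetical:

from typing import Awaitable, Callable
from sqlalchemy.orm import Session
from app.schemas.file import FileInDB
from app.services.file_service import FileService

async def get_or_create_pdf(
    db: Session,
    file_service: FileService,
    order_ids: list[str],
    fetch_pdf_bytes: Callable[[], Awaitable[bytes]],  # stand-in for an API call
) -> FileInDB:
    cached = await file_service.get_file_by_metadata(
        db, "order_ids", order_ids, file_type="packing_slip", content_type="application/pdf"
    )
    if cached:
        return cached
    pdf_bytes = await fetch_pdf_bytes()
    return await file_service.save_file(
        db=db,
        file_data=pdf_bytes,
        filename="packing_slip_example.pdf",   # hypothetical name
        subdir="tcgplayer/packing_slips/pdf",
        file_type="packing_slip",
        content_type="application/pdf",
        # store sorted so the JSON comparison in get_file_by_metadata matches
        metadata={"order_ids": sorted(order_ids)},
    )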


@ -9,6 +9,7 @@ import asyncio
import time
from PIL import Image
from contextlib import asynccontextmanager
from app.schemas.file import FileInDB
logger = logging.getLogger(__name__)
@ -141,11 +142,11 @@ class LabelPrinterService:
logger.error(f"Unexpected error in _send_print_request: {e}")
return False
async def print_file(self, file_path: Union[str, Path], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label"]] = None) -> bool:
async def print_file(self, file_path: Union[str, Path, FileInDB], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label"]] = None) -> bool:
"""Print a PDF or PNG file to the label printer.
Args:
file_path: Path to the PDF or PNG file
file_path: Path to the PDF or PNG file, or a FileInDB object
label_size: Size of label to use ("dk1201" or "dk1241")
label_type: Type of label to use ("address_label" or "packing_slip" or "set_label")
@ -158,6 +159,10 @@ class LabelPrinterService:
logger.error("No file path provided")
return False
# Handle FileInDB objects
if isinstance(file_path, FileInDB):
file_path = file_path.path
file_path = Path(file_path)
if not file_path.exists():
logger.error(f"File not found: {file_path}")


@ -6,29 +6,49 @@ from jinja2 import Environment, FileSystemLoader
from weasyprint import HTML
import logging
import asyncio
from app.schemas.file import FileInDB
from app.services.base_service import BaseService
from sqlalchemy.orm import Session
logger = logging.getLogger(__name__)
class PullSheetService:
class PullSheetService(BaseService):
def __init__(self):
super().__init__(None)
self.template_dir = Path("app/data/assets/templates")
self.env = Environment(loader=FileSystemLoader(str(self.template_dir)))
self.template = self.env.get_template("pull_sheet.html")
self.output_dir = Path("app/data/cache/tcgplayer/pull_sheets")
self.output_dir.mkdir(parents=True, exist_ok=True)
async def generate_pull_sheet_pdf(self, csv_path: str) -> str:
async def get_or_create_rendered_pull_sheet(self, db: Session, order_ids: list[str]) -> FileInDB:
# get file service
file_service = self.get_service('file')
# check if rendered pull sheet exists
rendered_pull_sheet = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "rendered_pull_sheet", "application/pdf")
if rendered_pull_sheet:
return rendered_pull_sheet
# check if pull sheet data file exists
pull_sheet_data_file = await file_service.get_file_by_metadata(db, "order_ids", order_ids, "pull_sheet", "text/csv")
if pull_sheet_data_file:
# generate pdf from pull sheet data file
return await self.generate_pull_sheet_pdf(db, pull_sheet_data_file)
# if no pull sheet data file exists, get it from order management service
order_service = self.get_service('order_management')
pull_sheet_data_file = await order_service.get_pull_sheet(db, order_ids)
return await self.generate_pull_sheet_pdf(db, pull_sheet_data_file)
async def generate_pull_sheet_pdf(self, db: Session, file: FileInDB) -> FileInDB:
"""Generate a PDF pull sheet from a CSV file.
Args:
csv_path: Path to the CSV file containing pull sheet data
file: FileInDB object containing the pull sheet data
Returns:
FileInDB record for the rendered pull sheet PDF
"""
try:
# Read and process CSV data
items = await self._read_and_process_csv(csv_path)
items = await self._read_and_process_csv(file.path)
# Prepare template data
template_data = {
@ -38,16 +58,24 @@ class PullSheetService:
# Render HTML
html_content = self.template.render(**template_data)
# Generate PDF in a separate thread to avoid blocking
pdf_path = self.output_dir / f"pull_sheet_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf"
await asyncio.get_event_loop().run_in_executor(
None,
lambda: HTML(string=html_content).write_pdf(str(pdf_path))
# Ensure metadata is properly formatted
metadata = file.file_metadata.copy() if file.file_metadata else {}
if 'order_ids' in metadata:
metadata['order_ids'] = sorted(metadata['order_ids'])
file_service = self.get_service('file')
return await file_service.save_file(
db=db,
file_data=html_content,
filename=f"rendered_pull_sheet_{datetime.now().strftime('%Y%m%d_%H%M%S')}.pdf",
subdir="tcgplayer/pull_sheets/rendered",
file_type="rendered_pull_sheet",
content_type="application/pdf",
metadata=metadata,
html_content=True # This tells FileService to convert HTML to PDF
)
return str(pdf_path)
except Exception as e:
logger.error(f"Error generating pull sheet PDF: {str(e)}")
raise
@ -68,7 +96,7 @@ class PullSheetService:
)
# Filter out the "Orders Contained in Pull Sheet" row
df = df[df['Product Line'] != 'Orders Contained in Pull Sheet:']
df = df[df['Product Line'] != 'Orders Contained in Pull Sheet:'].copy()
# Convert Set Release Date to datetime
df['Set Release Date'] = pd.to_datetime(df['Set Release Date'], format='%m/%d/%Y %H:%M:%S')


@ -1,4 +1,4 @@
from app.db.database import get_db, transaction
from app.db.database import transaction, get_db
from app.services.scheduler.base_scheduler import BaseScheduler
import logging
@ -16,41 +16,12 @@ class SchedulerService:
from app.services.service_manager import ServiceManager
self._service_manager = ServiceManager()
return self._service_manager
async def process_tcgplayer_export(self, export_type: str = "live", use_cache: bool = False):
"""
Process TCGPlayer export as a scheduled task.
Args:
export_type: Type of export to process (staged, live, or pricing)
"""
db = get_db()
try:
logger.info(f"Starting scheduled TCGPlayer export processing for {export_type}")
# Get services
tcgplayer_service = self.service_manager.get_service('tcgplayer_inventory')
file_processor = self.service_manager.get_service('file_processing')
# Download the file
file_bytes = await tcgplayer_service.get_tcgplayer_export(export_type)
# Process the file and load into database
with transaction(db):
stats = await file_processor.process_tcgplayer_export(db, export_type=export_type, file_bytes=file_bytes, use_cache=use_cache)
logger.info(f"Completed TCGPlayer export processing: {stats}")
return stats
except Exception as e:
logger.error(f"Error processing TCGPlayer export: {str(e)}")
raise
async def update_open_orders_hourly(self):
"""
Hourly update of orders from TCGPlayer API to database
"""
db = get_db()
db = next(get_db())
try:
logger.info("Starting hourly order update")
# Get order management service
@ -68,12 +39,14 @@ class SchedulerService:
except Exception as e:
logger.error(f"Error updating open orders: {str(e)}")
raise
finally:
db.close()
async def update_all_orders_daily(self):
"""
Daily update of all orders from TCGPlayer API to database
"""
db = get_db()
db = next(get_db())
try:
logger.info("Starting daily order update")
# Get order management service
@ -91,16 +64,11 @@ class SchedulerService:
except Exception as e:
logger.error(f"Error updating all orders: {str(e)}")
raise
finally:
db.close()
async def start_scheduled_tasks(self):
"""Start all scheduled tasks"""
# Schedule TCGPlayer export processing to run daily at 2 AM
await self.scheduler.schedule_task(
task_name="process_tcgplayer_export",
func=self.process_tcgplayer_export,
interval_seconds=24 * 60 * 60, # 24 hours
export_type="live"
)
# Schedule open orders update to run hourly at 00 minutes
await self.scheduler.schedule_task(
task_name="update_open_orders_hourly",


@ -25,11 +25,13 @@ class ServiceManager:
'pull_sheet': 'app.services.pull_sheet_service.PullSheetService',
'set_label': 'app.services.set_label_service.SetLabelService',
'data_initialization': 'app.services.data_initialization.DataInitializationService',
'scheduler': 'app.services.scheduler.scheduler_service.SchedulerService'
'scheduler': 'app.services.scheduler.scheduler_service.SchedulerService',
'file': 'app.services.file_service.FileService'
}
self._service_configs = {
'label_printer': {'printer_api_url': "http://192.168.1.110:8000"},
'regular_printer': {'printer_name': "MFCL2750DW-3"}
'regular_printer': {'printer_name': "MFCL2750DW-3"},
'file': {'base_cache_dir': "app/data/cache"}
}
self._initialized = True
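With the registry entry above, other code can resolve the new file cache the same way as every other service. A minimal sketch; the singleton-style reuse of ServiceManager is assumed from how it is constructed elsewhere in this commit:

from app.services.service_manager import ServiceManager

service_manager = ServiceManager()
file_service = service_manager.get_service("file")            # FileService(base_cache_dir="app/data/cache")
label_printer = service_manager.get_service("label_printer")  # configured with the printer API URL above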


@ -122,8 +122,7 @@ RENAME_SETS = {
class SetLabelService(BaseService):
DEFAULT_OUTPUT_DIR = "app/data/cache/set_labels"
os.makedirs(DEFAULT_OUTPUT_DIR, exist_ok=True)
DEFAULT_OUTPUT_DIR = "set_labels" # Changed to be relative to FileService's base_cache_dir
def __init__(self, output_dir=DEFAULT_OUTPUT_DIR):
super().__init__(None) # BaseService doesn't need a model for this service
@ -131,8 +130,7 @@ class SetLabelService(BaseService):
self.ignored_sets = IGNORED_SETS
self.set_types = SET_TYPES
self.minimum_set_size = MINIMUM_SET_SIZE
self.output_dir = Path(output_dir)
self.output_dir.mkdir(parents=True, exist_ok=True)
self.output_dir = output_dir
async def get_set_data(self, session):
log.info("Getting set data and icons from Scryfall")
@ -173,14 +171,8 @@ class SetLabelService(BaseService):
log.warning(f"Failed to fetch icon from {icon_url}: {e}")
return None
async def generate_label(self, session, set_data):
output_file = self.output_dir / f"{set_data['code']}.pdf"
# Check if file already exists
if output_file.exists():
log.info(f"Label already exists for {set_data['name']} ({set_data['code']})")
return output_file
async def generate_label(self, session, set_data, db: Session):
"""Generate a label for a set and save it using FileService"""
name = RENAME_SETS.get(set_data["name"], set_data["name"])
icon_b64 = await self.get_set_icon(session, set_data["icon_svg_uri"])
@ -192,11 +184,32 @@ class SetLabelService(BaseService):
icon_b64=icon_b64,
)
HTML(string=html_content).write_pdf(output_file)
# Generate PDF content
pdf_content = HTML(string=html_content).write_pdf()
# Save using FileService
filename = f"{set_data['code']}.pdf"
metadata = {
"set_name": name,
"set_code": set_data["code"],
"release_date": set_data["released_at"],
"card_count": set_data["card_count"]
}
file_record = await self.file_service.save_file(
db=db,
file_data=pdf_content,
filename=filename,
subdir=self.output_dir,
file_type="set_label",
metadata=metadata
)
log.info(f"Generated label for {name} ({set_data['code']})")
return output_file
return file_record
async def generate_labels(self, sets=None):
async def generate_labels(self, db: Session, sets=None):
"""Generate labels for sets and return their file records"""
if sets:
self.ignored_sets = ()
self.minimum_set_size = 0
@ -205,7 +218,7 @@ class SetLabelService(BaseService):
async with aiohttp.ClientSession() as session:
set_data = await self.get_set_data(session)
tasks = [self.generate_label(session, exp) for exp in set_data]
tasks = [self.generate_label(session, exp, db) for exp in set_data]
return await asyncio.gather(*tasks)
async def get_available_sets(self, db: Session):

app/static/app.js (new file)

@ -0,0 +1,238 @@
// API base URL
const API_BASE_URL = '/api';
// Selected orders for actions
let selectedOrders = new Set();
// Show toast notification
function showToast(message, type = 'success') {
const toast = document.createElement('div');
toast.className = `fixed bottom-4 right-4 px-6 py-3 rounded-lg shadow-lg text-white ${
type === 'success' ? 'bg-green-600' : 'bg-red-600'
} transform translate-y-0 opacity-100 transition-all duration-300`;
toast.textContent = message;
document.body.appendChild(toast);
setTimeout(() => {
toast.style.transform = 'translateY(100%)';
toast.style.opacity = '0';
setTimeout(() => toast.remove(), 300);
}, 3000);
}
// Show loading state
function setLoading(isLoading) {
const buttons = document.querySelectorAll('button');
buttons.forEach(button => {
if (isLoading) {
button.disabled = true;
button.classList.add('opacity-50', 'cursor-not-allowed');
} else {
button.disabled = false;
button.classList.remove('opacity-50', 'cursor-not-allowed');
}
});
}
// Fetch orders from the API
async function fetchOrders() {
try {
setLoading(true);
const searchRange = document.getElementById('searchRange').value;
const openOnly = document.getElementById('openOnly').checked;
const response = await fetch(`${API_BASE_URL}/orders?search_range=${searchRange}&open_only=${openOnly}`);
if (!response.ok) {
throw new Error('Failed to fetch orders');
}
const orders = await response.json();
displayOrders(orders);
showToast('Orders loaded successfully');
} catch (error) {
showToast('Error fetching orders: ' + error.message, 'error');
} finally {
setLoading(false);
}
}
// Display orders in the UI
function displayOrders(orders) {
const ordersList = document.getElementById('ordersList');
ordersList.innerHTML = '';
if (!orders || orders.length === 0) {
ordersList.innerHTML = '<div class="col-span-full text-center text-gray-400 py-4">No orders found</div>';
return;
}
orders.forEach(order => {
const orderCard = document.createElement('div');
orderCard.className = `bg-gray-700 rounded-lg shadow-sm p-4 border border-gray-600 hover:shadow-md transition-shadow cursor-pointer ${
selectedOrders.has(String(order.orderNumber)) ? 'ring-2 ring-blue-500' : ''
}`;
orderCard.dataset.orderId = order.orderNumber;
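// dataset properties always store strings, so the order number is coerced to a
// string here and compared as a string when cards are toggled below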
orderCard.innerHTML = `
<div class="flex flex-col h-full">
<div class="flex justify-between items-start mb-3">
<div class="flex-1 min-w-0">
<h3 class="text-lg font-bold text-blue-400 truncate">#${order.orderNumber || 'N/A'}</h3>
<p class="text-sm text-gray-400">${order.buyerName || 'N/A'}</p>
</div>
<span class="px-2 py-1 text-xs rounded-full ${
order.orderStatus === 'Open' ? 'bg-green-900/50 text-green-300' : 'bg-gray-600/50 text-gray-300'
}">${order.orderStatus || 'Unknown'}</span>
</div>
<div class="mt-auto">
<div class="flex justify-between items-center">
<p class="text-sm text-gray-400">${order.orderDate ? new Date(order.orderDate).toLocaleString() : 'N/A'}</p>
<p class="text-lg font-bold text-white">$${order.totalAmount ? order.totalAmount.toFixed(2) : '0.00'}</p>
</div>
</div>
</div>
`;
ordersList.appendChild(orderCard);
// Add click event listener to the order card
orderCard.addEventListener('click', () => {
const orderId = orderCard.dataset.orderId;
if (selectedOrders.has(orderId)) {
selectedOrders.delete(orderId);
orderCard.classList.remove('ring-2', 'ring-blue-500');
} else {
selectedOrders.add(orderId);
orderCard.classList.add('ring-2', 'ring-blue-500');
}
});
});
}
// Select all orders on the page
function selectAllOrders() {
const orderCards = document.querySelectorAll('[data-order-id]');
const allSelected = orderCards.length > 0 && Array.from(orderCards).every(card => selectedOrders.has(card.dataset.orderId));
orderCards.forEach(card => {
const orderId = card.dataset.orderId;
if (allSelected) {
selectedOrders.delete(orderId);
card.classList.remove('ring-2', 'ring-blue-500');
} else {
selectedOrders.add(orderId);
card.classList.add('ring-2', 'ring-blue-500');
}
});
showToast(allSelected ? 'All orders deselected' : 'All orders selected');
}
// Generate pull sheets
async function generatePullSheets() {
try {
const orderIds = Array.from(selectedOrders);
if (orderIds.length === 0) {
showToast('Please select at least one order', 'error');
return;
}
setLoading(true);
const response = await fetch(`${API_BASE_URL}/orders/generate-pull-sheets`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
order_ids: orderIds
})
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.detail || 'Failed to generate pull sheets');
}
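// NOTE: the response body is not used here; the generated pull sheets are
// assumed to be persisted server-side by the file service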
showToast('Pull sheets generated successfully');
} catch (error) {
showToast('Error generating pull sheets: ' + error.message, 'error');
} finally {
setLoading(false);
}
}
// Generate packing slips
async function generatePackingSlips() {
try {
const orderIds = Array.from(selectedOrders);
if (orderIds.length === 0) {
showToast('Please select at least one order', 'error');
return;
}
setLoading(true);
const response = await fetch(`${API_BASE_URL}/orders/generate-packing-slips`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
order_ids: orderIds
})
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.detail || 'Failed to generate packing slips');
}
showToast('Packing slips generated successfully');
} catch (error) {
showToast('Error generating packing slips: ' + error.message, 'error');
} finally {
setLoading(false);
}
}
// Generate address labels
async function generateAddressLabels() {
try {
const orderIds = Array.from(selectedOrders);
if (orderIds.length === 0) {
showToast('Please select at least one order', 'error');
return;
}
const labelType = document.getElementById('labelType').value;
setLoading(true);
const response = await fetch(`${API_BASE_URL}/orders/generate-address-labels`, {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({
order_ids: orderIds,
label_type: labelType
})
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.detail || 'Failed to generate address labels');
}
showToast('Address labels generated successfully');
} catch (error) {
showToast('Error generating address labels: ' + error.message, 'error');
} finally {
setLoading(false);
}
}
// Load orders when page loads
document.addEventListener('DOMContentLoaded', () => {
fetchOrders();
// Add event listeners for search range and open only checkbox
document.getElementById('searchRange').addEventListener('change', fetchOrders);
document.getElementById('openOnly').addEventListener('change', fetchOrders);
});

app/static/index.html Normal file
View File

@ -0,0 +1,78 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>TCGPlayer Order Management</title>
<script src="https://cdn.tailwindcss.com"></script>
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;500;600;700&display=swap" rel="stylesheet">
<script>
tailwind.config = {
darkMode: 'class',
theme: {
extend: {
fontFamily: {
sans: ['Inter', 'sans-serif'],
},
}
}
}
</script>
</head>
<body class="bg-gray-900 min-h-screen text-gray-100">
<div class="max-w-7xl mx-auto px-4 sm:px-6 lg:px-8 py-8">
<div class="bg-gray-800 rounded-xl shadow-sm p-6 mb-8">
<h1 class="text-3xl font-bold text-gray-100 mb-2">TCGPlayer Order Management</h1>
<p class="text-gray-400">Manage your TCGPlayer orders efficiently</p>
</div>
<!-- Order Actions Section -->
<div class="bg-gray-800 rounded-xl shadow-sm p-6 mb-8">
<h2 class="text-xl font-semibold text-gray-100 mb-6">Order Actions</h2>
<div class="flex flex-wrap gap-4 mb-4">
<button onclick="generatePullSheets()" class="px-4 py-2 bg-green-600 text-white rounded-lg hover:bg-green-700 focus:outline-none focus:ring-2 focus:ring-green-500 focus:ring-offset-2 transition-colors">
Generate Pull Sheets
</button>
<button onclick="generatePackingSlips()" class="px-4 py-2 bg-purple-600 text-white rounded-lg hover:bg-purple-700 focus:outline-none focus:ring-2 focus:ring-purple-500 focus:ring-offset-2 transition-colors">
Generate Packing Slips
</button>
<button onclick="generateAddressLabels()" class="px-4 py-2 bg-indigo-600 text-white rounded-lg hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 transition-colors">
Generate Address Labels
</button>
</div>
<div id="labelOptions" class="bg-gray-700 rounded-lg p-4">
<label class="block text-sm font-medium text-gray-300 mb-2">Label Type</label>
<select id="labelType" class="rounded-lg border-gray-600 bg-gray-800 text-gray-100 focus:ring-blue-500 focus:border-blue-500">
<option value="dk1201">DK1201</option>
<option value="dk1241">DK1241</option>
</select>
</div>
</div>
<!-- Order List Section -->
<div class="bg-gray-800 rounded-xl shadow-sm p-6">
<div class="flex items-center justify-between mb-6">
<h2 class="text-xl font-semibold text-gray-100">Orders</h2>
<div class="flex items-center space-x-4">
<select id="searchRange" class="rounded-lg border-gray-600 bg-gray-800 text-gray-100 focus:ring-blue-500 focus:border-blue-500">
<option value="LastWeek" selected>Last Week</option>
<option value="LastMonth">Last Month</option>
<option value="LastThreeMonths">Last Three Months</option>
<option value="LastFourMonths">Last Four Months</option>
<option value="LastTwoYears">Last Two Years</option>
</select>
<label class="flex items-center space-x-2">
<input type="checkbox" id="openOnly" class="rounded border-gray-600 bg-gray-800 text-blue-600 focus:ring-blue-500" checked>
<span class="text-gray-300">Show Open Orders Only</span>
</label>
<button onclick="selectAllOrders()" class="px-4 py-2 bg-blue-600 text-white rounded-lg hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-offset-2 transition-colors">
Select All Orders
</button>
</div>
</div>
<div id="ordersList" class="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4"></div>
</div>
</div>
<script src="/app.js"></script>
</body>
</html>
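The page requests /app.js and calls /api/... relative to the site root, which only works if the FastAPI app serves app/static at the root path alongside the API router. A minimal wiring sketch under that assumption; the real application setup is not shown in this excerpt, and the router import path is illustrative:

# Hypothetical wiring (not shown in this commit excerpt): serve app/static at
# the root so index.html can load /app.js, while the API stays under /api.
from fastapi import FastAPI
from fastapi.staticfiles import StaticFiles

from app.routes import router as api_router  # assumed import path

app = FastAPI()
app.include_router(api_router, prefix="/api")

# html=True makes "/" serve index.html; mounting last keeps /api routes first
app.mount("/", StaticFiles(directory="app/static", html=True), name="static")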

app/static/styles.css Normal file
View File

@ -0,0 +1,108 @@
* {
box-sizing: border-box;
margin: 0;
padding: 0;
}
body {
font-family: Arial, sans-serif;
line-height: 1.6;
padding: 20px;
background-color: #f5f5f5;
}
.container {
max-width: 1200px;
margin: 0 auto;
background-color: white;
padding: 20px;
border-radius: 8px;
box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
}
h1 {
color: #333;
margin-bottom: 20px;
text-align: center;
}
h2 {
color: #444;
margin-bottom: 15px;
}
.section {
margin-bottom: 30px;
padding: 20px;
background-color: #fff;
border-radius: 4px;
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
}
.controls {
margin-bottom: 15px;
display: flex;
gap: 10px;
align-items: center;
flex-wrap: wrap;
}
select, button {
padding: 8px 12px;
border: 1px solid #ddd;
border-radius: 4px;
background-color: white;
}
button {
background-color: #007bff;
color: white;
border: none;
cursor: pointer;
transition: background-color 0.2s;
}
button:hover {
background-color: #0056b3;
}
.orders-list {
display: grid;
grid-template-columns: repeat(auto-fill, minmax(300px, 1fr));
gap: 15px;
}
.order-card {
padding: 15px;
border: 1px solid #ddd;
border-radius: 4px;
background-color: white;
}
.order-card h3 {
margin-bottom: 10px;
color: #333;
}
.order-card p {
margin-bottom: 5px;
color: #666;
}
.label-options {
margin-top: 10px;
padding: 10px;
background-color: #f8f9fa;
border-radius: 4px;
}
@media (max-width: 768px) {
.controls {
flex-direction: column;
align-items: stretch;
}
select, button {
width: 100%;
}
}