Compare commits
No commits in common. "6178fdd15d8283139fc37165e47ca27f1e55798c" and "8f35cedb4ade18e10ca5f4325e48e93c0d5e7ff9" have entirely different histories.
6178fdd15d
...
8f35cedb4a
@@ -1,42 +0,0 @@
"""tcg product update again

Revision ID: 1746d35187a2
Revises: 9775314e337b
Create Date: 2025-04-17 22:02:35.492726

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '1746d35187a2'
down_revision: Union[str, None] = '9775314e337b'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tcgplayer_products', sa.Column('ext_subtype', sa.String(), nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_oracle_text', sa.String(), nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_flavor_text', sa.String(), nullable=True))
    op.drop_column('tcgplayer_products', 'ext_mana_cost')
    op.drop_column('tcgplayer_products', 'ext_loyalty')
    op.drop_column('tcgplayer_products', 'ext_mana_value')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tcgplayer_products', sa.Column('ext_mana_value', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_loyalty', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_mana_cost', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.drop_column('tcgplayer_products', 'ext_flavor_text')
    op.drop_column('tcgplayer_products', 'ext_oracle_text')
    op.drop_column('tcgplayer_products', 'ext_subtype')
    # ### end Alembic commands ###
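
Taken together, the deleted migrations in this compare form one linear Alembic chain; the ordering below is reconstructed solely from the revision/down_revision pairs shown in these files:

# Revision chain implied by the deleted migrations (parent -> child):
# 479003fbead7 -> 9775314e337b   tcg product update
# 9775314e337b -> 1746d35187a2   tcg product update again
# 1746d35187a2 -> b45c43900b56   tcg prices
# b45c43900b56 -> 2fcce9c8883a   tcg prices again
# 2fcce9c8883a -> 493b2cb724d0   tcg prices again 2
# 493b2cb724d0 -> e34bfa37db00   tcg prices again 3
# e34bfa37db00 -> 54cd251d13a3   fuck foreign keys for real dog
# 54cd251d13a3 -> 7f309a891094   fuck foreign keys for real dog (no-op)
# 7f309a891094 -> 9fb73424598c   recreate tcgplayer price history
# 9fb73424598c -> cc7dd65bcdd9   changing db bigly (no-op)
# cc7dd65bcdd9 -> d4d3f43ce86a   changing db bigly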
@@ -1,32 +0,0 @@
"""tcg prices again

Revision ID: 2fcce9c8883a
Revises: b45c43900b56
Create Date: 2025-04-17 22:48:53.378544

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '2fcce9c8883a'
down_revision: Union[str, None] = 'b45c43900b56'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
@@ -1,51 +0,0 @@
"""tcg prices again 2

Revision ID: 493b2cb724d0
Revises: 2fcce9c8883a
Create Date: 2025-04-17 23:05:11.919652

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '493b2cb724d0'
down_revision: Union[str, None] = '2fcce9c8883a'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_tcgplayer_prices_date', table_name='tcgplayer_prices')
    op.drop_index('ix_tcgplayer_prices_id', table_name='tcgplayer_prices')
    op.drop_index('ix_tcgplayer_prices_product_id', table_name='tcgplayer_prices')
    op.drop_table('tcgplayer_prices')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tcgplayer_prices',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('product_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.Column('low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('mid_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('high_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('market_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('direct_low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('sub_type_name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='tcgplayer_prices_pkey')
    )
    op.create_index('ix_tcgplayer_prices_product_id', 'tcgplayer_prices', ['product_id'], unique=False)
    op.create_index('ix_tcgplayer_prices_id', 'tcgplayer_prices', ['id'], unique=False)
    op.create_index('ix_tcgplayer_prices_date', 'tcgplayer_prices', ['date'], unique=False)
    # ### end Alembic commands ###
@@ -1,53 +0,0 @@
"""fuck foreign keys for real dog

Revision ID: 54cd251d13a3
Revises: e34bfa37db00
Create Date: 2025-04-17 23:10:59.010644

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '54cd251d13a3'
down_revision: Union[str, None] = 'e34bfa37db00'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index('ix_tcgplayer_price_history_date', table_name='tcgplayer_price_history')
    op.drop_index('ix_tcgplayer_price_history_id', table_name='tcgplayer_price_history')
    op.drop_index('ix_tcgplayer_price_history_product_id', table_name='tcgplayer_price_history')
    op.drop_table('tcgplayer_price_history')
    op.drop_constraint('tcgplayer_products_group_id_fkey', 'tcgplayer_products', type_='foreignkey')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_foreign_key('tcgplayer_products_group_id_fkey', 'tcgplayer_products', 'tcgplayer_groups', ['group_id'], ['group_id'])
    op.create_table('tcgplayer_price_history',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('product_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('date', postgresql.TIMESTAMP(), autoincrement=False, nullable=True),
        sa.Column('low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('mid_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('high_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('market_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('direct_low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('sub_type_name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='tcgplayer_price_history_pkey')
    )
    op.create_index('ix_tcgplayer_price_history_product_id', 'tcgplayer_price_history', ['product_id'], unique=False)
    op.create_index('ix_tcgplayer_price_history_id', 'tcgplayer_price_history', ['id'], unique=False)
    op.create_index('ix_tcgplayer_price_history_date', 'tcgplayer_price_history', ['date'], unique=False)
    # ### end Alembic commands ###
@@ -1,32 +0,0 @@
"""fuck foreign keys for real dog

Revision ID: 7f309a891094
Revises: 54cd251d13a3
Create Date: 2025-04-17 23:11:55.027126

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '7f309a891094'
down_revision: Union[str, None] = '54cd251d13a3'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
@@ -1,40 +0,0 @@
"""tcg product update

Revision ID: 9775314e337b
Revises: 479003fbead7
Create Date: 2025-04-17 21:58:17.637210

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = '9775314e337b'
down_revision: Union[str, None] = '479003fbead7'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('tcgplayer_products', sa.Column('ext_power', sa.String(), nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_toughness', sa.String(), nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_loyalty', sa.String(), nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_mana_cost', sa.String(), nullable=True))
    op.add_column('tcgplayer_products', sa.Column('ext_mana_value', sa.String(), nullable=True))
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('tcgplayer_products', 'ext_mana_value')
    op.drop_column('tcgplayer_products', 'ext_mana_cost')
    op.drop_column('tcgplayer_products', 'ext_loyalty')
    op.drop_column('tcgplayer_products', 'ext_toughness')
    op.drop_column('tcgplayer_products', 'ext_power')
    # ### end Alembic commands ###
@@ -1,51 +0,0 @@
"""recreate tcgplayer price history

Revision ID: 9fb73424598c
Revises: 7f309a891094
Create Date: 2025-04-17 23:13:55.027126

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = '9fb73424598c'
down_revision: Union[str, None] = '7f309a891094'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('tcgplayer_price_history',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('product_id', sa.Integer(), nullable=True),
        sa.Column('date', sa.DateTime(), nullable=True),
        sa.Column('low_price', sa.Float(), nullable=True),
        sa.Column('mid_price', sa.Float(), nullable=True),
        sa.Column('high_price', sa.Float(), nullable=True),
        sa.Column('market_price', sa.Float(), nullable=True),
        sa.Column('direct_low_price', sa.Float(), nullable=True),
        sa.Column('sub_type_name', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tcgplayer_price_history_id'), 'tcgplayer_price_history', ['id'], unique=False)
    op.create_index(op.f('ix_tcgplayer_price_history_product_id'), 'tcgplayer_price_history', ['product_id'], unique=False)
    op.create_index(op.f('ix_tcgplayer_price_history_date'), 'tcgplayer_price_history', ['date'], unique=False)
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_tcgplayer_price_history_date'), table_name='tcgplayer_price_history')
    op.drop_index(op.f('ix_tcgplayer_price_history_product_id'), table_name='tcgplayer_price_history')
    op.drop_index(op.f('ix_tcgplayer_price_history_id'), table_name='tcgplayer_price_history')
    op.drop_table('tcgplayer_price_history')
    # ### end Alembic commands ###
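
The table recreated by this revision maps onto a SQLAlchemy model along these lines — a sketch inferred purely from the columns above, assuming the usual declarative setup (the repo's real class lives in app/models/tcgplayer_price_history.py per the imports later in this compare, and may differ):

from sqlalchemy import Column, DateTime, Float, Integer, String
from sqlalchemy.orm import declarative_base
from sqlalchemy.sql import func

Base = declarative_base()

class TCGPlayerPriceHistory(Base):
    __tablename__ = "tcgplayer_price_history"

    # Columns mirror the op.create_table() call in upgrade() above
    id = Column(Integer, primary_key=True, index=True)
    product_id = Column(Integer, index=True)
    date = Column(DateTime, index=True)
    low_price = Column(Float)
    mid_price = Column(Float)
    high_price = Column(Float)
    market_price = Column(Float)
    direct_low_price = Column(Float)
    sub_type_name = Column(String)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True))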
@@ -1,32 +0,0 @@
"""tcg prices

Revision ID: b45c43900b56
Revises: 1746d35187a2
Create Date: 2025-04-17 22:47:44.405906

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'b45c43900b56'
down_revision: Union[str, None] = '1746d35187a2'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
@@ -1,28 +0,0 @@
"""changing db bigly

Revision ID: cc7dd65bcdd9
Revises: 9fb73424598c
Create Date: 2025-04-19 13:36:41.784661

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'cc7dd65bcdd9'
down_revision: Union[str, None] = '9fb73424598c'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    pass


def downgrade() -> None:
    """Downgrade schema."""
    pass
@@ -1,339 +0,0 @@
"""changing db bigly

Revision ID: d4d3f43ce86a
Revises: cc7dd65bcdd9
Create Date: 2025-04-19 13:46:27.330261

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

# revision identifiers, used by Alembic.
revision: str = 'd4d3f43ce86a'
down_revision: Union[str, None] = 'cc7dd65bcdd9'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('customers',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_customers_id'), 'customers', ['id'], unique=False)
    op.create_index(op.f('ix_customers_name'), 'customers', ['name'], unique=True)
    op.create_table('products',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('tcgplayer_id', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('tcgplayer_inventory',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('tcgplayer_id', sa.String(), nullable=True),
        sa.Column('product_line', sa.String(), nullable=True),
        sa.Column('set_name', sa.String(), nullable=True),
        sa.Column('product_name', sa.String(), nullable=True),
        sa.Column('title', sa.String(), nullable=True),
        sa.Column('number', sa.String(), nullable=True),
        sa.Column('rarity', sa.String(), nullable=True),
        sa.Column('condition', sa.String(), nullable=True),
        sa.Column('tcg_market_price', sa.Float(), nullable=True),
        sa.Column('tcg_direct_low', sa.Float(), nullable=True),
        sa.Column('tcg_low_price_with_shipping', sa.Float(), nullable=True),
        sa.Column('tcg_low_price', sa.Float(), nullable=True),
        sa.Column('total_quantity', sa.Integer(), nullable=True),
        sa.Column('add_to_quantity', sa.Integer(), nullable=True),
        sa.Column('tcg_marketplace_price', sa.Float(), nullable=True),
        sa.Column('photo_url', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('CURRENT_TIMESTAMP'), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_tcgplayer_inventory_id'), 'tcgplayer_inventory', ['id'], unique=False)
    op.create_index(op.f('ix_tcgplayer_inventory_tcgplayer_id'), 'tcgplayer_inventory', ['tcgplayer_id'], unique=True)
    op.create_table('vendors',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('name', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_vendors_id'), 'vendors', ['id'], unique=False)
    op.create_index(op.f('ix_vendors_name'), 'vendors', ['name'], unique=True)
    op.create_table('physical_items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('item_type', sa.String(), nullable=True),
        sa.Column('product_id', sa.Integer(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['product_id'], ['products.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('transactions',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('vendor_id', sa.Integer(), nullable=True),
        sa.Column('customer_id', sa.Integer(), nullable=True),
        sa.Column('transaction_type', sa.String(), nullable=True),
        sa.Column('transaction_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('transaction_total_amount', sa.Float(), nullable=True),
        sa.Column('transaction_notes', sa.String(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['customer_id'], ['customers.id'], ),
        sa.ForeignKeyConstraint(['vendor_id'], ['vendors.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_transactions_id'), 'transactions', ['id'], unique=False)
    op.create_table('inventory_items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('physical_item_id', sa.Integer(), nullable=True),
        sa.Column('cost_basis', sa.Float(), nullable=True),
        sa.Column('parent_id', sa.Integer(), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['parent_id'], ['inventory_items.id'], ),
        sa.ForeignKeyConstraint(['physical_item_id'], ['physical_items.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('physical_item_id')
    )
    op.create_index(op.f('ix_inventory_items_id'), 'inventory_items', ['id'], unique=False)
    op.create_table('sealed_cases',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['id'], ['physical_items.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('transaction_items',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('transaction_id', sa.Integer(), nullable=True),
        sa.Column('physical_item_id', sa.Integer(), nullable=True),
        sa.Column('unit_price', sa.Float(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['physical_item_id'], ['physical_items.id'], ),
        sa.ForeignKeyConstraint(['transaction_id'], ['transactions.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_transaction_items_id'), 'transaction_items', ['id'], unique=False)
    op.create_table('sealed_boxes',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('case_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['case_id'], ['sealed_cases.id'], ),
        sa.ForeignKeyConstraint(['id'], ['physical_items.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('open_events',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('sealed_case_id', sa.Integer(), nullable=True),
        sa.Column('sealed_box_id', sa.Integer(), nullable=True),
        sa.Column('open_date', sa.DateTime(timezone=True), nullable=True),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['sealed_box_id'], ['sealed_boxes.id'], ),
        sa.ForeignKeyConstraint(['sealed_case_id'], ['sealed_cases.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_open_events_id'), 'open_events', ['id'], unique=False)
    op.create_table('open_cards',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('open_event_id', sa.Integer(), nullable=True),
        sa.Column('box_id', sa.Integer(), nullable=True),
        sa.ForeignKeyConstraint(['box_id'], ['open_boxes.id'], ),
        sa.ForeignKeyConstraint(['id'], ['physical_items.id'], ),
        sa.ForeignKeyConstraint(['open_event_id'], ['open_events.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('cost_basis',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('transaction_item_id', sa.Integer(), nullable=True),
        sa.Column('sealed_case_id', sa.Integer(), nullable=True),
        sa.Column('sealed_box_id', sa.Integer(), nullable=True),
        sa.Column('open_box_id', sa.Integer(), nullable=True),
        sa.Column('open_card_id', sa.Integer(), nullable=True),
        sa.Column('quantity', sa.Integer(), nullable=False),
        sa.Column('unit_cost', sa.Float(), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('updated_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('deleted_at', sa.DateTime(timezone=True), nullable=True),
        sa.ForeignKeyConstraint(['open_box_id'], ['open_boxes.id'], ),
        sa.ForeignKeyConstraint(['open_card_id'], ['open_cards.id'], ),
        sa.ForeignKeyConstraint(['sealed_box_id'], ['sealed_boxes.id'], ),
        sa.ForeignKeyConstraint(['sealed_case_id'], ['sealed_cases.id'], ),
        sa.ForeignKeyConstraint(['transaction_item_id'], ['transaction_items.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_cost_basis_id'), 'cost_basis', ['id'], unique=False)

    # Drop tables in correct dependency order
    # First drop foreign key constraints
    op.execute('DROP TABLE IF EXISTS open_cards CASCADE')
    op.execute('DROP TABLE IF EXISTS cost_basis CASCADE')
    op.execute('DROP TABLE IF EXISTS open_boxes CASCADE')
    op.execute('DROP TABLE IF EXISTS boxes CASCADE')
    op.execute('DROP TABLE IF EXISTS games CASCADE')

    op.drop_index('ix_inventory_id', table_name='inventory')
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Create tables in correct dependency order
    op.create_table('games',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('description', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('image_url', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='games_pkey')
    )
    op.create_index('ix_games_id', 'games', ['id'], unique=False)

    op.create_table('boxes',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('product_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('type', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('set_code', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('sku', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('game_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('expected_number_of_cards', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('description', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('image_url', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['game_id'], ['games.id'], name='boxes_game_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='boxes_pkey')
    )
    op.create_index('ix_boxes_id', 'boxes', ['id'], unique=False)

    op.create_table('open_boxes',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('box_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('date_opened', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.Column('number_of_cards', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['box_id'], ['boxes.id'], name='open_boxes_box_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='open_boxes_pkey')
    )
    op.create_index('ix_open_boxes_id', 'open_boxes', ['id'], unique=False)

    op.create_table('open_cards',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('box_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['box_id'], ['open_boxes.id'], name='open_cards_box_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='open_cards_pkey')
    )
    op.create_index('ix_open_cards_id', 'open_cards', ['id'], unique=False)

    op.create_table('cost_basis',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('open_box_id', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.ForeignKeyConstraint(['open_box_id'], ['open_boxes.id'], name='cost_basis_open_box_id_fkey'),
        sa.PrimaryKeyConstraint('id', name='cost_basis_pkey')
    )
    op.create_index('ix_cost_basis_id', 'cost_basis', ['id'], unique=False)

    op.create_table('cards',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('rarity', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('set_name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('quantity', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('tcgplayer_sku', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('product_line', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('product_name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('number', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('condition', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('tcg_market_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('tcg_direct_low', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('tcg_low_price_with_shipping', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('tcg_low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('total_quantity', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('add_to_quantity', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('tcg_marketplace_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('photo_url', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='cards_pkey')
    )
    op.create_index('ix_cards_tcgplayer_sku', 'cards', ['tcgplayer_sku'], unique=True)
    op.create_index('ix_cards_set_name', 'cards', ['set_name'], unique=False)
    op.create_index('ix_cards_name', 'cards', ['name'], unique=False)
    op.create_index('ix_cards_id', 'cards', ['id'], unique=False)
    op.create_table('inventory',
        sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
        sa.Column('tcgplayer_id', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('product_line', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('set_name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('product_name', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('title', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('number', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('rarity', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('condition', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('tcg_market_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('tcg_direct_low', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('tcg_low_price_with_shipping', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('tcg_low_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('total_quantity', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('add_to_quantity', sa.INTEGER(), autoincrement=False, nullable=True),
        sa.Column('tcg_marketplace_price', sa.DOUBLE_PRECISION(precision=53), autoincrement=False, nullable=True),
        sa.Column('photo_url', sa.VARCHAR(), autoincrement=False, nullable=True),
        sa.Column('created_at', postgresql.TIMESTAMP(timezone=True), server_default=sa.text('now()'), autoincrement=False, nullable=True),
        sa.Column('updated_at', postgresql.TIMESTAMP(timezone=True), autoincrement=False, nullable=True),
        sa.PrimaryKeyConstraint('id', name='inventory_pkey')
    )
    op.create_index('ix_inventory_tcgplayer_id', 'inventory', ['tcgplayer_id'], unique=True)
    op.create_index('ix_inventory_id', 'inventory', ['id'], unique=False)
    op.drop_index(op.f('ix_cost_basis_id'), table_name='cost_basis')
    op.drop_table('cost_basis')
    op.drop_table('open_cards')
    op.drop_index(op.f('ix_open_events_id'), table_name='open_events')
    op.drop_table('open_events')
    op.drop_table('sealed_boxes')
    op.drop_index(op.f('ix_transaction_items_id'), table_name='transaction_items')
    op.drop_table('transaction_items')
    op.drop_table('sealed_cases')
    op.drop_index(op.f('ix_inventory_items_id'), table_name='inventory_items')
    op.drop_table('inventory_items')
    op.drop_index(op.f('ix_transactions_id'), table_name='transactions')
    op.drop_table('transactions')
    op.drop_table('physical_items')
    op.drop_index(op.f('ix_vendors_name'), table_name='vendors')
    op.drop_index(op.f('ix_vendors_id'), table_name='vendors')
    op.drop_table('vendors')
    op.drop_index(op.f('ix_tcgplayer_inventory_tcgplayer_id'), table_name='tcgplayer_inventory')
    op.drop_index(op.f('ix_tcgplayer_inventory_id'), table_name='tcgplayer_inventory')
    op.drop_table('tcgplayer_inventory')
    op.drop_table('products')
    op.drop_index(op.f('ix_customers_name'), table_name='customers')
    op.drop_index(op.f('ix_customers_id'), table_name='customers')
    op.drop_table('customers')
    # ### end Alembic commands ###
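
The hand-written op.execute() calls at the end of upgrade() run raw SQL, so Alembic does not order them for you; PostgreSQL's CASCADE removes dependent objects (foreign keys, views) along with each table, which is what makes the one-by-one drops safe. A hedged sketch of the same cleanup as a loop (the helper name is illustrative, not from the repo):

from alembic import op

def drop_legacy_tables() -> None:  # hypothetical helper, not in the repo
    # CASCADE drops dependent constraints, so explicit FK ordering is unnecessary
    for table in ('open_cards', 'cost_basis', 'open_boxes', 'boxes', 'games'):
        op.execute(f'DROP TABLE IF EXISTS {table} CASCADE')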
@@ -1,32 +0,0 @@
"""tcg prices again 3

Revision ID: e34bfa37db00
Revises: 493b2cb724d0
Create Date: 2025-04-17 23:05:40.805511

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'e34bfa37db00'
down_revision: Union[str, None] = '493b2cb724d0'
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    """Upgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    """Downgrade schema."""
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###
app.log
@@ -1,28 +1,33 @@
2025-04-19 13:56:40,410 - INFO - app.main - Application starting up...
2025-04-19 13:56:40,492 - INFO - app.main - Database initialized successfully
2025-04-19 13:56:40,492 - INFO - app.services.service_manager - Service OrderManagementService registered
2025-04-19 13:56:40,492 - INFO - app.services.service_manager - Service TCGPlayerInventoryService registered
2025-04-19 13:56:40,492 - INFO - app.services.service_manager - Service LabelPrinterService registered
2025-04-19 13:56:40,492 - INFO - app.services.service_manager - Service RegularPrinterService registered
2025-04-19 13:56:40,495 - INFO - app.services.service_manager - Service AddressLabelService registered
2025-04-19 13:56:40,497 - INFO - app.services.service_manager - Service PullSheetService registered
2025-04-19 13:56:40,497 - INFO - app.services.service_manager - Service SetLabelService registered
2025-04-19 13:56:40,497 - INFO - app.services.service_manager - Service DataInitializationService registered
2025-04-19 13:56:40,498 - INFO - app.services.service_manager - Service SchedulerService registered
2025-04-19 13:56:40,498 - INFO - app.services.service_manager - Service FileService registered
2025-04-19 13:56:40,498 - INFO - app.services.service_manager - Service TCGCSVService registered
2025-04-19 13:56:40,498 - INFO - app.services.service_manager - Service MTGJSONService registered
2025-04-19 13:56:40,499 - INFO - app.services.service_manager - All services initialized successfully
2025-04-19 13:56:40,499 - INFO - app.services.data_initialization - Starting data initialization process
2025-04-19 13:56:40,499 - INFO - app.services.data_initialization - Data initialization completed
2025-04-19 13:56:40,499 - INFO - app.main - Data initialization results: {}
2025-04-19 13:56:40,499 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-19 13:56:40,499 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_open_orders_hourly to run every 3600 seconds
2025-04-19 13:56:40,499 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-19 13:56:40,499 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_all_orders_daily to run every 86400 seconds
2025-04-19 13:56:40,499 - INFO - apscheduler.scheduler - Added job "SchedulerService.start_scheduled_tasks.<locals>.<lambda>" to job store "default"
2025-04-19 13:56:40,500 - INFO - apscheduler.scheduler - Added job "SchedulerService.start_scheduled_tasks.<locals>.<lambda>" to job store "default"
2025-04-19 13:56:40,500 - INFO - apscheduler.scheduler - Scheduler started
2025-04-19 13:56:40,500 - INFO - app.services.scheduler.base_scheduler - Scheduler started
2025-04-19 13:56:40,500 - INFO - app.services.scheduler.scheduler_service - All scheduled tasks started
2025-04-19 13:56:40,500 - INFO - app.main - Scheduler started successfully
2025-04-17 13:27:20,268 - INFO - app.main - Application starting up...
2025-04-17 13:27:20,319 - INFO - app.main - Database initialized successfully
2025-04-17 13:27:20,366 - INFO - app.services.service_manager - Service OrderManagementService registered
2025-04-17 13:27:20,366 - INFO - app.services.service_manager - Service TCGPlayerInventoryService registered
2025-04-17 13:27:20,369 - INFO - app.services.service_manager - Service LabelPrinterService registered
2025-04-17 13:27:20,372 - INFO - app.services.service_manager - Service RegularPrinterService registered
2025-04-17 13:27:20,387 - INFO - app.services.service_manager - Service AddressLabelService registered
2025-04-17 13:27:20,592 - INFO - app.services.service_manager - Service PullSheetService registered
2025-04-17 13:27:20,592 - INFO - app.services.service_manager - Service SetLabelService registered
2025-04-17 13:27:20,634 - INFO - app.services.service_manager - Service DataInitializationService registered
2025-04-17 13:27:20,652 - INFO - app.services.service_manager - Service SchedulerService registered
2025-04-17 13:27:20,652 - INFO - app.services.service_manager - Service FileService registered
2025-04-17 13:27:20,653 - INFO - app.services.service_manager - All services initialized successfully
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_open_orders_hourly to run every 3600 seconds
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Adding job tentatively -- it will be properly scheduled when the scheduler starts
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.base_scheduler - Scheduled task update_all_orders_daily to run every 86400 seconds
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Added job "SchedulerService.update_open_orders_hourly" to job store "default"
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Added job "SchedulerService.update_all_orders_daily" to job store "default"
2025-04-17 13:27:20,653 - INFO - apscheduler.scheduler - Scheduler started
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.base_scheduler - Scheduler started
2025-04-17 13:27:20,653 - INFO - app.services.scheduler.scheduler_service - All scheduled tasks started
2025-04-17 13:27:20,653 - INFO - app.main - Scheduler started successfully
2025-04-17 13:27:24,285 - INFO - app.services.regular_printer_service - Print job 85 submitted to printer MFCL2750DW-3
2025-04-17 13:28:05,282 - INFO - app.services.external_api.base_external_service - Making request to https://order-management-api.tcgplayer.com/orders/packing-slips/export?api-version=2.0
2025-04-17 13:28:05,417 - INFO - app.services.label_printer_service - Converting PDF app/data/cache/tcgplayer/packing_slips/pdf/packing_slip_2025-04-17_13-28-05.pdf to images
2025-04-17 13:28:05,489 - INFO - app.services.label_printer_service - Successfully converted PDF to 2 images
2025-04-17 13:28:05,489 - INFO - app.services.label_printer_service - Processing page 1 with dimensions (1700, 2200)
2025-04-17 13:28:09,731 - INFO - app.services.label_printer_service - Processing page 2 with dimensions (1700, 2200)
2025-04-17 13:28:15,097 - INFO - app.services.label_printer_service - Converting PDF app/data/cache/tcgplayer/packing_slips/pdf/packing_slip_2025-04-17_13-28-05.pdf to images
2025-04-17 13:28:15,167 - INFO - app.services.label_printer_service - Successfully converted PDF to 2 images
2025-04-17 13:28:15,167 - INFO - app.services.label_printer_service - Processing page 1 with dimensions (1700, 2200)
2025-04-17 13:28:19,411 - INFO - app.services.label_printer_service - Processing page 2 with dimensions (1700, 2200)
app/main.py
@@ -1,4 +1,4 @@
from fastapi import FastAPI, HTTPException
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.responses import FileResponse
@@ -6,17 +6,15 @@ from contextlib import asynccontextmanager
import uvicorn
import logging
import os
from pathlib import Path
from app.routes import routes
from app.db.database import init_db, SessionLocal
from app.services.service_manager import ServiceManager
import logging

# Configure logging
log_file = Path("app.log")
if log_file.exists():
    # Archive old log file instead of deleting
    archive_path = log_file.with_suffix(f'.{log_file.stat().st_mtime}.log')
    log_file.rename(archive_path)
log_file = "app.log"
if os.path.exists(log_file):
    os.remove(log_file)  # Remove existing log file to start fresh

# Create a formatter
formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s - %(message)s')
@@ -39,45 +37,25 @@ logger = logging.getLogger(__name__)
logger.info("Application starting up...")

# Initialize service manager
service_manager = None
service_manager = ServiceManager()

@asynccontextmanager
async def lifespan(app: FastAPI):
    global service_manager
    service_manager = ServiceManager()

    # Startup
    try:
        init_db()
        logger.info("Database initialized successfully")

        # Initialize all services
        await service_manager.initialize_services()

        # Get a database session
        db = SessionLocal()
        try:
            data_init_service = service_manager.get_service('data_initialization')
            data_init = await data_init_service.initialize_data(db, game_ids=[1, 3], use_cache=False, init_categories=False, init_products=False, init_groups=False, init_archived_prices=False, init_mtgjson=False, archived_prices_start_date="2024-03-05", archived_prices_end_date="2025-04-17")
            logger.info(f"Data initialization results: {data_init}")

            # Start the scheduler
            scheduler = service_manager.get_service('scheduler')
            await scheduler.start_scheduled_tasks(db)
            await scheduler.start_scheduled_tasks()
            logger.info("Scheduler started successfully")

            yield
        except Exception as e:
            logger.error(f"Error during application startup: {str(e)}")
            raise
        finally:
            db.close()
    except Exception as e:
        logger.error(f"Critical error during application startup: {str(e)}")
        raise
    finally:

        # Shutdown
        if service_manager:
            await service_manager.cleanup_services()
            logger.info("All services cleaned up successfully")

@@ -94,23 +72,16 @@ app.mount("/static", StaticFiles(directory="app/static"), name="static")
# Serve index.html at root
@app.get("/")
async def read_root():
    index_path = Path('app/static/index.html')
    if not index_path.exists():
        raise HTTPException(status_code=404, detail="Index file not found")
    return FileResponse(index_path)
    return FileResponse('app/static/index.html')

# Serve app.js
@app.get("/app.js")
async def read_app_js():
    js_path = Path('app/static/app.js')
    if not js_path.exists():
        raise HTTPException(status_code=404, detail="App.js file not found")
    return FileResponse(js_path)
    return FileResponse('app/static/app.js')

# Configure CORS with specific origins in production
app.add_middleware(
    CORSMiddleware,
    allow_origins=["http://localhost:3000"],  # Update with your frontend URL
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
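
Both versions of main.py hang startup and shutdown off FastAPI's lifespan context manager; a minimal self-contained sketch of the pattern, with a placeholder resource standing in for the app's real ServiceManager:

from contextlib import asynccontextmanager
from fastapi import FastAPI

@asynccontextmanager
async def lifespan(app: FastAPI):
    # Startup: everything before yield runs once, before requests are served
    resources = {"db": "initialized"}  # placeholder for init_db()/services
    try:
        yield
    finally:
        # Shutdown: everything after yield runs when the server stops
        resources.clear()

app = FastAPI(lifespan=lifespan)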
@@ -1,50 +1,26 @@
from app.models.box import Box
from app.models.card import Card
from app.models.file import File
from app.models.inventory_management import (
    PhysicalItem,
    InventoryItem,
    TransactionItem,
    OpenEvent,
    Vendor,
    Customer,
    Transaction,
    CostBasis
)
from app.models.game import Game
from app.models.inventory import Inventory
from app.models.mtgjson_card import MTGJSONCard
from app.models.mtgjson_sku import MTGJSONSKU
from app.models.product import Product
from app.models.tcgplayer_category import TCGPlayerCategory
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_inventory import TCGPlayerInventory
from app.models.tcgplayer_order import (
    TCGPlayerOrder,
    TCGPlayerOrderTransaction,
    TCGPlayerOrderProduct,
    TCGPlayerOrderRefund
)
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory
from app.models.tcgplayer_order import TCGPlayerOrder
from app.models.tcgplayer_product import TCGPlayerProduct

# This makes all models available for Alembic to discover
__all__ = [
    'Box',
    'Card',
    'File',
    'PhysicalItem',
    'InventoryItem',
    'TransactionItem',
    'OpenEvent',
    'Vendor',
    'Customer',
    'Transaction',
    'CostBasis',
    'Game',
    'Inventory',
    'MTGJSONCard',
    'MTGJSONSKU',
    'Product',
    'TCGPlayerCategory',
    'TCGPlayerGroup',
    'TCGPlayerInventory',
    'TCGPlayerOrder',
    'TCGPlayerOrderTransaction',
    'TCGPlayerOrderProduct',
    'TCGPlayerOrderRefund',
    'TCGPlayerPriceHistory',
    'TCGPlayerProduct'
]
app/models/box.py (new file)
@@ -0,0 +1,30 @@
from sqlalchemy import Column, Integer, String, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.db.database import Base

class Box(Base):
    __tablename__ = "boxes"

    id = Column(Integer, primary_key=True, index=True)
    product_id = Column(Integer)
    type = Column(String)
    set_code = Column(String)
    sku = Column(Integer)
    name = Column(String)
    game_id = Column(Integer, ForeignKey("games.id"))
    expected_number_of_cards = Column(Integer)
    description = Column(String)
    image_url = Column(String)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())

class OpenBox(Base):
    __tablename__ = "open_boxes"

    id = Column(Integer, primary_key=True, index=True)
    box_id = Column(Integer, ForeignKey("boxes.id"))
    number_of_cards = Column(Integer)
    date_opened = Column(DateTime(timezone=True))
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
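
A hedged usage sketch for the new Box/OpenBox models, assuming the SessionLocal factory imported in app/main.py above; the field values are illustrative only:

from app.db.database import SessionLocal
from app.models.box import Box, OpenBox

db = SessionLocal()
try:
    box = Box(name="Example Draft Box", type="draft", game_id=1,  # illustrative values
              expected_number_of_cards=540)
    db.add(box)
    db.commit()
    # Record the box being opened, linked back by foreign key
    db.add(OpenBox(box_id=box.id, number_of_cards=540))
    db.commit()
finally:
    db.close()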
app/models/card.py (new file)
@@ -0,0 +1,37 @@
from typing import List, Optional
from datetime import datetime
from sqlalchemy import Column, Integer, String, Float, ForeignKey, DateTime
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.db.database import Base


class Card(Base):
    __tablename__ = "cards"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, index=True)
    rarity = Column(String)
    set_name = Column(String, index=True)
    price = Column(Float)
    quantity = Column(Integer, default=0)

    # TCGPlayer specific fields
    tcgplayer_sku = Column(String, unique=True, index=True)
    product_line = Column(String)
    product_name = Column(String)
    title = Column(String)
    number = Column(String)
    condition = Column(String)
    tcg_market_price = Column(Float)
    tcg_direct_low = Column(Float)
    tcg_low_price_with_shipping = Column(Float)
    tcg_low_price = Column(Float)
    total_quantity = Column(Integer)
    add_to_quantity = Column(Integer)
    tcg_marketplace_price = Column(Float)
    photo_url = Column(String)

    # Timestamps
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
app/models/game.py (new file)
@@ -0,0 +1,14 @@
from sqlalchemy import Column, Integer, String, DateTime
from sqlalchemy.orm import relationship
from sqlalchemy.sql import func
from app.db.database import Base

class Game(Base):
    __tablename__ = "games"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String)
    description = Column(String)
    image_url = Column(String)
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
@@ -2,8 +2,8 @@ from sqlalchemy import Column, Integer, String, Float, DateTime
from sqlalchemy.sql import func
from app.db.database import Base

class TCGPlayerInventory(Base):
    __tablename__ = "tcgplayer_inventory"
class Inventory(Base):
    __tablename__ = "inventory"

    id = Column(Integer, primary_key=True, index=True)
    tcgplayer_id = Column(String, unique=True, index=True)
@@ -22,6 +22,7 @@ class TCGPlayerInventory(Base):
    add_to_quantity = Column(Integer)
    tcg_marketplace_price = Column(Float)
    photo_url = Column(String)
    created_at = Column(DateTime(timezone=True), server_default=func.current_timestamp())
    updated_at = Column(DateTime(timezone=True), onupdate=func.current_timestamp())
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    # Timestamps
    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
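
This hunk also swaps func.current_timestamp() for func.now(); in SQLAlchemy the two compile to equivalent SQL defaults, which a quick check sketch shows (assumed behavior, verified against the PostgreSQL dialect):

from sqlalchemy import func
from sqlalchemy.dialects import postgresql

# Both defaults compile to equivalent PostgreSQL expressions:
print(func.now().compile(dialect=postgresql.dialect()))                 # now()
print(func.current_timestamp().compile(dialect=postgresql.dialect()))  # CURRENT_TIMESTAMP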
@ -1,185 +0,0 @@
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, Table
from sqlalchemy.orm import relationship
from app.db.database import Base

class PhysicalItem(Base):
    __tablename__ = "physical_items"

    id = Column(Integer, primary_key=True)
    item_type = Column(String)
    product_id = Column(Integer, ForeignKey("products.id"), nullable=False)
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    __mapper_args__ = {
        'polymorphic_on': item_type,
        'polymorphic_identity': 'physical_item'
    }

    # Relationships
    product = relationship("Product")
    inventory_item = relationship("InventoryItem", uselist=False, back_populates="physical_item")
    transaction_items = relationship("TransactionItem", back_populates="physical_item")

class SealedCase(PhysicalItem):
    __tablename__ = "sealed_cases"

    id = Column(Integer, ForeignKey('physical_items.id'), primary_key=True)

    __mapper_args__ = {
        'polymorphic_identity': 'sealed_case'
    }

    # Relationships
    boxes = relationship("SealedBox", back_populates="case")
    open_event = relationship("OpenEvent", uselist=False, back_populates="sealed_case")

class SealedBox(PhysicalItem):
    __tablename__ = "sealed_boxes"

    id = Column(Integer, ForeignKey('physical_items.id'), primary_key=True)
    case_id = Column(Integer, ForeignKey("sealed_cases.id"), nullable=True)

    __mapper_args__ = {
        'polymorphic_identity': 'sealed_box'
    }

    # Relationships
    case = relationship("SealedCase", back_populates="boxes")
    open_event = relationship("OpenEvent", uselist=False, back_populates="sealed_box")

class OpenBox(PhysicalItem):
    __tablename__ = "open_boxes"

    id = Column(Integer, ForeignKey('physical_items.id'), primary_key=True)
    open_event_id = Column(Integer, ForeignKey("open_events.id"))
    sealed_box_id = Column(Integer, ForeignKey("sealed_boxes.id"))

    __mapper_args__ = {
        'polymorphic_identity': 'open_box'
    }

    # Relationships
    open_event = relationship("OpenEvent", back_populates="resulting_boxes")
    sealed_box = relationship("SealedBox")
    cards = relationship("OpenCard", back_populates="box")

class OpenCard(PhysicalItem):
    __tablename__ = "open_cards"

    id = Column(Integer, ForeignKey('physical_items.id'), primary_key=True)
    open_event_id = Column(Integer, ForeignKey("open_events.id"))
    box_id = Column(Integer, ForeignKey("open_boxes.id"), nullable=True)

    __mapper_args__ = {
        'polymorphic_identity': 'open_card'
    }

    # Relationships
    open_event = relationship("OpenEvent", back_populates="resulting_cards")
    box = relationship("OpenBox", back_populates="cards")

class InventoryItem(Base):
    __tablename__ = "inventory_items"

    id = Column(Integer, primary_key=True, index=True)
    physical_item_id = Column(Integer, ForeignKey("physical_items.id"), unique=True)
    cost_basis = Column(Float)  # Current cost basis for this item
    parent_id = Column(Integer, ForeignKey("inventory_items.id"), nullable=True)  # For tracking hierarchy
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    # Relationships
    physical_item = relationship("PhysicalItem", back_populates="inventory_item")
    parent = relationship("InventoryItem", remote_side=[id])
    children = relationship("InventoryItem")

class TransactionItem(Base):
    __tablename__ = "transaction_items"

    id = Column(Integer, primary_key=True, index=True)
    transaction_id = Column(Integer, ForeignKey("transactions.id"))
    physical_item_id = Column(Integer, ForeignKey("physical_items.id"))
    unit_price = Column(Float, nullable=False)
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    # Relationships
    transaction = relationship("Transaction", back_populates="transaction_items")
    physical_item = relationship("PhysicalItem", back_populates="transaction_items")

class OpenEvent(Base):
    __tablename__ = "open_events"

    id = Column(Integer, primary_key=True, index=True)
    sealed_case_id = Column(Integer, ForeignKey("sealed_cases.id"), nullable=True)
    sealed_box_id = Column(Integer, ForeignKey("sealed_boxes.id"), nullable=True)
    open_date = Column(DateTime(timezone=True))
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    # Relationships
    sealed_case = relationship("SealedCase", back_populates="open_event")
    sealed_box = relationship("SealedBox", back_populates="open_event")
    resulting_boxes = relationship("OpenBox", back_populates="open_event")
    resulting_cards = relationship("OpenCard", back_populates="open_event")

class Vendor(Base):
    __tablename__ = "vendors"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, unique=True, index=True)
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)

class Customer(Base):
    __tablename__ = "customers"

    id = Column(Integer, primary_key=True, index=True)
    name = Column(String, unique=True, index=True)
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))

class Transaction(Base):
    __tablename__ = "transactions"

    id = Column(Integer, primary_key=True, index=True)
    vendor_id = Column(Integer, ForeignKey("vendors.id"), nullable=True)
    customer_id = Column(Integer, ForeignKey("customers.id"), nullable=True)
    transaction_type = Column(String)  # 'purchase' or 'sale'
    transaction_date = Column(DateTime(timezone=True))
    transaction_total_amount = Column(Float)
    transaction_notes = Column(String)
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    # Relationships
    transaction_items = relationship("TransactionItem", back_populates="transaction")

class CostBasis(Base):
    __tablename__ = "cost_basis"

    id = Column(Integer, primary_key=True, index=True)
    transaction_item_id = Column(Integer, ForeignKey("transaction_items.id"))
    sealed_case_id = Column(Integer, ForeignKey("sealed_cases.id"), nullable=True)
    sealed_box_id = Column(Integer, ForeignKey("sealed_boxes.id"), nullable=True)
    open_box_id = Column(Integer, ForeignKey("open_boxes.id"), nullable=True)
    open_card_id = Column(Integer, ForeignKey("open_cards.id"), nullable=True)
    quantity = Column(Integer, nullable=False, default=1)
    unit_cost = Column(Float, nullable=False)
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)

    # Relationships
    transaction_item = relationship("TransactionItem")
    sealed_case = relationship("SealedCase")
    sealed_box = relationship("SealedBox")
    open_box = relationship("OpenBox")
    open_card = relationship("OpenCard")
@ -1,12 +0,0 @@
from sqlalchemy import Column, Integer, String, DateTime
from app.db.database import Base

class Product(Base):
    __tablename__ = "products"

    id = Column(Integer, primary_key=True)
    name = Column(String)
    tcgplayer_id = Column(String)
    created_at = Column(DateTime(timezone=True))
    updated_at = Column(DateTime(timezone=True))
    deleted_at = Column(DateTime(timezone=True), nullable=True)
@ -1,4 +1,5 @@
from sqlalchemy import Column, Integer, String, Float, DateTime, JSON
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey, JSON
from sqlalchemy.orm import relationship
from datetime import datetime, UTC
from app.db.database import Base

@ -1,19 +0,0 @@
from sqlalchemy import Column, Integer, Float, DateTime, String
from sqlalchemy.sql import func
from app.db.database import Base

class TCGPlayerPriceHistory(Base):
    __tablename__ = "tcgplayer_price_history"

    id = Column(Integer, primary_key=True, index=True)
    product_id = Column(Integer, index=True)
    date = Column(DateTime, index=True)
    low_price = Column(Float)
    mid_price = Column(Float)
    high_price = Column(Float)
    market_price = Column(Float)
    direct_low_price = Column(Float)
    sub_type_name = Column(String)

    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
@ -1,4 +1,4 @@
from sqlalchemy import Column, Integer, String, Float, DateTime
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey
from sqlalchemy.sql import func
from app.db.database import Base

@ -11,13 +11,11 @@ class TCGPlayerProduct(Base):
    clean_name = Column(String)
    image_url = Column(String)
    category_id = Column(Integer)
    group_id = Column(Integer)
    group_id = Column(Integer, ForeignKey("tcgplayer_groups.group_id"))
    url = Column(String)
    modified_on = Column(DateTime)
    image_count = Column(Integer)
    ext_rarity = Column(String)
    ext_subtype = Column(String)
    ext_oracle_text = Column(String)
    ext_number = Column(String)
    low_price = Column(Float)
    mid_price = Column(Float)
@ -25,9 +23,5 @@ class TCGPlayerProduct(Base):
    market_price = Column(Float)
    direct_low_price = Column(Float)
    sub_type_name = Column(String)
    ext_power = Column(String)
    ext_toughness = Column(String)
    ext_flavor_text = Column(String)

    created_at = Column(DateTime(timezone=True), server_default=func.now())
    updated_at = Column(DateTime(timezone=True), onupdate=func.now())
@ -1,19 +1,12 @@
from fastapi import APIRouter, HTTPException, Depends, Query, UploadFile, File
from fastapi import APIRouter, HTTPException, Depends, Query
from typing import List
from datetime import datetime
from enum import Enum
from app.schemas.tcgplayer import TCGPlayerAPIOrderSummary, TCGPlayerAPIOrder
from app.schemas.generate import GenerateAddressLabelsRequest, GeneratePackingSlipsRequest, GeneratePullSheetsRequest, GenerateResponse, GenerateReturnLabelsRequest
from app.schemas.file import FileUpload
from app.schemas.generate import GenerateRequest, GenerateAddressLabelsRequest, GeneratePackingSlipsRequest, GeneratePullSheetsRequest, GenerateResponse
from app.services.service_manager import ServiceManager
from app.services.file_service import FileService
from sqlalchemy.orm import Session
from app.db.database import get_db
import os
import tempfile
import logging

logger = logging.getLogger(__name__)


class SearchRange(str, Enum):
@ -160,66 +153,3 @@ async def generate_address_labels(
        return {"success": False, "message": "Address labels not found"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to generate address labels: {str(e)}")

@router.post("/generate-return-labels")
async def generate_return_labels(
    request: GenerateReturnLabelsRequest,
    db: Session = Depends(get_db)
) -> GenerateResponse:
    """
    Generate and print return labels for the specified number of labels.

    Args:
        request: Dictionary containing:
            - number_of_labels: Number of return labels to generate
    """
    try:
        label_printer = service_manager.get_service('label_printer')
        success = await label_printer.print_file("app/data/assets/images/ccrcardsaddress.png", label_size="dk1201", label_type="return_label", copies=request.number_of_labels)
        return {"success": success, "message": "Return labels generated and printed successfully"}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to generate return labels: {str(e)}")

@router.post("/print-pirate-ship-label")
async def print_pirate_ship_label(
    file: UploadFile = File(...),
    db: Session = Depends(get_db)
) -> GenerateResponse:
    """
    Print a PDF file uploaded via the API.

    Args:
        file: The PDF file to print

    Returns:
        Success status of the operation
    """
    try:
        # Read the file content
        content = await file.read()

        # Store the file using FileService
        file_service = service_manager.get_service('file')
        stored_file = await file_service.save_file(
            db=db,
            file_data=content,
            filename=file.filename,
            subdir="pirate_ship_labels",
            file_type="pdf",
            content_type=file.content_type,
            metadata={"filename": file.filename}
        )

        try:
            # Use the label printer service to print the file
            label_printer = service_manager.get_service('label_printer')
            success = await label_printer.print_file(stored_file, label_size="dk1241", label_type="pirate_ship_label")

            return {"success": success, "message": "Pirate Ship label printed successfully"}
        except Exception as e:
            # If printing fails, we'll keep the file in storage for potential retry
            logger.error(f"Failed to print file: {str(e)}")
            raise e

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Failed to print Pirate Ship label: {str(e)}")
@ -3,6 +3,11 @@ from sqlalchemy.orm import Session
from app.db.database import get_db
from app.models.file import File as FileModel
from app.schemas.file import FileCreate, FileUpdate, FileDelete, FileList, FileInDB
from app.models.box import Box as BoxModel, OpenBox as OpenBoxModel
from app.schemas.box import BoxCreate, BoxUpdate, BoxDelete, BoxList, OpenBoxCreate, OpenBoxUpdate, OpenBoxDelete, OpenBoxList, BoxInDB, OpenBoxInDB
from app.models.game import Game as GameModel
from app.schemas.game import GameCreate, GameUpdate, GameDelete, GameList, GameInDB
from app.models.card import Card as CardModel
from app.routes.set_label_routes import router as set_label_router
from app.routes.order_routes import router as order_router

@ -43,3 +48,61 @@ async def update_file(file_id: int, file: FileUpdate):
@router.delete("/files/{file_id}", response_model=FileDelete)
async def delete_file(file_id: int):
    return {"message": "File deleted successfully"}

# ============================================================================
# Box Management Endpoints
# ============================================================================
@router.get("/boxes", response_model=BoxList)
async def get_boxes(page: int = 1, limit: int = 10, type: str = None, id: int = None):
    return {"boxes": [], "total": 0, "page": page, "limit": limit}

@router.post("/boxes", response_model=BoxInDB)
async def create_box(box: BoxCreate):
    return {"message": "Box created successfully"}

@router.put("/boxes/{box_id}", response_model=BoxInDB)
async def update_box(box_id: int, box: BoxUpdate):
    return {"message": "Box updated successfully"}

@router.delete("/boxes/{box_id}", response_model=BoxDelete)
async def delete_box(box_id: int):
    return {"message": "Box deleted successfully"}

# ============================================================================
# Open Box Management Endpoints
# ============================================================================
@router.get("/open_boxes", response_model=OpenBoxList)
async def get_open_boxes(page: int = 1, limit: int = 10, type: str = None, id: int = None):
    return {"open_boxes": [], "total": 0, "page": page, "limit": limit}

@router.post("/open_boxes", response_model=OpenBoxInDB)
async def create_open_box(open_box: OpenBoxCreate):
    return {"message": "Open box created successfully"}

@router.put("/open_boxes/{open_box_id}", response_model=OpenBoxInDB)
async def update_open_box(open_box_id: int, open_box: OpenBoxUpdate):
    return {"message": "Open box updated successfully"}

@router.delete("/open_boxes/{open_box_id}", response_model=OpenBoxDelete)
async def delete_open_box(open_box_id: int):
    return {"message": "Open box deleted successfully"}

# ============================================================================
# Game Management Endpoints
# ============================================================================
@router.get("/games", response_model=GameList)
async def get_games(page: int = 1, limit: int = 10, type: str = None, id: int = None):
    return {"games": [], "total": 0, "page": page, "limit": limit}

@router.post("/games", response_model=GameInDB)
async def create_game(game: GameCreate):
    return {"message": "Game created successfully"}

@router.put("/games/{game_id}", response_model=GameInDB)
async def update_game(game_id: int, game: GameUpdate):
    return {"message": "Game updated successfully"}

@router.delete("/games/{game_id}", response_model=GameDelete)
async def delete_game(game_id: int):
    return {"message": "Game deleted successfully"}

72
app/schemas/box.py
Normal file
@ -0,0 +1,72 @@
from typing import List, Optional
from datetime import datetime
from pydantic import BaseModel

# Base schema with common attributes
class BoxBase(BaseModel):
    name: str
    description: Optional[str] = None
    game_id: int
    set_id: Optional[int] = None
    price: Optional[float] = None
    quantity: Optional[int] = 0
    status: Optional[str] = "available"  # available, sold, reserved

# Schema for creating a new box
class BoxCreate(BoxBase):
    pass

# Schema for updating a box
class BoxUpdate(BoxBase):
    pass

# Schema for reading a box
class BoxInDB(BoxBase):
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True

# Schema for deleting a box
class BoxDelete(BaseModel):
    message: str

# Schema for listing boxes
class BoxList(BaseModel):
    boxes: List[BoxInDB]
    total: int
    page: int
    limit: int

# OpenBox schemas
class OpenBoxBase(BaseModel):
    box_id: int
    opened_at: Optional[datetime] = None
    opened_by: Optional[str] = None
    contents: Optional[List[dict]] = None
    status: Optional[str] = "pending"  # pending, opened, verified, listed

class OpenBoxCreate(OpenBoxBase):
    pass

class OpenBoxUpdate(OpenBoxBase):
    pass

class OpenBoxInDB(OpenBoxBase):
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True

class OpenBoxDelete(BaseModel):
    message: str

class OpenBoxList(BaseModel):
    open_boxes: List[OpenBoxInDB]
    total: int
    page: int
    limit: int
55
app/schemas/card.py
Normal file
@ -0,0 +1,55 @@
from typing import List, Optional
from datetime import datetime
from pydantic import BaseModel

# Base schema with common attributes
class CardBase(BaseModel):
    name: str
    rarity: Optional[str] = None
    set_name: Optional[str] = None
    price: Optional[float] = None
    quantity: Optional[int] = 0

    # TCGPlayer specific fields
    tcgplayer_sku: Optional[str] = None
    product_line: Optional[str] = None
    product_name: Optional[str] = None
    title: Optional[str] = None
    number: Optional[str] = None
    condition: Optional[str] = None
    tcg_market_price: Optional[float] = None
    tcg_direct_low: Optional[float] = None
    tcg_low_price_with_shipping: Optional[float] = None
    tcg_low_price: Optional[float] = None
    total_quantity: Optional[int] = None
    add_to_quantity: Optional[int] = None
    tcg_marketplace_price: Optional[float] = None
    photo_url: Optional[str] = None

# Schema for creating a new card
class CardCreate(CardBase):
    pass

# Schema for updating a card
class CardUpdate(CardBase):
    pass

# Schema for reading a card (includes id and relationships)
class CardInDB(CardBase):
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True

# Schema for listing cards
class CardList(BaseModel):
    cards: List[CardInDB]
    total: int
    page: int
    limit: int

# Schema for deleting a card
class CardDelete(BaseModel):
    message: str
@ -1,7 +1,6 @@
from typing import List, Optional
from datetime import datetime
from pydantic import BaseModel
from fastapi import UploadFile

# Base schema with common attributes
class FileBase(BaseModel):
@ -39,7 +38,3 @@ class FileList(BaseModel):
    total: int
    page: int
    limit: int

# Schema for file upload
class FileUpload(BaseModel):
    file: UploadFile
41
app/schemas/game.py
Normal file
@ -0,0 +1,41 @@
from typing import List, Optional
from datetime import datetime
from pydantic import BaseModel

# Base schema with common attributes
class GameBase(BaseModel):
    name: str
    publisher: Optional[str] = None
    release_date: Optional[datetime] = None
    description: Optional[str] = None
    website: Optional[str] = None
    logo_url: Optional[str] = None
    status: Optional[str] = "active"  # active, inactive, discontinued

# Schema for creating a new game
class GameCreate(GameBase):
    pass

# Schema for updating a game
class GameUpdate(GameBase):
    pass

# Schema for reading a game
class GameInDB(GameBase):
    id: int
    created_at: datetime
    updated_at: Optional[datetime] = None

    class Config:
        from_attributes = True

# Schema for deleting a game
class GameDelete(BaseModel):
    message: str

# Schema for listing games
class GameList(BaseModel):
    games: List[GameInDB]
    total: int
    page: int
    limit: int
@ -27,6 +27,3 @@ class GeneratePullSheetsRequest(GenerateRequest):
class GenerateResponse(BaseModel):
    message: str
    success: bool

class GenerateReturnLabelsRequest(BaseModel):
    number_of_labels: int
@ -3,33 +3,11 @@ from app.services.service_manager import ServiceManager
from app.services.file_processing_service import FileProcessingService
from app.services.inventory_service import InventoryService
from app.services.file_service import FileService
from app.services.data_initialization import DataInitializationService
from app.services.external_api.tcgcsv.tcgcsv_service import TCGCSVService
from app.services.external_api.mtgjson.mtgjson_service import MTGJSONService
from app.services.label_printer_service import LabelPrinterService
from app.services.regular_printer_service import RegularPrinterService
from app.services.address_label_service import AddressLabelService
from app.services.pull_sheet_service import PullSheetService
from app.services.set_label_service import SetLabelService
from app.services.scheduler.scheduler_service import SchedulerService
from app.services.external_api.tcgplayer.order_management_service import OrderManagementService
from app.services.external_api.tcgplayer.tcgplayer_inventory_service import TCGPlayerInventoryService

__all__ = [
    'BaseService',
    'ServiceManager',
    'FileProcessingService',
    'InventoryService',
    'FileService',
    'DataInitializationService',
    'TCGCSVService',
    'MTGJSONService',
    'LabelPrinterService',
    'RegularPrinterService',
    'AddressLabelService',
    'PullSheetService',
    'SetLabelService',
    'SchedulerService',
    'OrderManagementService',
    'TCGPlayerInventoryService'
    'FileService'
]
@ -1,97 +1,121 @@
import os
import json
from datetime import datetime, timedelta
from typing import Optional, List, Dict, Any, Union, Generator, Callable
from typing import Optional, List, Dict, Any
from sqlalchemy.orm import Session
from app.services.external_api.tcgcsv.tcgcsv_service import TCGCSVService
from app.services.external_api.mtgjson.mtgjson_service import MTGJSONService
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_product import TCGPlayerProduct
from app.models.tcgplayer_category import TCGPlayerCategory
from app.services.base_service import BaseService
from app.schemas.file import FileInDB
from app.db.database import transaction
import logging
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory
from sqlalchemy import and_, bindparam, update, insert
import py7zr
import shutil

logger = logging.getLogger(__name__)
class DataInitializationService:
    def __init__(self, cache_dir: str = "app/data/cache/tcgcsv"):
        self.cache_dir = cache_dir
        self.categories_dir = os.path.join(cache_dir, "categories")
        self.groups_dir = os.path.join(cache_dir, "groups")
        self.products_dir = os.path.join(cache_dir, "products")
        self.tcgcsv_service = TCGCSVService()
        self.mtgjson_service = MTGJSONService()

        # Create all necessary directories
        os.makedirs(cache_dir, exist_ok=True)
        os.makedirs(self.categories_dir, exist_ok=True)
        os.makedirs(self.groups_dir, exist_ok=True)
        os.makedirs(self.products_dir, exist_ok=True)

class DataInitializationService(BaseService):
    def __init__(self):
        super().__init__(None)
    def _get_cache_path(self, filename: str, subdir: str) -> str:
        """Get the full path for a cached file in the specified subdirectory"""
        return os.path.join(self.cache_dir, subdir, filename)

    async def _cache_data(
        self,
        db: Session,
        data: Union[dict, list],
        filename: str,
        subdir: str,
        default_str: bool = False,
        file_type: str = "json",
        content_type: str = "application/json",
        metadata: Optional[Dict] = None
    ) -> FileInDB:
        """Generic function to cache data to a JSON file"""
        file_data = json.dumps(data, default=str if default_str else None, indent=2)
        return await self.file_service.save_file(
            db,
            file_data,
            filename,
            subdir,
            file_type=file_type,
            content_type=content_type,
            metadata=metadata
        )
    async def _cache_categories(self, categories_data: dict):
        """Cache categories data to a JSON file"""
        cache_path = self._get_cache_path("categories.json", "categories")
        with open(cache_path, 'w') as f:
            json.dump(categories_data, f, indent=2)

    async def _load_cached_data(
        self,
        db: Session,
        filename: str
    ) -> Optional[Dict[str, Any]]:
        """Generic function to load cached data from a JSON file with 7-day expiration"""
        file_record = await self.file_service.get_file_by_filename(db, filename)
        if file_record:
            # Check if cache is expired (7 days)
            cache_age = datetime.now() - file_record.created_at
            if cache_age.days < 7:
                with open(file_record.path, 'r') as f:
    async def _cache_groups(self, game_ids: List[int], groups_data: dict):
        for game_id in game_ids:
            cache_path = self._get_cache_path(f"groups_{game_id}.json", "groups")
            with open(cache_path, 'w') as f:
                json.dump(groups_data, f, default=str)

    async def _cache_products(self, game_ids: List[int], group_id: int, products_data: list):
        for game_id in game_ids:
            cache_path = self._get_cache_path(f"products_{game_id}_{group_id}.json", "products")
            with open(cache_path, 'w') as f:
                json.dump(products_data, f, default=str)

    async def _load_cached_categories(self) -> Optional[dict]:
        cache_path = self._get_cache_path("categories.json", "categories")
        if os.path.exists(cache_path):
            with open(cache_path, 'r') as f:
                return json.load(f)
        else:
            logger.info(f"Cache expired for {filename}, age: {cache_age.days} days")
            # Delete the expired cache file
            await self.file_service.delete_file(db, file_record.id)
            return None

    async def sync_categories(self, db: Session, categories_data: dict):
        """Sync categories data to the database using streaming for large datasets"""
        categories = categories_data.get("results", [])
        batch_size = 1000  # Process in batches of 1000
        total_categories = len(categories)
    async def _load_cached_groups(self, game_ids: List[int]) -> Optional[dict]:
        # Try to load cached data for any of the game IDs
        for game_id in game_ids:
            cache_path = self._get_cache_path(f"groups_{game_id}.json", "groups")
            if os.path.exists(cache_path):
                with open(cache_path, 'r') as f:
                    return json.load(f)
        return None

        with transaction(db):
            for i in range(0, total_categories, batch_size):
                batch = categories[i:i + batch_size]
                for category_data in batch:
    async def _load_cached_products(self, game_ids: List[int], group_id: int) -> Optional[list]:
        # Try to load cached data for any of the game IDs
        for game_id in game_ids:
            cache_path = self._get_cache_path(f"products_{game_id}_{group_id}.json", "products")
            if os.path.exists(cache_path):
                with open(cache_path, 'r') as f:
                    return json.load(f)
        return None

    async def initialize_data(
        self,
        db: Session,
        game_ids: List[int],
        use_cache: bool = True,
        init_categories: bool = True,
        init_groups: bool = True,
        init_products: bool = True,
        init_archived_prices: bool = False,
        archived_prices_start_date: Optional[str] = None,
        archived_prices_end_date: Optional[str] = None,
        init_mtgjson: bool = True
    ) -> Dict[str, Any]:
        """Initialize TCGPlayer data with configurable steps"""
        print("Initializing TCGPlayer data...")
        results = {
            "categories": 0,
            "groups": {},
            "products": {},
            "archived_prices": False,
            "mtgjson": {}
        }

        if init_categories:
            print("\nInitializing categories...")
            categories_data = None
            if use_cache:
                categories_data = await self._load_cached_categories()

            if not categories_data:
                print("Fetching categories from API...")
                categories_data = await self.tcgcsv_service.get_categories()
                if use_cache:
                    await self._cache_categories(categories_data)

            if not categories_data.get("success"):
                raise Exception(f"Failed to fetch categories: {categories_data.get('errors')}")

            # Sync categories to database
            categories = categories_data.get("results", [])
            synced_categories = []
            for category_data in categories:
                existing_category = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == category_data["categoryId"]).first()
                if existing_category:
                    # Update existing category
                    for key, value in {
                        "name": category_data["name"],
                        "display_name": category_data.get("displayName"),
                        "seo_category_name": category_data.get("seoCategoryName"),
                        "category_description": category_data.get("categoryDescription"),
                        "category_page_title": category_data.get("categoryPageTitle"),
                        "sealed_label": category_data.get("sealedLabel"),
                        "non_sealed_label": category_data.get("nonSealedLabel"),
                        "condition_guide_url": category_data.get("conditionGuideUrl"),
                        "is_scannable": category_data.get("isScannable", False),
                        "popularity": category_data.get("popularity", 0),
                        "is_direct": category_data.get("isDirect", False),
                        "modified_on": datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None
                    }.items():
                        setattr(existing_category, key, value)
                    synced_categories.append(existing_category)
                else:
                    new_category = TCGPlayerCategory(
                        category_id=category_data["categoryId"],
@ -109,63 +133,39 @@ class DataInitializationService(BaseService):
                        modified_on=datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None
                    )
                    db.add(new_category)

                # Commit after each batch
                    synced_categories.append(new_category)
                db.commit()
                logger.info(f"Processed {min(i + batch_size, total_categories)}/{total_categories} categories")
            results["categories"] = len(synced_categories)
            print(f"Synced {len(synced_categories)} categories")

    async def init_categories(self, db: Session, use_cache: bool = True) -> bool:
        """Initialize categories data"""
        logger.info("Starting categories initialization")
        # Process each game ID separately
        for game_id in game_ids:
            print(f"\nProcessing game ID: {game_id}")
            results["groups"][game_id] = 0
            results["products"][game_id] = {}

            if init_groups:
                print(f"Initializing groups for game ID {game_id}...")
                groups_data = None
                if use_cache:
                    categories_data = await self._load_cached_data(db, "categories.json")
                    if categories_data:
                        await self.sync_categories(db, categories_data)
                        logger.info("Categories initialized from cache")
                        return True
                    else:
                        logger.warning("No cached categories data found")
                        return False
                else:
                    tcgcsv_service = self.get_service('tcgcsv')
                    categories_data = await tcgcsv_service.get_categories()
                    groups_data = await self._load_cached_groups([game_id])

                    # Save the categories data
                    await self._cache_data(
                        db,
                        categories_data,
                        "categories.json",
                        "tcgcsv/categories",
                        file_type="json",
                        content_type="application/json"
                    )
                if not groups_data:
                    print(f"Fetching groups for game ID {game_id} from API...")
                    groups_data = await self.tcgcsv_service.get_groups([game_id])
                    if use_cache:
                        await self._cache_groups([game_id], groups_data)

                await self.sync_categories(db, categories_data)
                logger.info("Categories initialized from API")
                return True
                if not groups_data.get("success"):
                    raise Exception(f"Failed to fetch groups for game ID {game_id}: {groups_data.get('errors')}")

    async def sync_groups(self, db: Session, groups_data: dict):
        """Sync groups data to the database using streaming for large datasets"""
                # Sync groups to database
        groups = groups_data.get("results", [])
        batch_size = 1000  # Process in batches of 1000
        total_groups = len(groups)

        with transaction(db):
            for i in range(0, total_groups, batch_size):
                batch = groups[i:i + batch_size]
                for group_data in batch:
                synced_groups = []
                for group_data in groups:
                    existing_group = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == group_data["groupId"]).first()
                    if existing_group:
                        # Update existing group
                        for key, value in {
                            "name": group_data["name"],
                            "abbreviation": group_data.get("abbreviation"),
                            "is_supplemental": group_data.get("isSupplemental", False),
                            "published_on": datetime.fromisoformat(group_data["publishedOn"].replace("Z", "+00:00")) if group_data.get("publishedOn") else None,
                            "modified_on": datetime.fromisoformat(group_data["modifiedOn"].replace("Z", "+00:00")) if group_data.get("modifiedOn") else None,
                            "category_id": group_data.get("categoryId")
                        }.items():
                            setattr(existing_group, key, value)
                        synced_groups.append(existing_group)
                    else:
                        new_group = TCGPlayerGroup(
                            group_id=group_data["groupId"],
@ -177,561 +177,88 @@ class DataInitializationService(BaseService):
|
||||
category_id=group_data.get("categoryId")
|
||||
)
|
||||
db.add(new_group)
|
||||
|
||||
# Commit after each batch
|
||||
synced_groups.append(new_group)
|
||||
db.commit()
|
||||
logger.info(f"Processed {min(i + batch_size, total_groups)}/{total_groups} groups")
|
||||
results["groups"][game_id] = len(synced_groups)
|
||||
print(f"Synced {len(synced_groups)} groups for game ID {game_id}")
|
||||
|
||||
async def init_groups(self, db: Session, use_cache: bool = True, game_ids: List[int] = None) -> bool:
|
||||
"""Initialize groups data"""
|
||||
logger.info(f"Starting groups initialization for game IDs: {game_ids}")
|
||||
tcgcsv_service = self.get_service('tcgcsv')
|
||||
for game_id in game_ids:
|
||||
if init_products:
|
||||
# Handle products for each group in this game ID
|
||||
for group in synced_groups:
|
||||
print(f"Initializing products for group {group.name} (game ID {game_id})...")
|
||||
products_data = None
|
||||
if use_cache:
|
||||
groups_data = await self._load_cached_data(db, f"groups_{game_id}.json")
|
||||
if groups_data:
|
||||
await self.sync_groups(db, groups_data)
|
||||
logger.info(f"Groups initialized from cache for game ID {game_id}")
|
||||
else:
|
||||
logger.warning(f"No cached groups data found for game ID {game_id}")
|
||||
return False
|
||||
else:
|
||||
groups_data = await tcgcsv_service.get_groups(game_id)
|
||||
products_data = await self._load_cached_products([game_id], group.group_id)
|
||||
|
||||
# Save the groups data
|
||||
await self._cache_data(
|
||||
db,
|
||||
groups_data,
|
||||
f"groups_{game_id}.json",
|
||||
"tcgcsv/groups",
|
||||
file_type="json",
|
||||
content_type="application/json"
|
||||
)
|
||||
if not products_data:
|
||||
print(f"Fetching products for group {group.name} (game ID {game_id}) from API...")
|
||||
products_data = await self.tcgcsv_service.get_products_and_prices([game_id], group.group_id)
|
||||
if use_cache:
|
||||
await self._cache_products([game_id], group.group_id, products_data)
|
||||
|
||||
await self.sync_groups(db, groups_data)
|
||||
logger.info(f"Groups initialized from API for game ID {game_id}")
|
||||
return True
|
||||
|
||||
async def sync_products(self, db: Session, products_data: str):
|
||||
"""Sync products data to the database using streaming for large datasets"""
|
||||
import csv
|
||||
import io
|
||||
|
||||
# Parse CSV data
|
||||
csv_reader = csv.DictReader(io.StringIO(products_data))
|
||||
products_list = list(csv_reader)
|
||||
batch_size = 1000 # Process in batches of 1000
|
||||
total_products = len(products_list)
|
||||
|
||||
with transaction(db):
|
||||
for i in range(0, total_products, batch_size):
|
||||
batch = products_list[i:i + batch_size]
|
||||
for product_data in batch:
|
||||
existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == product_data["productId"]).first()
|
||||
# Sync products to database
|
||||
synced_products = []
|
||||
for product_data in products_data:
|
||||
existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == int(product_data["productId"])).first()
|
||||
if existing_product:
|
||||
# Update existing product
|
||||
for key, value in {
|
||||
"name": product_data["name"],
|
||||
"clean_name": product_data.get("cleanName"),
|
||||
"image_url": product_data.get("imageUrl"),
|
||||
"category_id": product_data.get("categoryId"),
|
||||
"group_id": product_data.get("groupId"),
|
||||
"url": product_data.get("url"),
|
||||
"modified_on": datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None,
|
||||
"image_count": product_data.get("imageCount", 0),
|
||||
"ext_rarity": product_data.get("extRarity"),
|
||||
"ext_number": product_data.get("extNumber"),
|
||||
"low_price": float(product_data.get("lowPrice")) if product_data.get("lowPrice") else None,
|
||||
"mid_price": float(product_data.get("midPrice")) if product_data.get("midPrice") else None,
|
||||
"high_price": float(product_data.get("highPrice")) if product_data.get("highPrice") else None,
|
||||
"market_price": float(product_data.get("marketPrice")) if product_data.get("marketPrice") else None,
|
||||
"direct_low_price": float(product_data.get("directLowPrice")) if product_data.get("directLowPrice") else None,
|
||||
"sub_type_name": product_data.get("subTypeName")
|
||||
}.items():
|
||||
setattr(existing_product, key, value)
|
||||
synced_products.append(existing_product)
|
||||
else:
|
||||
new_product = TCGPlayerProduct(
|
||||
product_id=product_data["productId"],
|
||||
product_id=int(product_data["productId"]),
|
||||
name=product_data["name"],
|
||||
clean_name=product_data.get("cleanName"),
|
||||
image_url=product_data.get("imageUrl"),
|
||||
category_id=product_data.get("categoryId"),
|
||||
group_id=product_data.get("groupId"),
|
||||
category_id=int(product_data["categoryId"]),
|
||||
group_id=int(product_data["groupId"]),
|
||||
url=product_data.get("url"),
|
||||
modified_on=datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None,
|
||||
image_count=product_data.get("imageCount", 0),
|
||||
image_count=int(product_data.get("imageCount", 0)),
|
||||
ext_rarity=product_data.get("extRarity"),
|
||||
ext_subtype=product_data.get("extSubtype"),
|
||||
ext_oracle_text=product_data.get("extOracleText"),
|
||||
ext_number=product_data.get("extNumber"),
|
||||
low_price=float(product_data.get("lowPrice")) if product_data.get("lowPrice") else None,
|
||||
mid_price=float(product_data.get("midPrice")) if product_data.get("midPrice") else None,
|
||||
high_price=float(product_data.get("highPrice")) if product_data.get("highPrice") else None,
|
||||
market_price=float(product_data.get("marketPrice")) if product_data.get("marketPrice") else None,
|
||||
direct_low_price=float(product_data.get("directLowPrice")) if product_data.get("directLowPrice") else None,
|
||||
sub_type_name=product_data.get("subTypeName"),
|
||||
ext_power=product_data.get("extPower"),
|
||||
ext_toughness=product_data.get("extToughness"),
|
||||
ext_flavor_text=product_data.get("extFlavorText")
|
||||
|
||||
sub_type_name=product_data.get("subTypeName")
|
||||
)
|
||||
db.add(new_product)
|
||||
|
||||
# Commit after each batch
|
||||
synced_products.append(new_product)
|
||||
db.commit()
|
||||
logger.info(f"Processed {min(i + batch_size, total_products)}/{total_products} products")
|
||||
results["products"][game_id][group.group_id] = len(synced_products)
|
||||
print(f"Synced {len(synced_products)} products for group {group.name} (game ID {game_id})")
|
||||
|
||||
async def init_products(self, db: Session, use_cache: bool = True, game_ids: List[int] = None) -> bool:
|
||||
"""Initialize products data"""
|
||||
logger.info(f"Starting products initialization for game IDs: {game_ids}")
|
||||
tcgcsv_service = self.get_service('tcgcsv')
|
||||
for game_id in game_ids:
|
||||
groups = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.category_id == game_id).all()
|
||||
logger.info(f"Processing {len(groups)} groups for game ID {game_id}")
|
||||
for group in groups:
|
||||
if use_cache:
|
||||
products_data = await self._load_cached_data(db, f"products_{game_id}_{group.group_id}.json")
|
||||
if products_data:
|
||||
await self.sync_products(db, products_data)
|
||||
logger.info(f"Products initialized from cache for group {group.group_id}")
|
||||
else:
|
||||
logger.warning(f"No cached products data found for group {group.group_id}")
|
||||
continue
|
||||
else:
|
||||
# Get CSV data from API
|
||||
csv_data = await tcgcsv_service.get_products_and_prices(game_id, group.group_id)
|
||||
|
||||
# Save the CSV file
|
||||
await self.file_service.save_file(
|
||||
db,
|
||||
csv_data,
|
||||
f"products_{game_id}_{group.group_id}.csv",
|
||||
"tcgcsv/products",
|
||||
file_type="csv",
|
||||
content_type="text/csv"
|
||||
)
|
||||
|
||||
# Parse and sync the CSV data
|
||||
await self.sync_products(db, csv_data)
|
||||
logger.info(f"Products initialized from API for group {group.group_id}")
|
||||
return True
|
||||
|
||||
async def sync_archived_prices(self, db: Session, archived_prices_data: dict, date: datetime):
|
||||
"""Sync archived prices data to the database using bulk operations.
|
||||
Note: Historical prices are never updated, only new records are inserted."""
|
||||
from sqlalchemy import insert
|
||||
from app.models.tcgplayer_price_history import TCGPlayerPriceHistory
|
||||
|
||||
# Prepare data for bulk operations
|
||||
price_records = []
|
||||
|
||||
for price_data in archived_prices_data.get("results", []):
|
||||
record = {
|
||||
"product_id": price_data["productId"],
|
||||
"date": date,
|
||||
"sub_type_name": price_data["subTypeName"],
|
||||
"low_price": price_data.get("lowPrice"),
|
||||
"mid_price": price_data.get("midPrice"),
|
||||
"high_price": price_data.get("highPrice"),
|
||||
"market_price": price_data.get("marketPrice"),
|
||||
"direct_low_price": price_data.get("directLowPrice")
|
||||
}
|
||||
price_records.append(record)
|
||||
|
||||
if not price_records:
|
||||
return
|
||||
|
||||
# Get existing records in bulk to avoid duplicates
|
||||
product_ids = [r["product_id"] for r in price_records]
|
||||
sub_type_names = [r["sub_type_name"] for r in price_records]
|
||||
|
||||
existing_records = db.query(TCGPlayerPriceHistory).filter(
|
||||
TCGPlayerPriceHistory.product_id.in_(product_ids),
|
||||
TCGPlayerPriceHistory.date == date,
|
||||
TCGPlayerPriceHistory.sub_type_name.in_(sub_type_names)
|
||||
).all()
|
||||
|
||||
# Filter out existing records
|
||||
existing_keys = {(r.product_id, r.date, r.sub_type_name) for r in existing_records}
|
||||
to_insert = [
|
||||
record for record in price_records
|
||||
if (record["product_id"], record["date"], record["sub_type_name"]) not in existing_keys
|
||||
]
|
||||
|
||||
# Perform bulk insert for new records only
|
||||
if to_insert:
|
||||
stmt = insert(TCGPlayerPriceHistory)
|
||||
db.execute(stmt, to_insert)
|
||||
db.commit()
|
||||
|
||||
async def init_archived_prices(self, db: Session, start_date: datetime, end_date: datetime, use_cache: bool = True, game_ids: List[int] = None) -> bool:
|
||||
"""Initialize archived prices data"""
|
||||
logger.info(f"Starting archived prices initialization from {start_date} to {end_date}")
|
||||
tcgcsv_service = self.get_service('tcgcsv')
|
||||
processed_dates = await tcgcsv_service.get_tcgcsv_date_range(start_date, end_date)
|
||||
logger.info(f"Processing {len(processed_dates)} dates")
|
||||
|
||||
# Convert game_ids to set for faster lookups
|
||||
desired_game_ids = set(game_ids) if game_ids else set()
|
||||
|
||||
for date in processed_dates:
|
||||
date_path = f"app/data/cache/tcgcsv/prices/{date}"
|
||||
|
||||
# Check if we already have the data for this date
|
||||
if use_cache and os.path.exists(date_path):
|
||||
logger.info(f"Using cached price data for {date}")
|
||||
else:
|
||||
logger.info(f"Downloading and processing archived prices for {date}")
|
||||
# Download and extract the archive
|
||||
archive_data = await tcgcsv_service.get_archived_prices_for_date(date)
|
||||
|
||||
# Save the archive file
|
||||
file_record = await self.file_service.save_file(
|
||||
db,
|
||||
archive_data,
|
||||
f"prices-{date}.ppmd.7z",
|
||||
"tcgcsv/prices/zip",
|
||||
file_type="application/x-7z-compressed",
|
||||
content_type="application/x-7z-compressed"
|
||||
)
|
||||
|
||||
# Extract the 7z file to a temporary directory
|
||||
temp_extract_path = f"app/data/cache/tcgcsv/prices/temp_{date}"
|
||||
os.makedirs(temp_extract_path, exist_ok=True)
|
||||
|
||||
with py7zr.SevenZipFile(file_record.path, 'r') as archive:
|
||||
archive.extractall(path=temp_extract_path)
|
||||
|
||||
# Find the date subdirectory in the temp directory
|
||||
date_subdir = os.path.join(temp_extract_path, str(date))
|
||||
if os.path.exists(date_subdir):
|
||||
# Remove existing directory if it exists
|
||||
if os.path.exists(date_path):
|
||||
shutil.rmtree(date_path)
|
||||
|
||||
# Create the destination directory
|
||||
os.makedirs(date_path, exist_ok=True)
|
||||
|
||||
# Move contents from the date subdirectory to the final path
|
||||
for item in os.listdir(date_subdir):
|
||||
src = os.path.join(date_subdir, item)
|
||||
dst = os.path.join(date_path, item)
|
||||
os.rename(src, dst)
|
||||
|
||||
# Clean up the temporary directory
|
||||
os.rmdir(date_subdir)
|
||||
os.rmdir(temp_extract_path)
|
||||
|
||||
# Process each category directory
|
||||
for category_id in os.listdir(date_path):
|
||||
# Skip categories that aren't in our desired game IDs
|
||||
if int(category_id) not in desired_game_ids:
|
||||
continue
|
||||
|
||||
category_path = os.path.join(date_path, category_id)
|
||||
if not os.path.isdir(category_path):
|
||||
continue
|
||||
|
||||
# Process each group directory
|
||||
for group_id in os.listdir(category_path):
|
||||
group_path = os.path.join(category_path, group_id)
|
||||
if not os.path.isdir(group_path):
|
||||
continue
|
||||
|
||||
# Process the prices file
|
||||
prices_file = os.path.join(group_path, "prices")
|
||||
if not os.path.exists(prices_file):
|
||||
continue
|
||||
|
||||
try:
|
||||
with open(prices_file, 'r') as f:
|
||||
price_data = json.load(f)
|
||||
if price_data.get("success"):
|
||||
await self.sync_archived_prices(db, price_data, datetime.strptime(date, "%Y-%m-%d"))
|
||||
logger.info(f"Processed prices for category {category_id}, group {group_id} on {date}")
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing prices file {prices_file}: {str(e)}")
|
||||
continue
|
||||
|
||||
return True
|
||||
|
||||
async def init_mtgjson(self, db: Session, use_cache: bool = True) -> Dict[str, Any]:
|
||||
"""Initialize MTGJSON data"""
|
||||
logger.info("Starting MTGJSON initialization")
|
||||
mtgjson_service = self.get_service('mtgjson')
|
||||
identifiers_count = 0
|
||||
skus_count = 0
|
||||
|
||||
# Process identifiers
|
||||
if use_cache:
|
||||
cached_file = await self.file_service.get_file_by_filename(db, "mtgjson_identifiers.json")
|
||||
if cached_file and os.path.exists(cached_file.path):
|
||||
logger.info("MTGJSON identifiers initialized from cache")
|
||||
identifiers_count = await self._process_streamed_data(
|
||||
db,
|
||||
self._stream_json_file(cached_file.path),
|
||||
"mtgjson_identifiers.json",
|
||||
"mtgjson",
|
||||
self.sync_mtgjson_identifiers
|
||||
)
|
||||
else:
|
||||
logger.info("Downloading MTGJSON identifiers from API")
|
||||
identifiers_count = await self._process_streamed_data(
|
||||
db,
|
||||
await mtgjson_service.get_identifiers(db),
|
||||
"mtgjson_identifiers.json",
|
||||
"mtgjson",
|
||||
self.sync_mtgjson_identifiers
|
||||
)
|
||||
else:
|
||||
logger.info("Downloading MTGJSON identifiers from API")
|
||||
identifiers_count = await self._process_streamed_data(
|
||||
db,
|
||||
await mtgjson_service.get_identifiers(db),
|
||||
"mtgjson_identifiers.json",
|
||||
"mtgjson",
|
||||
self.sync_mtgjson_identifiers
|
||||
)
|
||||
|
||||
# Process SKUs
|
||||
if use_cache:
|
||||
cached_file = await self.file_service.get_file_by_filename(db, "mtgjson_skus.json")
|
||||
if cached_file and os.path.exists(cached_file.path):
|
||||
logger.info("MTGJSON SKUs initialized from cache")
|
||||
skus_count = await self._process_streamed_data(
|
||||
db,
|
||||
self._stream_json_file(cached_file.path),
|
||||
"mtgjson_skus.json",
|
||||
"mtgjson",
|
||||
self.sync_mtgjson_skus
|
||||
)
|
||||
else:
|
||||
logger.info("Downloading MTGJSON SKUs from API")
|
||||
skus_count = await self._process_streamed_data(
|
||||
db,
|
||||
await mtgjson_service.get_skus(db),
|
||||
"mtgjson_skus.json",
|
||||
"mtgjson",
|
||||
self.sync_mtgjson_skus
|
||||
)
|
||||
else:
|
||||
logger.info("Downloading MTGJSON SKUs from API")
|
||||
skus_count = await self._process_streamed_data(
|
||||
db,
|
||||
await mtgjson_service.get_skus(db),
|
||||
"mtgjson_skus.json",
|
||||
"mtgjson",
|
||||
self.sync_mtgjson_skus
|
||||
)
|
||||
|
||||
return {
|
||||
"identifiers_processed": identifiers_count,
|
||||
"skus_processed": skus_count
|
||||
}
|
||||
|
||||
async def _process_streamed_data(
|
||||
self,
|
||||
db: Session,
|
||||
data_stream: Generator[Dict[str, Any], None, None],
|
||||
filename: str,
|
||||
subdir: str,
|
||||
sync_func: Callable
|
||||
) -> int:
|
||||
"""Process streamed data and sync to database"""
|
||||
count = 0
|
||||
items = []
|
||||
batch_size = 1000
|
||||
|
||||
for item in data_stream:
|
||||
if item["type"] == "meta":
|
||||
# Handle meta data separately
|
||||
continue
|
||||
|
||||
count += 1
|
||||
items.append(item["data"])
|
||||
|
||||
# Process in batches
|
||||
if len(items) >= batch_size:
|
||||
await sync_func(db, items)
|
||||
items = []
|
||||
|
||||
# Process any remaining items
|
||||
if items:
|
||||
await sync_func(db, items)
|
||||
|
||||
return count
|
||||
|
||||
    async def sync_mtgjson_identifiers(self, db: Session, identifiers_data: dict):
        """Sync MTGJSON identifiers data to the database"""
        from app.models.mtgjson_card import MTGJSONCard

        with transaction(db):
            for card_id, card_data in identifiers_data.items():
                identifiers = card_data.get("identifiers", {})
                # Build the column mapping once and reuse it for update and create
                fields = {
                    "name": card_data.get("name"),
                    "set_code": card_data.get("setCode"),
                    "uuid": card_data.get("uuid"),
                    "abu_id": identifiers.get("abuId"),
                    "card_kingdom_etched_id": identifiers.get("cardKingdomEtchedId"),
                    "card_kingdom_foil_id": identifiers.get("cardKingdomFoilId"),
                    "card_kingdom_id": identifiers.get("cardKingdomId"),
                    "cardsphere_id": identifiers.get("cardsphereId"),
                    "cardsphere_foil_id": identifiers.get("cardsphereFoilId"),
                    "cardtrader_id": identifiers.get("cardtraderId"),
                    "csi_id": identifiers.get("csiId"),
                    "mcm_id": identifiers.get("mcmId"),
                    "mcm_meta_id": identifiers.get("mcmMetaId"),
                    "miniaturemarket_id": identifiers.get("miniaturemarketId"),
                    "mtg_arena_id": identifiers.get("mtgArenaId"),
                    "mtgjson_foil_version_id": identifiers.get("mtgjsonFoilVersionId"),
                    "mtgjson_non_foil_version_id": identifiers.get("mtgjsonNonFoilVersionId"),
                    "mtgjson_v4_id": identifiers.get("mtgjsonV4Id"),
                    "mtgo_foil_id": identifiers.get("mtgoFoilId"),
                    "mtgo_id": identifiers.get("mtgoId"),
                    "multiverse_id": identifiers.get("multiverseId"),
                    "scg_id": identifiers.get("scgId"),
                    "scryfall_id": identifiers.get("scryfallId"),
                    "scryfall_card_back_id": identifiers.get("scryfallCardBackId"),
                    "scryfall_oracle_id": identifiers.get("scryfallOracleId"),
                    "scryfall_illustration_id": identifiers.get("scryfallIllustrationId"),
                    "tcgplayer_product_id": identifiers.get("tcgplayerProductId"),
                    "tcgplayer_etched_product_id": identifiers.get("tcgplayerEtchedProductId"),
                    "tnt_id": identifiers.get("tntId")
                }

                existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.card_id == card_id).first()
                if existing_card:
                    # Update existing card
                    for key, value in fields.items():
                        setattr(existing_card, key, value)
                else:
                    db.add(MTGJSONCard(card_id=card_id, **fields))

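The get-or-create above costs one SELECT per card. On PostgreSQL the same upsert can be a single statement; a hedged sketch, assuming card_id carries a unique constraint:

from sqlalchemy.dialects.postgresql import insert

def upsert_card(db, card_id: str, fields: dict) -> None:
    # Single round-trip upsert; requires a unique constraint on card_id
    stmt = insert(MTGJSONCard.__table__).values(card_id=card_id, **fields)
    stmt = stmt.on_conflict_do_update(index_elements=["card_id"], set_=fields)
    db.execute(stmt)
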
    async def sync_mtgjson_skus(self, db: Session, skus_data: dict):
        """Sync MTGJSON SKUs data to the database"""
        from app.models.mtgjson_sku import MTGJSONSKU

        with transaction(db):
            for card_uuid, sku_list in skus_data.items():
                for sku in sku_list:
                    if isinstance(sku, str):
                        # Handle case where sku is a bare skuId string
                        sku_id = sku
                        existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == sku_id).first()
                        if existing_sku:
                            # Update existing SKU
                            existing_sku.card_id = card_uuid
                        else:
                            db.add(MTGJSONSKU(sku_id=sku_id, card_id=card_uuid))
                    else:
                        # Handle case where sku is a dictionary
                        sku_id = str(sku.get("skuId"))
                        fields = {
                            "product_id": str(sku.get("productId")),
                            "condition": sku.get("condition"),
                            "finish": sku.get("finish"),
                            "language": sku.get("language"),
                            "printing": sku.get("printing"),
                            "card_id": card_uuid
                        }
                        existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == sku_id).first()
                        if existing_sku:
                            # Update existing SKU
                            for key, value in fields.items():
                                setattr(existing_sku, key, value)
                        else:
                            db.add(MTGJSONSKU(sku_id=sku_id, **fields))

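Since TcgplayerSkus entries arrive either as bare skuId strings or as full dictionaries, normalizing them up front would collapse the two branches into one; an illustrative helper (not in the source):

from typing import Any, Dict, Union

def normalize_sku(sku: Union[str, Dict[str, Any]]) -> Dict[str, Any]:
    """Coerce a bare skuId string into the dictionary shape the sync expects."""
    if isinstance(sku, str):
        return {"skuId": sku}
    return sku

assert normalize_sku("12345") == {"skuId": "12345"}
assert normalize_sku({"skuId": 1, "condition": "NM"})["condition"] == "NM"
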
    async def initialize_data(
        self,
        db: Session,
        game_ids: List[int],
        use_cache: bool = False,
        init_categories: bool = True,
        init_groups: bool = True,
        init_products: bool = True,
        init_archived_prices: bool = True,
        archived_prices_start_date: Optional[str] = None,
        archived_prices_end_date: Optional[str] = None,
        init_mtgjson: bool = True
    ) -> Dict[str, Any]:
        """Initialize 3rd party API data loads with configurable steps"""
        logger.info("Starting data initialization process")
        results = {}
        if init_categories:
            logger.info("Initializing categories...")
            results["categories"] = await self.init_categories(db, use_cache)
        if init_groups:
            logger.info("Initializing groups...")
            results["groups"] = await self.init_groups(db, use_cache, game_ids)
        if init_products:
            logger.info("Initializing products...")
            results["products"] = await self.init_products(db, use_cache, game_ids)
        if init_archived_prices:
            # Validate the date range before doing any work
            if not archived_prices_start_date or not archived_prices_end_date:
                raise ValueError("Both start_date and end_date are required for archived prices initialization")
            logger.info(f"Initializing archived prices from {archived_prices_start_date} to {archived_prices_end_date}...")
            results["archived_prices"] = await self.init_archived_prices(
                db,
                archived_prices_start_date,
                archived_prices_end_date,
                use_cache,
                game_ids
            )
        if init_mtgjson:
            logger.info("Initializing MTGJSON data...")
            results["mtgjson"] = await self.init_mtgjson(db, use_cache)

        logger.info("Data initialization completed")
        return results

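A sketch of driving a full load end to end; the session handling follows the next(get_db()) pattern used elsewhere in this changeset, and the game id is a placeholder:

import asyncio
from app.db.database import get_db
from app.services.data_initialization import DataInitializationService

async def run_initial_load() -> None:
    db = next(get_db())
    try:
        service = DataInitializationService()
        results = await service.initialize_data(
            db,
            game_ids=[1],  # placeholder TCGPlayer game id
            archived_prices_start_date="2025-02-08",
            archived_prices_end_date="2025-02-15",
        )
        print(results)
    finally:
        db.close()

asyncio.run(run_initial_load())
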
    async def clear_cache(self, db: Session) -> None:
        """Clear all cached data"""
        # Delete all files in categories, groups, and products directories
        for subdir in ["categories", "groups", "products"]:
            files = await self.file_service.list_files(db, file_type="json")
            for file in files:
                if file.path.startswith(subdir):
                    await self.file_service.delete_file(db, file.id)
            # Remove any loose files left on disk
            dir_path = os.path.join(self.cache_dir, subdir)
            if os.path.exists(dir_path):
                for filename in os.listdir(dir_path):
                    file_path = os.path.join(dir_path, filename)
                    if os.path.isfile(file_path):
                        os.unlink(file_path)
        await self.mtgjson_service.clear_cache()
        print("Cache cleared")

    async def close(self):
        await self.tcgcsv_service.close()
@@ -92,3 +92,24 @@ class BaseExternalService:
    def file_service(self):
        """Convenience property for file service"""
        return self.get_service('file')

    async def save_file(self, db: Session, file_data: Union[bytes, list[dict]], file_name: str, subdir: str, file_type: Optional[str] = None) -> FileInDB:
        """Save a file using the FileService"""
        if isinstance(file_data, list):
            # Convert list of dictionaries to CSV bytes
            output = io.StringIO()
            writer = csv.DictWriter(output, fieldnames=file_data[0].keys())
            writer.writeheader()
            writer.writerows(file_data)
            file_data = output.getvalue().encode('utf-8')
            file_type = file_type or 'text/csv'

        # Use FileService to save the file
        file_service = self.get_service('file')
        return await file_service.save_file(
            db=db,
            file_data=file_data,
            filename=file_name,
            subdir=subdir,
            file_type=file_type
        )
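The list branch assumes every row shares the first row's keys (csv.DictWriter raises ValueError on unexpected extras). A quick round-trip showing the bytes it produces:

import csv
import io

rows = [{"productId": 1, "price": 1.23}, {"productId": 2, "price": 0.5}]
output = io.StringIO()
writer = csv.DictWriter(output, fieldnames=rows[0].keys())
writer.writeheader()
writer.writerows(rows)
data = output.getvalue().encode("utf-8")
print(data.decode())
# productId,price
# 1,1.23
# 2,0.5
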
@@ -1,24 +1,29 @@
import os
import json
import zipfile
import aiohttp
import asyncio
import time
import sys
from typing import Dict, Any, Optional, Generator
from sqlalchemy.orm import Session
from datetime import datetime
from app.models.mtgjson_card import MTGJSONCard
from app.models.mtgjson_sku import MTGJSONSKU
from app.db.database import get_db, transaction
from app.services.external_api.base_external_service import BaseExternalService
from app.schemas.file import FileInDB
import logging

logger = logging.getLogger(__name__)

class MTGJSONService(BaseExternalService):
    def __init__(self, cache_dir: str = "app/data/cache/mtgjson", batch_size: int = 1000):
        super().__init__(base_url="https://mtgjson.com/api/v5/")
        self.cache_dir = cache_dir
        self.identifiers_dir = os.path.join(cache_dir, "identifiers")
        self.skus_dir = os.path.join(cache_dir, "skus")
        self.batch_size = batch_size

        # Create necessary directories
        os.makedirs(cache_dir, exist_ok=True)
        os.makedirs(self.identifiers_dir, exist_ok=True)
        os.makedirs(self.skus_dir, exist_ok=True)

@@ -41,133 +46,112 @@ class MTGJSONService(BaseExternalService):
print(f"Downloading {url}...")
|
||||
start_time = time.time()
|
||||
|
||||
# Use the base external service's _make_request method
|
||||
file_data = await self._make_request(
|
||||
method="GET",
|
||||
endpoint=url.replace(self.base_url, ""),
|
||||
binary=True
|
||||
)
|
||||
|
||||
# Save the file using the file service
|
||||
return await self.file_service.save_file(
|
||||
async with aiohttp.ClientSession() as session:
|
||||
async with session.get(url) as response:
|
||||
if response.status == 200:
|
||||
file_data = await response.read()
|
||||
return await self.save_file(
|
||||
db=db,
|
||||
file_data=file_data,
|
||||
filename=filename,
|
||||
file_name=filename,
|
||||
subdir=f"mtgjson/{subdir}",
|
||||
file_type="application/zip",
|
||||
content_type="application/zip"
|
||||
file_type=response.headers.get('content-type', 'application/octet-stream')
|
||||
)
|
||||
else:
|
||||
raise Exception(f"Failed to download file from {url}. Status: {response.status}")
|
||||
|
||||
async def _unzip_file(self, file_record: FileInDB, subdir: str, db: Session) -> str:
|
||||
"""Unzip a file to the specified subdirectory and return the path to the extracted JSON file"""
|
||||
try:
|
||||
# Use the appropriate subdirectory based on the type
|
||||
extract_path = self.identifiers_dir if subdir == "identifiers" else self.skus_dir
|
||||
os.makedirs(extract_path, exist_ok=True)
|
||||
|
||||
with zipfile.ZipFile(file_record.path, 'r') as zip_ref:
|
||||
async def _unzip_file(self, zip_path: str, extract_dir: str) -> str:
|
||||
"""Unzip a file to the specified directory and return the path to the extracted JSON file"""
|
||||
with zipfile.ZipFile(zip_path, 'r') as zip_ref:
|
||||
json_filename = zip_ref.namelist()[0]
|
||||
zip_ref.extractall(extract_path)
|
||||
json_path = os.path.join(extract_path, json_filename)
|
||||
|
||||
# Create a file record for the extracted JSON file
|
||||
with open(json_path, 'r') as f:
|
||||
json_data = f.read()
|
||||
json_file_record = await self.file_service.save_file(
|
||||
db=db,
|
||||
file_data=json_data,
|
||||
filename=json_filename,
|
||||
subdir=f"mtgjson/{subdir}",
|
||||
file_type="application/json",
|
||||
content_type="application/json"
|
||||
)
|
||||
|
||||
return str(json_file_record.path)
|
||||
except Exception as e:
|
||||
logger.error(f"Error unzipping file: {e}")
|
||||
raise
|
||||
zip_ref.extractall(extract_dir)
|
||||
return os.path.join(extract_dir, json_filename)
|
||||
|
||||
def _stream_json_file(self, file_path: str) -> Generator[Dict[str, Any], None, None]:
|
||||
"""Stream a JSON file and yield items one at a time using a streaming parser"""
|
||||
logger.info(f"Starting to stream JSON file: {file_path}")
|
||||
try:
|
||||
"""Stream a JSON file and yield items one at a time"""
|
||||
print(f"Starting to stream JSON file: {file_path}")
|
||||
with open(file_path, 'r') as f:
|
||||
# First, we need to find the start of the data section
|
||||
data_started = False
|
||||
current_key = None
|
||||
current_value = []
|
||||
brace_count = 0
|
||||
# Load the entire file since MTGJSON uses a specific format
|
||||
data = json.load(f)
|
||||
|
||||
for line in f:
|
||||
line = line.strip()
|
||||
if not line:
|
||||
# First yield the meta data
|
||||
if "meta" in data:
|
||||
yield {"type": "meta", "data": data["meta"]}
|
||||
|
||||
# Then yield each item in the data section
|
||||
if "data" in data:
|
||||
for key, value in data["data"].items():
|
||||
yield {"type": "item", "data": {key: value}}
|
||||
|
||||
async def _process_batch(self, db: Session, items: list, model_class) -> int:
|
||||
"""Process a batch of items and add them to the database"""
|
||||
processed = 0
|
||||
with transaction(db):
|
||||
for item in items:
|
||||
if model_class == MTGJSONCard:
|
||||
# Check if card already exists
|
||||
existing_card = db.query(MTGJSONCard).filter(MTGJSONCard.card_id == item["card_id"]).first()
|
||||
if existing_card:
|
||||
continue
|
||||
|
||||
if not data_started:
|
||||
if '"data":' in line:
|
||||
data_started = True
|
||||
# Skip the opening brace of the data object
|
||||
line = line[line.find('"data":') + 7:].strip()
|
||||
if line.startswith('{'):
|
||||
line = line[1:].strip()
|
||||
else:
|
||||
# Yield meta data if found
|
||||
if '"meta":' in line:
|
||||
meta_start = line.find('"meta":') + 7
|
||||
meta_end = line.rfind('}')
|
||||
if meta_end > meta_start:
|
||||
meta_json = line[meta_start:meta_end + 1]
|
||||
try:
|
||||
meta_data = json.loads(meta_json)
|
||||
yield {"type": "meta", "data": meta_data}
|
||||
except json.JSONDecodeError as e:
|
||||
logger.warning(f"Failed to parse meta data: {e}")
|
||||
new_item = MTGJSONCard(
|
||||
card_id=item["card_id"],
|
||||
name=item["name"],
|
||||
set_code=item["set_code"],
|
||||
uuid=item["uuid"],
|
||||
abu_id=item.get("abu_id"),
|
||||
card_kingdom_etched_id=item.get("card_kingdom_etched_id"),
|
||||
card_kingdom_foil_id=item.get("card_kingdom_foil_id"),
|
||||
card_kingdom_id=item.get("card_kingdom_id"),
|
||||
cardsphere_id=item.get("cardsphere_id"),
|
||||
cardsphere_foil_id=item.get("cardsphere_foil_id"),
|
||||
cardtrader_id=item.get("cardtrader_id"),
|
||||
csi_id=item.get("csi_id"),
|
||||
mcm_id=item.get("mcm_id"),
|
||||
mcm_meta_id=item.get("mcm_meta_id"),
|
||||
miniaturemarket_id=item.get("miniaturemarket_id"),
|
||||
mtg_arena_id=item.get("mtg_arena_id"),
|
||||
mtgjson_foil_version_id=item.get("mtgjson_foil_version_id"),
|
||||
mtgjson_non_foil_version_id=item.get("mtgjson_non_foil_version_id"),
|
||||
mtgjson_v4_id=item.get("mtgjson_v4_id"),
|
||||
mtgo_foil_id=item.get("mtgo_foil_id"),
|
||||
mtgo_id=item.get("mtgo_id"),
|
||||
multiverse_id=item.get("multiverse_id"),
|
||||
scg_id=item.get("scg_id"),
|
||||
scryfall_id=item.get("scryfall_id"),
|
||||
scryfall_card_back_id=item.get("scryfall_card_back_id"),
|
||||
scryfall_oracle_id=item.get("scryfall_oracle_id"),
|
||||
scryfall_illustration_id=item.get("scryfall_illustration_id"),
|
||||
tcgplayer_product_id=item.get("tcgplayer_product_id"),
|
||||
tcgplayer_etched_product_id=item.get("tcgplayer_etched_product_id"),
|
||||
tnt_id=item.get("tnt_id")
|
||||
)
|
||||
else: # MTGJSONSKU
|
||||
# Check if SKU already exists
|
||||
existing_sku = db.query(MTGJSONSKU).filter(MTGJSONSKU.sku_id == item["sku_id"]).first()
|
||||
if existing_sku:
|
||||
continue
|
||||
|
||||
# Process the data section
|
||||
if data_started:
|
||||
if not current_key:
|
||||
# Look for a new key
|
||||
if '"' in line:
|
||||
key_start = line.find('"') + 1
|
||||
key_end = line.find('"', key_start)
|
||||
if key_end > key_start:
|
||||
current_key = line[key_start:key_end]
|
||||
# Get the rest of the line after the key
|
||||
line = line[key_end + 1:].strip()
|
||||
if ':' in line:
|
||||
line = line[line.find(':') + 1:].strip()
|
||||
new_item = MTGJSONSKU(
|
||||
sku_id=str(item["sku_id"]),
|
||||
product_id=str(item["product_id"]),
|
||||
condition=item["condition"],
|
||||
finish=item["finish"],
|
||||
language=item["language"],
|
||||
printing=item["printing"],
|
||||
card_id=item["card_id"]
|
||||
)
|
||||
db.add(new_item)
|
||||
processed += 1
|
||||
|
||||
if current_key:
|
||||
# Accumulate the value
|
||||
current_value.append(line)
|
||||
brace_count += line.count('{') - line.count('}')
|
||||
return processed
|
||||
|
||||
if brace_count == 0 and line.endswith(','):
|
||||
# We have a complete value
|
||||
value_str = ''.join(current_value).rstrip(',')
|
||||
try:
|
||||
value = json.loads(value_str)
|
||||
yield {"type": "item", "data": {current_key: value}}
|
||||
except json.JSONDecodeError as e:
|
||||
logger.warning(f"Failed to parse value for key {current_key}: {e}")
|
||||
current_key = None
|
||||
current_value = []
|
||||
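Despite its name, _stream_json_file above parses the whole document with json.load before yielding, so the full AllIdentifiers payload sits in memory at once. If incremental parsing ever matters, the third-party ijson package can walk the "data" object lazily; a minimal sketch, assuming ijson is added as a dependency (the meta record is skipped here):

import ijson  # third-party streaming JSON parser (assumed dependency)

def stream_json_file_incremental(file_path: str):
    """Yield the same {"type": "item", ...} events without loading the whole file."""
    with open(file_path, 'rb') as f:
        # kvitems walks key/value pairs under the top-level "data" object lazily
        for key, value in ijson.kvitems(f, 'data'):
            yield {"type": "item", "data": {key: value}}
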
    async def download_and_process_identifiers(self, db: Session) -> Dict[str, int]:
        """Download, unzip and process AllIdentifiers.json.zip using streaming"""
        self._print_progress("Starting MTGJSON identifiers processing...")
        start_time = time.time()

        # Download the file using FileService
        file_record = await self._download_file(
            db=db,
            url="https://mtgjson.com/api/v5/AllIdentifiers.json.zip",
@@ -175,22 +159,87 @@ class MTGJSONService(BaseExternalService):
subdir="identifiers"
|
||||
)
|
||||
|
||||
# Unzip and process the file
|
||||
json_path = await self._unzip_file(file_record, "identifiers", db)
|
||||
# Get the file path from the database record
|
||||
zip_path = file_record.path
|
||||
|
||||
# Return a generator that streams the JSON file
|
||||
return self._stream_json_file(json_path)
|
||||
cards_processed = 0
|
||||
current_batch = []
|
||||
total_cards = 0
|
||||
last_progress_time = time.time()
|
||||
|
||||
async def get_skus(self, db: Session) -> Generator[Dict[str, Any], None, None]:
|
||||
"""Download and get MTGJSON SKUs data"""
|
||||
# Check if we have a cached version
|
||||
cached_file = await self.file_service.get_file_by_filename(db, "TcgplayerSkus.json")
|
||||
if cached_file:
|
||||
# Ensure the file exists at the path
|
||||
if os.path.exists(cached_file.path):
|
||||
return self._stream_json_file(cached_file.path)
|
||||
self._print_progress("Processing cards...")
|
||||
try:
|
||||
for item in self._stream_json_file(zip_path):
|
||||
if item["type"] == "meta":
|
||||
self._print_progress(f"Processing MTGJSON data version {item['data'].get('version')} from {item['data'].get('date')}")
|
||||
continue
|
||||
|
||||
# Download and process the file
|
||||
card_data = item["data"]
|
||||
card_id = list(card_data.keys())[0]
|
||||
card_info = card_data[card_id]
|
||||
total_cards += 1
|
||||
|
||||
current_batch.append({
|
||||
"card_id": card_id,
|
||||
"name": card_info.get("name"),
|
||||
"set_code": card_info.get("setCode"),
|
||||
"uuid": card_info.get("uuid"),
|
||||
"abu_id": card_info.get("identifiers", {}).get("abuId"),
|
||||
"card_kingdom_etched_id": card_info.get("identifiers", {}).get("cardKingdomEtchedId"),
|
||||
"card_kingdom_foil_id": card_info.get("identifiers", {}).get("cardKingdomFoilId"),
|
||||
"card_kingdom_id": card_info.get("identifiers", {}).get("cardKingdomId"),
|
||||
"cardsphere_id": card_info.get("identifiers", {}).get("cardsphereId"),
|
||||
"cardsphere_foil_id": card_info.get("identifiers", {}).get("cardsphereFoilId"),
|
||||
"cardtrader_id": card_info.get("identifiers", {}).get("cardtraderId"),
|
||||
"csi_id": card_info.get("identifiers", {}).get("csiId"),
|
||||
"mcm_id": card_info.get("identifiers", {}).get("mcmId"),
|
||||
"mcm_meta_id": card_info.get("identifiers", {}).get("mcmMetaId"),
|
||||
"miniaturemarket_id": card_info.get("identifiers", {}).get("miniaturemarketId"),
|
||||
"mtg_arena_id": card_info.get("identifiers", {}).get("mtgArenaId"),
|
||||
"mtgjson_foil_version_id": card_info.get("identifiers", {}).get("mtgjsonFoilVersionId"),
|
||||
"mtgjson_non_foil_version_id": card_info.get("identifiers", {}).get("mtgjsonNonFoilVersionId"),
|
||||
"mtgjson_v4_id": card_info.get("identifiers", {}).get("mtgjsonV4Id"),
|
||||
"mtgo_foil_id": card_info.get("identifiers", {}).get("mtgoFoilId"),
|
||||
"mtgo_id": card_info.get("identifiers", {}).get("mtgoId"),
|
||||
"multiverse_id": card_info.get("identifiers", {}).get("multiverseId"),
|
||||
"scg_id": card_info.get("identifiers", {}).get("scgId"),
|
||||
"scryfall_id": card_info.get("identifiers", {}).get("scryfallId"),
|
||||
"scryfall_card_back_id": card_info.get("identifiers", {}).get("scryfallCardBackId"),
|
||||
"scryfall_oracle_id": card_info.get("identifiers", {}).get("scryfallOracleId"),
|
||||
"scryfall_illustration_id": card_info.get("identifiers", {}).get("scryfallIllustrationId"),
|
||||
"tcgplayer_product_id": card_info.get("identifiers", {}).get("tcgplayerProductId"),
|
||||
"tcgplayer_etched_product_id": card_info.get("identifiers", {}).get("tcgplayerEtchedProductId"),
|
||||
"tnt_id": card_info.get("identifiers", {}).get("tntId"),
|
||||
"data": card_info
|
||||
})
|
||||
|
||||
if len(current_batch) >= self.batch_size:
|
||||
batch_processed = await self._process_batch(db, current_batch, MTGJSONCard)
|
||||
cards_processed += batch_processed
|
||||
current_batch = []
|
||||
current_time = time.time()
|
||||
if current_time - last_progress_time >= 1.0: # Update progress every second
|
||||
self._print_progress(f"\r{self._format_progress(cards_processed, total_cards, start_time)}", end="")
|
||||
last_progress_time = current_time
|
||||
except Exception as e:
|
||||
self._print_progress(f"\nError during processing: {str(e)}")
|
||||
raise
|
||||
|
||||
# Process remaining items
|
||||
if current_batch:
|
||||
batch_processed = await self._process_batch(db, current_batch, MTGJSONCard)
|
||||
cards_processed += batch_processed
|
||||
|
||||
total_time = time.time() - start_time
|
||||
self._print_progress(f"\nProcessing complete! Processed {cards_processed} cards in {total_time:.1f} seconds")
|
||||
return {"cards_processed": cards_processed}
|
||||
|
||||
async def download_and_process_skus(self, db: Session) -> Dict[str, int]:
|
||||
"""Download, unzip and process TcgplayerSkus.json.zip using streaming"""
|
||||
self._print_progress("Starting MTGJSON SKUs processing...")
|
||||
start_time = time.time()
|
||||
|
||||
# Download the file using FileService
|
||||
file_record = await self._download_file(
|
||||
db=db,
|
||||
url="https://mtgjson.com/api/v5/TcgplayerSkus.json.zip",
|
||||
@@ -198,21 +247,64 @@ class MTGJSONService(BaseExternalService):
subdir="skus"
|
||||
)
|
||||
|
||||
# Unzip and process the file
|
||||
json_path = await self._unzip_file(file_record, "skus", db)
|
||||
# Get the file path from the database record
|
||||
zip_path = file_record.path
|
||||
|
||||
# Return a generator that streams the JSON file
|
||||
return self._stream_json_file(json_path)
|
||||
skus_processed = 0
|
||||
current_batch = []
|
||||
total_skus = 0
|
||||
last_progress_time = time.time()
|
||||
|
||||
async def clear_cache(self, db: Session) -> None:
|
||||
"""Clear all cached data"""
|
||||
self._print_progress("Processing SKUs...")
|
||||
try:
|
||||
# Delete all files in the mtgjson subdirectory
|
||||
files = await self.file_service.list_files(db, file_type=["json", "zip"])
|
||||
for file in files:
|
||||
if file.path.startswith("mtgjson/"):
|
||||
await self.file_service.delete_file(db, file.id)
|
||||
logger.info("MTGJSON cache cleared")
|
||||
for item in self._stream_json_file(zip_path):
|
||||
if item["type"] == "meta":
|
||||
self._print_progress(f"Processing MTGJSON SKUs version {item['data'].get('version')} from {item['data'].get('date')}")
|
||||
continue
|
||||
|
||||
# The data structure is {card_uuid: [sku1, sku2, ...]}
|
||||
for card_uuid, sku_list in item["data"].items():
|
||||
for sku in sku_list:
|
||||
total_skus += 1
|
||||
current_batch.append({
|
||||
"sku_id": str(sku.get("skuId")),
|
||||
"product_id": str(sku.get("productId")),
|
||||
"condition": sku.get("condition"),
|
||||
"finish": sku.get("finish"),
|
||||
"language": sku.get("language"),
|
||||
"printing": sku.get("printing"),
|
||||
"card_id": card_uuid,
|
||||
"data": sku
|
||||
})
|
||||
|
||||
if len(current_batch) >= self.batch_size:
|
||||
batch_processed = await self._process_batch(db, current_batch, MTGJSONSKU)
|
||||
skus_processed += batch_processed
|
||||
current_batch = []
|
||||
current_time = time.time()
|
||||
if current_time - last_progress_time >= 1.0: # Update progress every second
|
||||
self._print_progress(f"\r{self._format_progress(skus_processed, total_skus, start_time)}", end="")
|
||||
last_progress_time = current_time
|
||||
except Exception as e:
|
||||
logger.error(f"Error clearing cache: {e}")
|
||||
self._print_progress(f"\nError during processing: {str(e)}")
|
||||
raise
|
||||
|
||||
# Process remaining items
|
||||
if current_batch:
|
||||
batch_processed = await self._process_batch(db, current_batch, MTGJSONSKU)
|
||||
skus_processed += batch_processed
|
||||
|
||||
total_time = time.time() - start_time
|
||||
self._print_progress(f"\nProcessing complete! Processed {skus_processed} SKUs in {total_time:.1f} seconds")
|
||||
return {"skus_processed": skus_processed}
|
||||
|
||||
async def clear_cache(self) -> None:
|
||||
"""Clear all cached data"""
|
||||
for subdir in ["identifiers", "skus"]:
|
||||
dir_path = os.path.join(self.cache_dir, subdir)
|
||||
if os.path.exists(dir_path):
|
||||
for filename in os.listdir(dir_path):
|
||||
file_path = os.path.join(dir_path, filename)
|
||||
if os.path.isfile(file_path):
|
||||
os.unlink(file_path)
|
||||
print("MTGJSON cache cleared")
|
||||
|
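_print_progress and _format_progress are called above but not shown in these hunks; a plausible minimal pair consistent with the call sites (bodies assumed, not from the source):

import time

def _print_progress(self, message: str, end: str = "\n") -> None:
    # Flush immediately so \r-style updates repaint the same terminal line
    print(message, end=end, flush=True)

def _format_progress(self, processed: int, total: int, start_time: float) -> str:
    elapsed = time.time() - start_time
    rate = processed / elapsed if elapsed > 0 else 0.0
    return f"Processed {processed}/{total} items ({rate:.0f}/s, {elapsed:.1f}s elapsed)"
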
@@ -3,49 +3,256 @@ from datetime import datetime, timedelta
import csv
import io
from app.services.external_api.base_external_service import BaseExternalService
from app.models.tcgplayer_group import TCGPlayerGroup
from app.models.tcgplayer_product import TCGPlayerProduct
from app.models.tcgplayer_category import TCGPlayerCategory
from app.db.database import get_db, transaction
from sqlalchemy.orm import Session
import py7zr
import os
from app.schemas.file import FileInDB

class TCGCSVService(BaseExternalService):
    def __init__(self):
        super().__init__(base_url="https://tcgcsv.com/")

    async def get_groups(self, game_ids: List[int]) -> Dict[str, Any]:
        """Fetch groups for specific game IDs from TCGCSV API"""
        game_ids_str = ",".join(map(str, game_ids))
        endpoint = f"tcgplayer/{game_ids_str}/groups"
        return await self._make_request("GET", endpoint)

    async def get_products_and_prices(self, game_ids: List[int], group_id: int) -> List[Dict[str, Any]]:
        """Fetch products and prices for a specific group from TCGCSV API"""
        game_ids_str = ",".join(map(str, game_ids))
        endpoint = f"tcgplayer/{game_ids_str}/{group_id}/ProductsAndPrices.csv"
        response = await self._make_request("GET", endpoint, headers={"Accept": "text/csv"})

        # Parse CSV response
        csv_data = io.StringIO(response)
        reader = csv.DictReader(csv_data)
        return list(reader)

    async def get_categories(self) -> Dict[str, Any]:
        """Fetch all categories from TCGCSV API"""
        endpoint = "tcgplayer/categories"
        return await self._make_request("GET", endpoint)

    async def get_archived_prices_for_date(self, db: Session, date_str: str) -> str:
        """Fetch archived prices from TCGCSV API"""
        # Download the archive file
        endpoint = f"archive/tcgplayer/prices-{date_str}.ppmd.7z"
        response = await self._make_request("GET", endpoint, binary=True)

        # Save the archive file using FileService
        file_record = await self.save_file(
            db=db,
            file_data=response,
            file_name=f"prices-{date_str}.ppmd.7z",
            subdir="tcgcsv/prices/zip",
            file_type="application/x-7z-compressed"
        )

        # Extract the 7z file
        with py7zr.SevenZipFile(file_record.path, 'r') as archive:
            # Extract to a directory named after the date
            extract_path = f"app/data/cache/tcgcsv/prices/{date_str}"
            os.makedirs(extract_path, exist_ok=True)
            archive.extractall(path=extract_path)

        return date_str

    async def get_archived_prices_for_date_range(self, db: Session, start_date: str, end_date: str):
        """Fetch archived prices for a date range from TCGCSV API"""
        # Convert string dates to datetime objects
        start_dt = datetime.strptime(start_date, "%Y-%m-%d")
        end_dt = datetime.strptime(end_date, "%Y-%m-%d")

        # Set minimum start date (earliest archive available)
        min_start_date = datetime.strptime("2025-02-08", "%Y-%m-%d")
        if start_dt < min_start_date:
            start_dt = min_start_date

        # Set maximum end date to today
        today = datetime.now()
        if end_dt > today:
            end_dt = today

        # Generate date range
        date_range = []
        current_dt = start_dt
        while current_dt <= end_dt:
            date_range.append(current_dt.strftime("%Y-%m-%d"))
            current_dt += timedelta(days=1)

        # Process each date (db is threaded through so each archive can be saved)
        for date_str in date_range:
            await self.get_archived_prices_for_date(db, date_str)

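The clamping above is easy to check in isolation; the same logic as a pure function, for illustration:

from datetime import datetime, timedelta

def clamp_date_range(start: str, end: str, floor: str = "2025-02-08") -> list[str]:
    start_dt = max(datetime.strptime(start, "%Y-%m-%d"), datetime.strptime(floor, "%Y-%m-%d"))
    end_dt = min(datetime.strptime(end, "%Y-%m-%d"), datetime.now())
    days = []
    current = start_dt
    while current <= end_dt:
        days.append(current.strftime("%Y-%m-%d"))
        current += timedelta(days=1)
    return days

print(clamp_date_range("2025-01-01", "2025-02-10"))  # starts at the 2025-02-08 floor
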
    async def sync_groups_to_db(self, db: Session, game_ids: List[int]) -> List[TCGPlayerGroup]:
        """Fetch groups from API and sync them to the database"""
        response = await self.get_groups(game_ids)

        if not response.get("success"):
            raise Exception(f"Failed to fetch groups: {response.get('errors')}")

        groups = response.get("results", [])
        synced_groups = []
        with transaction(db):
            for group_data in groups:
                # Convert string dates to datetime objects
                published_on = datetime.fromisoformat(group_data["publishedOn"].replace("Z", "+00:00")) if group_data.get("publishedOn") else None
                modified_on = datetime.fromisoformat(group_data["modifiedOn"].replace("Z", "+00:00")) if group_data.get("modifiedOn") else None

                # Check if group already exists
                existing_group = db.query(TCGPlayerGroup).filter(TCGPlayerGroup.group_id == group_data["groupId"]).first()

                if existing_group:
                    # Update existing group
                    for key, value in {
                        "name": group_data["name"],
                        "abbreviation": group_data.get("abbreviation"),
                        "is_supplemental": group_data.get("isSupplemental", False),
                        "published_on": published_on,
                        "modified_on": modified_on,
                        "category_id": group_data.get("categoryId")
                    }.items():
                        setattr(existing_group, key, value)
                    synced_groups.append(existing_group)
                else:
                    # Create new group
                    new_group = TCGPlayerGroup(
                        group_id=group_data["groupId"],
                        name=group_data["name"],
                        abbreviation=group_data.get("abbreviation"),
                        is_supplemental=group_data.get("isSupplemental", False),
                        published_on=published_on,
                        modified_on=modified_on,
                        category_id=group_data.get("categoryId")
                    )
                    db.add(new_group)
                    synced_groups.append(new_group)

        return synced_groups

    async def sync_products_to_db(self, db: Session, game_id: int, group_id: int) -> List[TCGPlayerProduct]:
        """Fetch products and prices for a group and sync them to the database"""
        products_data = await self.get_products_and_prices([game_id], group_id)
        synced_products = []

        # Convert price strings to floats, handling empty strings
        def parse_price(price_str):
            return float(price_str) if price_str else None

        with transaction(db):
            for product_data in products_data:
                # Convert string dates to datetime objects
                modified_on = datetime.fromisoformat(product_data["modifiedOn"].replace("Z", "+00:00")) if product_data.get("modifiedOn") else None

                # Check if product already exists
                existing_product = db.query(TCGPlayerProduct).filter(TCGPlayerProduct.product_id == int(product_data["productId"])).first()

                if existing_product:
                    # Update existing product
                    for key, value in {
                        "name": product_data["name"],
                        "clean_name": product_data.get("cleanName"),
                        "image_url": product_data.get("imageUrl"),
                        "category_id": int(product_data["categoryId"]),
                        "group_id": int(product_data["groupId"]),
                        "url": product_data.get("url"),
                        "modified_on": modified_on,
                        "image_count": int(product_data.get("imageCount", 0)),
                        "ext_rarity": product_data.get("extRarity"),
                        "ext_number": product_data.get("extNumber"),
                        "low_price": parse_price(product_data.get("lowPrice")),
                        "mid_price": parse_price(product_data.get("midPrice")),
                        "high_price": parse_price(product_data.get("highPrice")),
                        "market_price": parse_price(product_data.get("marketPrice")),
                        "direct_low_price": parse_price(product_data.get("directLowPrice")),
                        "sub_type_name": product_data.get("subTypeName")
                    }.items():
                        setattr(existing_product, key, value)
                    synced_products.append(existing_product)
                else:
                    # Create new product
                    new_product = TCGPlayerProduct(
                        product_id=int(product_data["productId"]),
                        name=product_data["name"],
                        clean_name=product_data.get("cleanName"),
                        image_url=product_data.get("imageUrl"),
                        category_id=int(product_data["categoryId"]),
                        group_id=int(product_data["groupId"]),
                        url=product_data.get("url"),
                        modified_on=modified_on,
                        image_count=int(product_data.get("imageCount", 0)),
                        ext_rarity=product_data.get("extRarity"),
                        ext_number=product_data.get("extNumber"),
                        low_price=parse_price(product_data.get("lowPrice")),
                        mid_price=parse_price(product_data.get("midPrice")),
                        high_price=parse_price(product_data.get("highPrice")),
                        market_price=parse_price(product_data.get("marketPrice")),
                        direct_low_price=parse_price(product_data.get("directLowPrice")),
                        sub_type_name=product_data.get("subTypeName")
                    )
                    db.add(new_product)
                    synced_products.append(new_product)

        return synced_products

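parse_price exists because csv.DictReader yields empty strings, not None, for blank price cells:

def parse_price(price_str):
    return float(price_str) if price_str else None

assert parse_price("1.23") == 1.23
assert parse_price("") is None      # empty CSV cell
assert parse_price(None) is None    # missing column
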
    async def sync_categories_to_db(self, db: Session) -> List[TCGPlayerCategory]:
        """Fetch categories from API and sync them to the database"""
        response = await self.get_categories()

        if not response.get("success"):
            raise Exception(f"Failed to fetch categories: {response.get('errors')}")

        categories = response.get("results", [])
        synced_categories = []
        with transaction(db):
            for category_data in categories:
                # Convert string dates to datetime objects
                modified_on = datetime.fromisoformat(category_data["modifiedOn"].replace("Z", "+00:00")) if category_data.get("modifiedOn") else None

                # Check if category already exists
                existing_category = db.query(TCGPlayerCategory).filter(TCGPlayerCategory.category_id == category_data["categoryId"]).first()

                if existing_category:
                    # Update existing category
                    for key, value in {
                        "name": category_data["name"],
                        "display_name": category_data.get("displayName"),
                        "seo_category_name": category_data.get("seoCategoryName"),
                        "category_description": category_data.get("categoryDescription"),
                        "category_page_title": category_data.get("categoryPageTitle"),
                        "sealed_label": category_data.get("sealedLabel"),
                        "non_sealed_label": category_data.get("nonSealedLabel"),
                        "condition_guide_url": category_data.get("conditionGuideUrl"),
                        "is_scannable": category_data.get("isScannable", False),
                        "popularity": category_data.get("popularity", 0),
                        "is_direct": category_data.get("isDirect", False),
                        "modified_on": modified_on
                    }.items():
                        setattr(existing_category, key, value)
                    synced_categories.append(existing_category)
                else:
                    # Create new category
                    new_category = TCGPlayerCategory(
                        category_id=category_data["categoryId"],
                        name=category_data["name"],
                        display_name=category_data.get("displayName"),
                        seo_category_name=category_data.get("seoCategoryName"),
                        category_description=category_data.get("categoryDescription"),
                        category_page_title=category_data.get("categoryPageTitle"),
                        sealed_label=category_data.get("sealedLabel"),
                        non_sealed_label=category_data.get("nonSealedLabel"),
                        condition_guide_url=category_data.get("conditionGuideUrl"),
                        is_scannable=category_data.get("isScannable", False),
                        popularity=category_data.get("popularity", 0),
                        is_direct=category_data.get("isDirect", False),
                        modified_on=modified_on
                    )
                    db.add(new_category)
                    synced_categories.append(new_category)

        return synced_categories

@@ -6,7 +6,7 @@ import json
from datetime import datetime
from sqlalchemy.orm import Session
from app.db.database import transaction
from app.models.tcgplayer_inventory import TCGPlayerInventory
from app.models.inventory import Inventory
from app.models.tcgplayer_product import TCGPlayerProduct
from app.services.inventory_service import InventoryService

@@ -150,10 +150,3 @@ class FileService:
            return FileInDB.model_validate(file_record)
        else:
            return None

    async def get_file_by_filename(self, db: Session, filename: str) -> Optional[FileInDB]:
        """Get a file record from the database by filename"""
        file_record = db.query(File).filter(File.name == filename).first()
        if file_record:
            return FileInDB.model_validate(file_record)
        return None
@@ -1,13 +1,13 @@
from typing import List, Optional, Dict
from sqlalchemy.orm import Session
from app.models.tcgplayer_inventory import TCGPlayerInventory
from app.models.inventory import Inventory
from app.services.base_service import BaseService

class InventoryService(BaseService[Inventory]):
    def __init__(self):
        super().__init__(Inventory)

    def create(self, db: Session, obj_in: Dict) -> Inventory:
        """
        Create a new inventory item in the database.

@@ -20,7 +20,7 @@ class InventoryService(BaseService[TCGPlayerInventory]):
"""
|
||||
return super().create(db, obj_in)
|
||||
|
||||
def update(self, db: Session, db_obj: TCGPlayerInventory, obj_in: Dict) -> TCGPlayerInventory:
|
||||
def update(self, db: Session, db_obj: Inventory, obj_in: Dict) -> Inventory:
|
||||
"""
|
||||
Update an existing inventory item in the database.
|
||||
|
||||
@@ -34,7 +34,7 @@ class InventoryService(BaseService[TCGPlayerInventory]):
"""
|
||||
return super().update(db, db_obj, obj_in)
|
||||
|
||||
def get_by_tcgplayer_id(self, db: Session, tcgplayer_id: str) -> Optional[TCGPlayerInventory]:
|
||||
def get_by_tcgplayer_id(self, db: Session, tcgplayer_id: str) -> Optional[Inventory]:
|
||||
"""
|
||||
Get an inventory item by its TCGPlayer ID.
|
||||
|
||||
@@ -43,11 +43,11 @@ class InventoryService(BaseService[TCGPlayerInventory]):
            tcgplayer_id: The TCGPlayer ID to find

        Returns:
            Optional[Inventory]: The inventory item if found, None otherwise
        """
        return db.query(self.model).filter(self.model.tcgplayer_id == tcgplayer_id).first()

    def get_by_set(self, db: Session, set_name: str, skip: int = 0, limit: int = 100) -> List[Inventory]:
        """
        Get all inventory items from a specific set.

@@ -58,6 +58,6 @@ class InventoryService(BaseService[TCGPlayerInventory]):
            limit: Maximum number of records to return

        Returns:
            List[Inventory]: List of inventory items from the specified set
        """
        return db.query(self.model).filter(self.model.set_name == set_name).offset(skip).limit(limit).all()
@@ -142,14 +142,13 @@ class LabelPrinterService:
logger.error(f"Unexpected error in _send_print_request: {e}")
|
||||
return False
|
||||
|
||||
async def print_file(self, file_path: Union[str, Path, FileInDB], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label", "return_label", "pirate_ship_label"]] = None, copies: Optional[int] = None) -> bool:
|
||||
async def print_file(self, file_path: Union[str, Path, FileInDB], label_size: Literal["dk1201", "dk1241"], label_type: Optional[Literal["address_label", "packing_slip", "set_label"]] = None) -> bool:
|
||||
"""Print a PDF or PNG file to the label printer.
|
||||
|
||||
Args:
|
||||
file_path: Path to the PDF or PNG file, or a FileInDB object
|
||||
label_size: Size of label to use ("dk1201" or "dk1241")
|
||||
label_type: Type of label to use ("address_label" or "packing_slip" or "set_label")
|
||||
copies: Optional number of copies to print. If None, prints once.
|
||||
|
||||
Returns:
|
||||
bool: True if print was successful, False otherwise
|
||||
@@ -207,7 +206,7 @@ class LabelPrinterService:
            resized_image = resized_image.resize((991, 306), Image.Resampling.LANCZOS)

            # Address and set labels are printed rotated 90 degrees, without a cut
            if label_type == "address_label" or label_type == "set_label":
                rotate = "90"
                cut = False
            else:
@@ -241,21 +240,7 @@ class LabelPrinterService:
with open(cache_path, "wb") as f:
|
||||
f.write(converted_image)
|
||||
|
||||
if copies:
|
||||
# Send to API for each copy
|
||||
for copy in range(copies):
|
||||
logger.info(f"Printing copy {copy + 1} of {copies}")
|
||||
if not await self._send_print_request(cache_path):
|
||||
logger.error(f"Failed to print page {i+1}, copy {copy + 1}")
|
||||
return False
|
||||
|
||||
# Wait for printer to be ready before next copy or page
|
||||
if copy < copies - 1 or i < len(images) - 1:
|
||||
if not await self._wait_for_printer_ready():
|
||||
logger.error("Printer not ready for next copy/page")
|
||||
return False
|
||||
else:
|
||||
# Send to API once (original behavior)
|
||||
# Send to API
|
||||
if not await self._send_print_request(cache_path):
|
||||
logger.error(f"Failed to print page {i+1}")
|
||||
return False
|
||||
|
@@ -1,4 +1,4 @@
from app.db.database import transaction, get_db
from app.services.scheduler.base_scheduler import BaseScheduler
import logging

@@ -17,10 +17,11 @@ class SchedulerService:
            self._service_manager = ServiceManager()
        return self._service_manager

    async def update_open_orders_hourly(self):
        """
        Hourly update of orders from TCGPlayer API to database
        """
        db = next(get_db())
        try:
            logger.info("Starting hourly order update")
            # Get order management service
@@ -38,11 +39,14 @@ class SchedulerService:
        except Exception as e:
            logger.error(f"Error updating open orders: {str(e)}")
            raise
        finally:
            db.close()

    async def update_all_orders_daily(self):
        """
        Daily update of all orders from TCGPlayer API to database
        """
        db = next(get_db())
        try:
            logger.info("Starting daily order update")
            # Get order management service
@@ -60,19 +64,21 @@ class SchedulerService:
        except Exception as e:
            logger.error(f"Error updating all orders: {str(e)}")
            raise
        finally:
            db.close()

    async def start_scheduled_tasks(self):
        """Start all scheduled tasks"""
        # Schedule open orders update to run hourly
        await self.scheduler.schedule_task(
            task_name="update_open_orders_hourly",
            func=self.update_open_orders_hourly,
            interval_seconds=60 * 60,  # 1 hour
        )
        # Schedule all orders update to run daily
        await self.scheduler.schedule_task(
            task_name="update_all_orders_daily",
            func=self.update_all_orders_daily,
            interval_seconds=24 * 60 * 60,  # 24 hours
        )

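Each task now owns its session: next(get_db()) pulls the Session out of the generator dependency, and the finally block closes it so long-running schedules do not exhaust the pool. The same pattern in isolation (generator shape assumed):

def run_with_session(get_db, work):
    db = next(get_db())   # take the yielded session
    try:
        return work(db)
    finally:
        db.close()        # mirror the scheduler's finally: db.close()
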
@@ -26,9 +26,7 @@ class ServiceManager:
            'set_label': 'app.services.set_label_service.SetLabelService',
            'data_initialization': 'app.services.data_initialization.DataInitializationService',
            'scheduler': 'app.services.scheduler.scheduler_service.SchedulerService',
            'file': 'app.services.file_service.FileService'
        }
        self._service_configs = {
            'label_printer': {'printer_api_url': "http://192.168.1.110:8000"},
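The dotted-path strings suggest services are imported lazily on first use; a minimal sketch of that resolution step (the real ServiceManager may differ):

import importlib

def resolve_service(dotted_path: str):
    """Split 'pkg.module.ClassName' and import the class on demand."""
    module_path, class_name = dotted_path.rsplit('.', 1)
    module = importlib.import_module(module_path)
    return getattr(module, class_name)

# resolve_service('app.services.file_service.FileService') -> the FileService class
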
@@ -228,233 +228,6 @@ async function generateAddressLabels() {
    }
}

// Show return labels modal
function showReturnLabelsModal() {
    const modal = document.getElementById('returnLabelsModal');
    modal.classList.remove('hidden');
    modal.classList.add('flex');
}

// Close return labels modal
function closeReturnLabelsModal() {
    const modal = document.getElementById('returnLabelsModal');
    modal.classList.remove('flex');
    modal.classList.add('hidden');
}

// Submit return labels request
async function submitReturnLabels() {
    try {
        const numberOfLabels = document.getElementById('numberOfLabels').value;
        if (!numberOfLabels || numberOfLabels < 1) {
            showToast('Please enter a valid number of labels', 'error');
            return;
        }

        setLoading(true);
        const response = await fetch(`${API_BASE_URL}/orders/generate-return-labels`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                number_of_labels: parseInt(numberOfLabels)
            })
        });

        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(errorData.detail || 'Failed to generate return labels');
        }

        showToast('Return labels generated successfully');
        closeReturnLabelsModal();
    } catch (error) {
        showToast('Error generating return labels: ' + error.message, 'error');
    } finally {
        setLoading(false);
    }
}

// Generate return labels (opens modal)
function generateReturnLabels() {
    showReturnLabelsModal();
}

// Show Pirate Ship label modal
function showPirateShipModal() {
    const modal = document.getElementById('pirateShipModal');
    modal.classList.remove('hidden');
    modal.classList.add('flex');
}

// Close Pirate Ship label modal
function closePirateShipModal() {
    const modal = document.getElementById('pirateShipModal');
    modal.classList.remove('flex');
    modal.classList.add('hidden');
    // Reset file input
    document.getElementById('pirateShipFile').value = '';
}

// Submit Pirate Ship label
async function submitPirateShipLabel() {
    try {
        const fileInput = document.getElementById('pirateShipFile');
        const file = fileInput.files[0];

        if (!file) {
            showToast('Please select a PDF file', 'error');
            return;
        }

        if (file.type !== 'application/pdf') {
            showToast('Please select a valid PDF file', 'error');
            return;
        }

        setLoading(true);
        const formData = new FormData();
        formData.append('file', file);

        const response = await fetch(`${API_BASE_URL}/orders/print-pirate-ship-label`, {
            method: 'POST',
            body: formData
        });

        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(errorData.detail || 'Failed to print Pirate Ship label');
        }

        showToast('Pirate Ship label printed successfully');
        closePirateShipModal();
    } catch (error) {
        showToast('Error printing Pirate Ship label: ' + error.message, 'error');
    } finally {
        setLoading(false);
    }
}

// Show set labels modal
function showSetLabelsModal() {
    const modal = document.getElementById('setLabelsModal');
    modal.classList.remove('hidden');
    modal.classList.add('flex');
    fetchAvailableSets();

    // Add event listener for search input
    const searchInput = document.getElementById('setSearch');
    searchInput.addEventListener('input', filterSets);
}

// Close set labels modal
function closeSetLabelsModal() {
    const modal = document.getElementById('setLabelsModal');
    modal.classList.remove('flex');
    modal.classList.add('hidden');

    // Clear search input
    document.getElementById('setSearch').value = '';
}

// Filter sets based on search input
function filterSets() {
    const searchTerm = document.getElementById('setSearch').value.toLowerCase();
    const setItems = document.querySelectorAll('#setLabelsList > div');

    setItems.forEach(item => {
        const label = item.querySelector('label');
        const text = label.textContent.toLowerCase();
        if (text.includes(searchTerm)) {
            item.style.display = 'flex';
        } else {
            item.style.display = 'none';
        }
    });
}

// Fetch available sets from the API
async function fetchAvailableSets() {
    try {
        setLoading(true);
        const response = await fetch(`${API_BASE_URL}/set-labels/available-sets`);
        if (!response.ok) {
            throw new Error('Failed to fetch available sets');
        }

        const sets = await response.json();
        displayAvailableSets(sets);
    } catch (error) {
        showToast('Error fetching available sets: ' + error.message, 'error');
    } finally {
        setLoading(false);
    }
}

// Display available sets in the modal
function displayAvailableSets(sets) {
    const setList = document.getElementById('setLabelsList');
    setList.innerHTML = '';

    if (!sets || sets.length === 0) {
        setList.innerHTML = '<div class="text-center text-gray-400 py-4">No sets available</div>';
        return;
    }

    // Sort sets alphabetically by name
    sets.sort((a, b) => a.name.localeCompare(b.name));

    sets.forEach(set => {
        const setItem = document.createElement('div');
        setItem.className = 'flex items-center p-2 hover:bg-gray-600 rounded-lg cursor-pointer';
        setItem.innerHTML = `
            <input type="checkbox" id="set-${set.code}" class="rounded border-gray-600 bg-gray-800 text-teal-600 focus:ring-teal-500">
            <label for="set-${set.code}" class="ml-2 text-gray-300">${set.name} (${set.code})</label>
        `;
        setList.appendChild(setItem);
    });

    // Trigger initial filter in case there's text in the search box
    filterSets();
}

// Submit set labels request
async function submitSetLabels() {
    try {
        const selectedSets = Array.from(document.querySelectorAll('#setLabelsList input[type="checkbox"]:checked'))
            .map(checkbox => checkbox.id.replace('set-', ''));

        if (selectedSets.length === 0) {
            showToast('Please select at least one set', 'error');
            return;
        }

        setLoading(true);
        const response = await fetch(`${API_BASE_URL}/set-labels/generate`, {
            method: 'POST',
            headers: {
                'Content-Type': 'application/json',
            },
            body: JSON.stringify({
                sets: selectedSets
            })
        });

        if (!response.ok) {
            const errorData = await response.json();
            throw new Error(errorData.detail || 'Failed to generate set labels');
        }

        showToast('Set labels generated successfully');
        closeSetLabelsModal();
    } catch (error) {
        showToast('Error generating set labels: ' + error.message, 'error');
    } finally {
        setLoading(false);
    }
}

// Load orders when page loads
document.addEventListener('DOMContentLoaded', () => {
    fetchOrders();
@@ -39,15 +39,6 @@
<button onclick="generateAddressLabels()" class="px-4 py-2 bg-indigo-600 text-white rounded-lg hover:bg-indigo-700 focus:outline-none focus:ring-2 focus:ring-indigo-500 focus:ring-offset-2 transition-colors">
|
||||
Generate Address Labels
|
||||
</button>
|
||||
<button onclick="generateReturnLabels()" class="px-4 py-2 bg-red-600 text-white rounded-lg hover:bg-red-700 focus:outline-none focus:ring-2 focus:ring-red-500 focus:ring-offset-2 transition-colors">
|
||||
Generate Return Labels
|
||||
</button>
|
||||
<button onclick="showPirateShipModal()" class="px-4 py-2 bg-yellow-600 text-white rounded-lg hover:bg-yellow-700 focus:outline-none focus:ring-2 focus:ring-yellow-500 focus:ring-offset-2 transition-colors">
|
||||
Upload Pirate Ship Label
|
||||
</button>
|
||||
<button onclick="showSetLabelsModal()" class="px-4 py-2 bg-teal-600 text-white rounded-lg hover:bg-teal-700 focus:outline-none focus:ring-2 focus:ring-teal-500 focus:ring-offset-2 transition-colors">
|
||||
Generate Set Labels
|
||||
</button>
|
||||
</div>
|
||||
<div id="labelOptions" class="bg-gray-700 rounded-lg p-4">
|
||||
<label class="block text-sm font-medium text-gray-300 mb-2">Label Type</label>
|
||||
@ -58,69 +49,6 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Return Labels Modal -->
|
||||
<div id="returnLabelsModal" class="fixed inset-0 bg-black bg-opacity-50 hidden items-center justify-center">
|
||||
<div class="bg-gray-800 rounded-lg p-6 max-w-md w-full mx-4">
|
||||
<h3 class="text-xl font-semibold text-gray-100 mb-4">Generate Return Labels</h3>
|
||||
<div class="mb-4">
|
||||
<label for="numberOfLabels" class="block text-sm font-medium text-gray-300 mb-2">Number of Labels</label>
|
||||
<input type="number" id="numberOfLabels" min="1" value="1" class="w-full rounded-lg border-gray-600 bg-gray-700 text-gray-100 focus:ring-blue-500 focus:border-blue-500">
|
||||
</div>
|
||||
<div class="flex justify-end space-x-3">
|
||||
<button onclick="closeReturnLabelsModal()" class="px-4 py-2 bg-gray-600 text-white rounded-lg hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-gray-500 focus:ring-offset-2 transition-colors">
|
||||
Cancel
|
||||
</button>
|
||||
<button onclick="submitReturnLabels()" class="px-4 py-2 bg-red-600 text-white rounded-lg hover:bg-red-700 focus:outline-none focus:ring-2 focus:ring-red-500 focus:ring-offset-2 transition-colors">
|
||||
Generate
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Pirate Ship Label Modal -->
|
||||
<div id="pirateShipModal" class="fixed inset-0 bg-black bg-opacity-50 hidden items-center justify-center">
|
||||
<div class="bg-gray-800 rounded-lg p-6 max-w-md w-full mx-4">
|
||||
<h3 class="text-xl font-semibold text-gray-100 mb-4">Upload Pirate Ship Label</h3>
|
||||
<div class="mb-4">
|
||||
<label for="pirateShipFile" class="block text-sm font-medium text-gray-300 mb-2">Select PDF File</label>
|
||||
<input type="file" id="pirateShipFile" accept=".pdf" class="w-full rounded-lg border-gray-600 bg-gray-700 text-gray-100 focus:ring-blue-500 focus:border-blue-500">
|
||||
</div>
|
||||
<div class="flex justify-end space-x-3">
|
||||
<button onclick="closePirateShipModal()" class="px-4 py-2 bg-gray-600 text-white rounded-lg hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-gray-500 focus:ring-offset-2 transition-colors">
|
||||
Cancel
|
||||
</button>
|
||||
<button onclick="submitPirateShipLabel()" class="px-4 py-2 bg-yellow-600 text-white rounded-lg hover:bg-yellow-700 focus:outline-none focus:ring-2 focus:ring-yellow-500 focus:ring-offset-2 transition-colors">
|
||||
Upload & Print
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<!-- Set Labels Modal -->
|
||||
<div id="setLabelsModal" class="fixed inset-0 bg-black bg-opacity-50 hidden items-center justify-center">
|
||||
<div class="bg-gray-800 rounded-lg p-6 max-w-md w-full mx-4">
|
||||
<h3 class="text-xl font-semibold text-gray-100 mb-4">Generate Set Labels</h3>
|
||||
<div class="mb-4">
|
||||
<div class="mb-2">
|
||||
<label for="setSearch" class="block text-sm font-medium text-gray-300 mb-2">Search Sets</label>
|
||||
<input type="text" id="setSearch" placeholder="Search sets..." class="w-full rounded-lg border-gray-600 bg-gray-700 text-gray-100 focus:ring-blue-500 focus:border-blue-500">
|
||||
</div>
|
||||
<label class="block text-sm font-medium text-gray-300 mb-2">Select Sets</label>
|
||||
<div id="setLabelsList" class="max-h-60 overflow-y-auto bg-gray-700 rounded-lg p-2">
|
||||
<!-- Sets will be populated here -->
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex justify-end space-x-3">
|
||||
<button onclick="closeSetLabelsModal()" class="px-4 py-2 bg-gray-600 text-white rounded-lg hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-gray-500 focus:ring-offset-2 transition-colors">
|
||||
Cancel
|
||||
</button>
|
||||
<button onclick="submitSetLabels()" class="px-4 py-2 bg-teal-600 text-white rounded-lg hover:bg-teal-700 focus:outline-none focus:ring-2 focus:ring-teal-500 focus:ring-offset-2 transition-colors">
|
||||
Generate
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
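
<!-- Hypothetical wiring sketch (not part of this diff): the setSearch box above is
     presumably bound to the filterSets() helper so the list re-filters as the user
     types, e.g.: -->
<script>
    // Assumes the filterSets() helper sketched earlier in this file
    document.getElementById('setSearch').addEventListener('input', filterSets);
</script>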

<!-- Order List Section -->
<div class="bg-gray-800 rounded-xl shadow-sm p-6">
    <div class="flex items-center justify-between mb-6">