Squashed commit of the following:

All commits by zman <joshua.k.rzemien@gmail.com>:

  893b229cc6b35c09181a84050f34fb79024e41c2  Fri Feb 7 22:14:08 2025 -0500  j
  06f539aea2f4fff9da7038d43d0de553c4423796  Fri Feb 7 21:55:30 2025 -0500  fk
  d0c2960ec9f334448d2eb3573b9d7817482abf46  Fri Feb 7 21:50:53 2025 -0500  frick
  6b1362c166fc5f51c3bcf316a99116f0d11074a5  Fri Feb 7 21:49:40 2025 -0500  database
  8cadc6df4c817d9d05503807e56287fd00e5e939  Fri Feb 7 21:38:09 2025 -0500  asdf
  1ca6f9868452e34143b8df4a412be35e6902a31e  Fri Feb 7 21:32:50 2025 -0500  fffff
  8bb337a9c35e830ef9ce3dac0a0f2df3fe9bc5a0  Fri Feb 7 21:31:13 2025 -0500  ffff
  65aba280c55fa09c6a37f688f485efab1f70792b  Fri Feb 7 21:26:16 2025 -0500  aa
  59ef03a59ee4a15c30e080a1aef7c31c0214a2e3  Fri Feb 7 21:24:21 2025 -0500  asdf
  f44d5740fc9315ccb0792ecac3e8ec9f28f171be  Fri Feb 7 21:23:32 2025 -0500  aaa
  13c96b164316b4908d9d01e454cbdc9103157558  Fri Feb 7 21:18:54 2025 -0500  sdf
  949c795fd13d93c9618613740fb093f6bb7b7710  Fri Feb 7 21:17:53 2025 -0500  asdf
  8c3cd423fe228e8aff112a050170246a5fc9f8bd  Fri Feb 7 20:56:01 2025 -0500  app2
  78eafc739ebb7f100f657964b3ad8f4937a4046b  Fri Feb 7 20:54:55 2025 -0500  app
  dc47eced143e77ebec415bdfbe209d9466b7bcf1  Fri Feb 7 20:43:15 2025 -0500  asdfasdfasdf
  e24bcae88cf8c14ea543f49b639b2976c627d201  Fri Feb 7 20:39:44 2025 -0500  a
  c894451bfe790c97ac0e01085615d7c7288a39da  Fri Feb 7 20:38:20 2025 -0500  req
  3d09869562a96b5adc7c4be279bc8c003bbb37b2  Fri Feb 7 20:33:27 2025 -0500  wrong number = code dont work lol i love computers
  4c93a1271b8aea159cf53f8d7879b00513886d6f  Fri Feb 7 20:29:39 2025 -0500  q
  1f5361da88fe3903a1e92a345fa56bb390f69d92  Fri Feb 7 18:27:20 2025 -0500  same as original code now -5 days of my life
  511b070cbbcd29b4e784e9a09d58481e50e6e82f  Fri Feb 7 13:52:28 2025 -0500  pricey worky
  964fdd641b63530c59e038ebc7d1e01e9570d75c  Fri Feb 7 11:37:29 2025 -0500  prep for pricing service work
  a78c3bcba303c2605b6277c1db33b155abe4db1b  Wed Feb 5 21:51:22 2025 -0500  more stuff yay
  bd9cfca7a95c89b2140eec57bf52bc84432b9a4e  Tue Feb 4 22:30:33 2025 -0500  GIGA FIXED EVERYTHING OMG
  85510a46713e0ac660e70c7befb4e94ccf11912e  Tue Feb 4 00:01:34 2025 -0500  data model change and some new services

This commit is contained in:
  parent 37a5dac06a
  commit cc365970a9

.gitignore (vendored, 4 lines added)
@@ -170,3 +170,7 @@ cython_debug/
 
 # my stuff
 *.db
+temp/
+.DS_Store
+*.db-journal
+cookies/

Dockerfile (new file, 15 lines)

FROM python:3.13-slim

WORKDIR /app

ENV DATABASE_URL=postgresql://poggers:giga!@192.168.1.41:5432/omegatcgdb

COPY requirements.txt .

RUN pip install --no-cache-dir -r requirements.txt

COPY . .

EXPOSE 8000

CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]

alembic.ini (new file, 119 lines)

# A generic, single database configuration.

[alembic]
# path to migration scripts
# Use forward slashes (/) also on windows to provide an os agnostic path
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
# Any required deps can installed by adding `alembic[tz]` to the pip requirements
# string value is passed to ZoneInfo()
# leave blank for localtime
# timezone =

# max length of characters to apply to the "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
# version_path_separator = newline
#
# Use os.pathsep. Default configuration used for new projects.
version_path_separator = os

# set to 'true' to search source files recursively
# in each "version_locations" directory
# new in Alembic version 1.10
# recursive_version_locations = false

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

sqlalchemy.url = sqlite:///omegacard.db


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
# hooks = ruff
# ruff.type = exec
# ruff.executable = %(here)s/.venv/bin/ruff
# ruff.options = --fix REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARNING
handlers = console
qualname =

[logger_sqlalchemy]
level = WARNING
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S

alembic/README (new file, 1 line)

Generic single-database configuration.

alembic/env.py (new file, 90 lines)

from logging.config import fileConfig

from sqlalchemy import engine_from_config
from sqlalchemy import pool

from alembic import context

import sys
import os

sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from app.db.models import Base
from app.db.database import DATABASE_URL

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

config.set_main_option('sqlalchemy.url', DATABASE_URL)


# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
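
Note: env.py calls config.set_main_option('sqlalchemy.url', DATABASE_URL), so at runtime Alembic migrates whatever database app.db.database points at, not the sqlite:///omegacard.db fallback written in alembic.ini. A minimal sketch of driving the same migration from Python instead of the alembic CLI, assuming Alembic is installed and the script runs from the repository root; the helper below is illustrative and not part of this commit:

# Illustrative helper: programmatically run "alembic upgrade head" against the
# alembic.ini and env.py shown above; env.py swaps in DATABASE_URL at runtime.
from alembic import command
from alembic.config import Config

def upgrade_to_head(ini_path: str = "alembic.ini") -> None:
    cfg = Config(ini_path)        # picks up script_location = alembic and the logging config
    command.upgrade(cfg, "head")  # equivalent to `alembic upgrade head`

if __name__ == "__main__":
    upgrade_to_head()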

alembic/script.py.mako (new file, 26 lines)

"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}

alembic/versions/f629adc7e597_.py (new file, 30 lines)

"""empty message

Revision ID: f629adc7e597
Revises:
Create Date: 2025-02-07 20:13:32.559672

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision: str = 'f629adc7e597'
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None


def upgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###


def downgrade() -> None:
    # ### commands auto generated by Alembic - please adjust! ###
    pass
    # ### end Alembic commands ###

app/db/database.py (modified)
@@ -3,6 +3,12 @@ from sqlalchemy.orm import sessionmaker, Session
 from contextlib import contextmanager
 from typing import Generator
 import os
+from sqlalchemy import inspect
+from app.services.tcgplayer import TCGPlayerService
+from app.services.pricing import PricingService
+from app.services.file import FileService
+from app.db.models import Price
+from datetime import datetime
 
 
 import logging
@@ -44,12 +50,47 @@ def get_db() -> Generator[Session, None, None]:
     with get_db_session() as session:
         yield session
 
+
+def prepopulate_data(db: Session, db_exist: bool = False) -> None:
+    file_service = FileService(db)
+    tcgplayer_service = TCGPlayerService(db, file_service)
+    pricing_service = PricingService(db, file_service, tcgplayer_service)
+    if not db_exist:
+        tcgplayer_service.populate_tcgplayer_groups()
+        file = tcgplayer_service.load_tcgplayer_cards()
+        pricing_service.cron_load_prices(file)
+    else:
+        pricing_service.cron_load_prices()
+
+
 def init_db() -> None:
-    """Initialize database tables"""
+    """Initialize database tables and run first-time setup if needed"""
     from .models import Base
     try:
+        inspector = inspect(engine)
+        tables_exist = all(
+            table in inspector.get_table_names()
+            for table in Base.metadata.tables.keys()
+        )
+        if tables_exist:
+            with get_db_session() as db:
+                # get date created of latest pricing record
+                latest_price = db.query(Price).order_by(Price.date_created.desc()).first()
+                if latest_price:
+                    # check if it is greater than 1.5 hours old
+                    if (datetime.now() - latest_price.date_created).total_seconds() > 5400:
+                        prepopulate_data(db, db_exist=True)
+                else:
+                    prepopulate_data(db, db_exist=True)
+
+        # Create tables if they don't exist
         Base.metadata.create_all(bind=engine)
-        logger.info("Database tables created successfully")
+
+        # Run first-time setup only if tables were just created
+        if not tables_exist:
+            with get_db_session() as db:
+                prepopulate_data(db)
+                logger.info("First-time database setup completed")
+
+        logger.info("Database initialization completed")
     except Exception as e:
         logger.error(f"Failed to initialize database: {str(e)}")
         raise
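
The refresh rule in init_db() boils down to one comparison: reload pricing data when the newest Price row is more than 1.5 hours (5400 seconds) old. A minimal standalone sketch of that check, with illustrative names that are not part of the commit:

from datetime import datetime, timedelta

def prices_are_stale(latest_created: datetime, max_age_seconds: int = 5400) -> bool:
    # Same arithmetic as init_db(): age in seconds compared against 1.5 hours.
    return (datetime.now() - latest_created).total_seconds() > max_age_seconds

assert prices_are_stale(datetime.now() - timedelta(hours=2))         # 2 hours old: stale
assert not prices_are_stale(datetime.now() - timedelta(minutes=30))  # 30 minutes old: fresh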

app/db/models.py (new file, 367 lines)

from sqlalchemy import Column, Integer, String, Float, Boolean, DateTime, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, validates
from datetime import datetime
from enum import Enum
import logging

logger = logging.getLogger(__name__)


Base = declarative_base()

## Core Models

class Product(Base):
    """
    product is the concept of a physical item that can be sold
    """
    __tablename__ = "products"

    @validates("type")
    def validate_type(self, key, type: str):
        # case-insensitive check against the enum values
        if type.lower() not in {t.value for t in ProductTypeEnum}:
            raise ValueError(f"Invalid product type: {type}")
        return type

    @validates("product_line")
    def validate_product_line(self, key, product_line: str):
        if product_line.lower() not in {p.value for p in ProductLineEnum}:
            raise ValueError(f"Invalid product line: {product_line}")
        return product_line

    id = Column(String, primary_key=True)
    type = Column(String)  # box or card
    product_line = Column(String)  # pokemon, mtg, etc.
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Sale(Base):
    """
    sale represents a transaction where a product was sold to a customer on a marketplace
    """
    __tablename__ = "sales"

    id = Column(String, primary_key=True)
    ledger_id = Column(String, ForeignKey("ledgers.id"))
    customer_id = Column(String, ForeignKey("customers.id"))
    marketplace_id = Column(String, ForeignKey("marketplaces.id"))
    amount = Column(Float)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Ledger(Base):
    """
    ledger associates financial transactions with a user
    """
    __tablename__ = "ledgers"

    id = Column(String, primary_key=True)
    user_id = Column(String, ForeignKey("users.id"))
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Expense(Base):
    """
    expense is any cash outflow associated with moving a product
    can be optionally associated with a sale or a product
    """
    __tablename__ = "expenses"

    id = Column(String, primary_key=True)
    ledger_id = Column(String, ForeignKey("ledgers.id"))
    product_id = Column(String, ForeignKey("products.id"), nullable=True)
    sale_id = Column(String, ForeignKey("sales.id"), nullable=True)
    cost = Column(Float)
    type = Column(String)  # price paid, cogs, shipping, refund, supplies, subscription, fee, etc.
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Marketplace(Base):
    """
    Marketplace represents a marketplace where products can be sold
    """
    __tablename__ = "marketplaces"

    id = Column(String, primary_key=True)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Box(Base):
    """
    Box represents a physical product with a sku that contains trading cards
    Boxes can be sealed or opened
    Opened boxes have cards associated with them
    A box contains cards regardless of the inventory status of those cards
    """
    __tablename__ = "boxes"

    @validates("type")
    def validate_type(self, key, type: str):
        if type.lower() not in {t.value for t in BoxTypeEnum}:
            raise ValueError(f"Invalid box type: {type}")
        return type

    product_id = Column(String, ForeignKey("products.id"), primary_key=True)
    type = Column(String)  # collector box, play box, etc.
    set_code = Column(String)
    sku = Column(String, nullable=True)
    num_cards_expected = Column(Integer, nullable=True)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class OpenBox(Base):
    __tablename__ = "open_boxes"

    id = Column(String, primary_key=True)
    product_id = Column(String, ForeignKey("products.id"))
    num_cards_actual = Column(Integer)
    date_opened = Column(DateTime, default=datetime.now)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Card(Base):
    """
    Card represents the concept of a distinct card
    Cards have metadata from different sources
    """
    __tablename__ = "cards"

    product_id = Column(String, ForeignKey("products.id"), primary_key=True)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class CardManabox(Base):
    __tablename__ = "manabox_cards"

    product_id = Column(String, ForeignKey("cards.product_id"), primary_key=True)
    name = Column(String)
    set_code = Column(String)
    set_name = Column(String)
    collector_number = Column(String)
    foil = Column(String)
    rarity = Column(String)
    manabox_id = Column(Integer)
    scryfall_id = Column(String)
    condition = Column(String)
    language = Column(String)

class CardTCGPlayer(Base):
    __tablename__ = "tcgplayer_cards"

    product_id = Column(String, ForeignKey("cards.product_id"), primary_key=True)
    group_id = Column(Integer)
    tcgplayer_id = Column(Integer)
    product_line = Column(String)
    set_name = Column(String)
    product_name = Column(String)
    title = Column(String)
    number = Column(String)
    rarity = Column(String)
    condition = Column(String)

class Warehouse(Base):
    """
    container that is associated with a user and contains inventory and stock
    """
    __tablename__ = "warehouses"

    id = Column(String, primary_key=True)
    user_id = Column(String, ForeignKey("users.id"))
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Stock(Base):
    """
    contains products that are listed for sale
    """
    __tablename__ = "stocks"

    product_id = Column(String, ForeignKey("products.id"), primary_key=True)
    warehouse_id = Column(String, ForeignKey("warehouses.id"), default="default")
    marketplace_id = Column(String, ForeignKey("marketplaces.id"))
    quantity = Column(Integer)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Inventory(Base):
    """
    contains products in inventory (not necessarily listed for sale)
    sealed product in breakdown queue, held sealed product, speculatively held singles, etc.
    inventory can contain products across multiple marketplaces
    """
    __tablename__ = "inventories"

    product_id = Column(String, ForeignKey("products.id"), primary_key=True)
    warehouse_id = Column(String, ForeignKey("warehouses.id"), default="default")
    quantity = Column(Integer)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class User(Base):
    """
    User represents a user in the system
    """
    __tablename__ = "users"

    id = Column(String, primary_key=True)
    username = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Customer(Base):
    """
    Customer represents a customer that has purchased at least 1 product
    """
    __tablename__ = "customers"

    id = Column(String, primary_key=True)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class StagedFileProduct(Base):
    __tablename__ = "staged_file_products"

    id = Column(String, primary_key=True)
    product_id = Column(String, ForeignKey("products.id"))
    file_id = Column(String, ForeignKey("files.id"))
    quantity = Column(Integer)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class File(Base):
    """
    File represents a file that has been uploaded to or retrieved by the system
    """
    __tablename__ = "files"

    id = Column(String, primary_key=True)
    type = Column(String)  # upload, export, etc.
    source = Column(String)  # manabox, tcgplayer, etc.
    service = Column(String)  # pricing, data, etc.
    filename = Column(String)
    filepath = Column(String)  # backup location
    filesize_kb = Column(Float)
    status = Column(String)
    box_id = Column(String, ForeignKey("boxes.product_id"), nullable=True)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class Price(Base):
    __tablename__ = "prices"

    id = Column(String, primary_key=True)
    product_id = Column(String, ForeignKey("products.id"))
    marketplace_id = Column(String, ForeignKey("marketplaces.id"))
    type = Column(String)  # market, direct, low, low_with_shipping, marketplace
    price = Column(Float)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class StorageBlock(Base):
    """
    StorageBlock represents a physical storage location for products (50 card indexed block in a box)
    """
    __tablename__ = "storage_blocks"

    @validates("type")
    def validate_type(self, key, type: str):
        if type.lower() not in {t.value for t in StorageBlockTypeEnum}:
            raise ValueError(f"Invalid storage block type: {type}")
        return type

    id = Column(String, primary_key=True)
    warehouse_id = Column(String, ForeignKey("warehouses.id"))
    name = Column(String)
    type = Column(String)  # rare or common
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class ProductBlock(Base):
    """
    ProductBlock represents the relationship between a product and a storage block
    which products are in a block and at what index
    """
    __tablename__ = "product_blocks"

    id = Column(String, primary_key=True)
    product_id = Column(String, ForeignKey("products.id"))
    block_id = Column(String, ForeignKey("storage_blocks.id"))
    block_index = Column(Integer)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class OpenBoxCard(Base):
    """
    OpenBoxCard represents the relationship between an opened box and the cards it contains
    """
    __tablename__ = "open_box_cards"

    id = Column(String, primary_key=True)
    open_box_id = Column(String, ForeignKey("open_boxes.id"))
    card_id = Column(String, ForeignKey("cards.product_id"))
    quantity = Column(Integer)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class ProductSale(Base):
    """
    ProductSale represents the relationship between products and sales
    """
    __tablename__ = "product_sales"

    id = Column(String, primary_key=True)
    product_id = Column(String, ForeignKey("products.id"))
    sale_id = Column(String, ForeignKey("sales.id"))
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

class TCGPlayerGroups(Base):
    __tablename__ = 'tcgplayer_groups'

    id = Column(String, primary_key=True)
    group_id = Column(Integer)
    name = Column(String)
    abbreviation = Column(String)
    is_supplemental = Column(String)
    published_on = Column(String)
    modified_on = Column(String)
    category_id = Column(Integer)

# enums

class RarityEnum(str, Enum):
    COMMON = "common"
    UNCOMMON = "uncommon"
    RARE = "rare"
    MYTHIC = "mythic"
    LAND = "land"
    PROMO = "promo"
    SPECIAL = "special"

class ConditionEnum(str, Enum):
    MINT = "mint"
    NEAR_MINT = "near_mint"
    LIGHTLY_PLAYED = "lightly_played"
    MODERATELY_PLAYED = "moderately_played"
    HEAVILY_PLAYED = "heavily_played"
    DAMAGED = "damaged"

class BoxTypeEnum(str, Enum):
    COLLECTOR = "collector"
    PLAY = "play"
    DRAFT = "draft"
    COMMANDER = "commander"
    SET = "set"

class ProductLineEnum(str, Enum):
    MTG = "mtg"
    POKEMON = "pokemon"

class ProductTypeEnum(str, Enum):
    BOX = "box"
    CARD = "card"

class StorageBlockTypeEnum(str, Enum):
    RARE = "rare"
    COMMON = "common"
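
Every table above uses string primary keys plus date_created / date_modified audit columns. A minimal sketch that exercises the models against an in-memory SQLite database, assuming this module is importable as app.db.models; the identifiers and values are illustrative and not part of the commit:

from sqlalchemy import create_engine
from sqlalchemy.orm import Session
from app.db.models import Base, Product, Box  # assumes the models.py above

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)  # same call init_db() makes against the real engine

with Session(engine) as session:
    product = Product(id="prod-1", type="box", product_line="mtg")
    box = Box(product_id="prod-1", type="collector", set_code="ABC", num_cards_expected=300)
    session.add_all([product, box])
    session.commit()
    print(session.query(Box).filter_by(set_code="ABC").count())  # -> 1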

@@ -1,6 +1,6 @@
 from contextlib import contextmanager
 from sqlalchemy.orm import Session
-from exceptions import FailedUploadException
+from app.exceptions import FailedUploadException
 import logging
 
 logger = logging.getLogger(__name__)

app/dependencies.py (new file, 124 lines)

from typing import Annotated
from sqlalchemy.orm import Session
from fastapi import Depends, Form

from app.services.box import BoxService
from app.services.tcgplayer import TCGPlayerService
from app.services.pricing import PricingService
from app.services.file import FileService
from app.services.product import ProductService
from app.services.inventory import InventoryService
from app.services.task import TaskService
from app.services.storage import StorageService
from app.db.database import get_db
from app.schemas.file import CreateFileRequest
from app.schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest

# Common type annotation for database dependency
DB = Annotated[Session, Depends(get_db)]

# Base services (no dependencies besides DB)
def get_file_service(db: DB) -> FileService:
    """FileService with only database dependency"""
    return FileService(db)

def get_storage_service(db: DB) -> StorageService:
    """StorageService with only database dependency"""
    return StorageService(db)

def get_inventory_service(db: DB) -> InventoryService:
    """InventoryService with only database dependency"""
    return InventoryService(db)

# Services with dependencies on other services
def get_tcgplayer_service(
    db: DB,
    file_service: Annotated[FileService, Depends(get_file_service)]
) -> TCGPlayerService:
    """TCGPlayerService depends on FileService"""
    return TCGPlayerService(db, file_service)

def get_pricing_service(
    db: DB,
    file_service: Annotated[FileService, Depends(get_file_service)],
    tcgplayer_service: Annotated[TCGPlayerService, Depends(get_tcgplayer_service)]
) -> PricingService:
    """PricingService depends on FileService and TCGPlayerService"""
    return PricingService(db, file_service, tcgplayer_service)

def get_product_service(
    db: DB,
    file_service: Annotated[FileService, Depends(get_file_service)],
    tcgplayer_service: Annotated[TCGPlayerService, Depends(get_tcgplayer_service)],
    storage_service: Annotated[StorageService, Depends(get_storage_service)]
) -> ProductService:
    """ProductService with multiple service dependencies"""
    return ProductService(db, file_service, tcgplayer_service, storage_service)

def get_box_service(
    db: DB,
    inventory_service: Annotated[InventoryService, Depends(get_inventory_service)]
) -> BoxService:
    """BoxService depends on InventoryService"""
    return BoxService(db, inventory_service)

def get_task_service(
    db: DB,
    product_service: Annotated[ProductService, Depends(get_product_service)],
    pricing_service: Annotated[PricingService, Depends(get_pricing_service)]
) -> TaskService:
    """TaskService depends on ProductService and PricingService"""
    return TaskService(db, product_service, pricing_service)

# Form data dependencies
def get_create_file_metadata(
    type: str = Form(...),
    source: str = Form(...),
    service: str = Form(None),
    filename: str = Form(None)
) -> CreateFileRequest:
    """Form dependency for file creation"""
    return CreateFileRequest(
        type=type,
        source=source,
        service=service,
        filename=filename
    )

def get_box_data(
    type: str = Form(...),
    sku: str = Form(None),
    set_code: str = Form(...),
    num_cards_expected: int = Form(None)
) -> CreateBoxRequest:
    """Form dependency for box creation"""
    return CreateBoxRequest(
        type=type,
        sku=sku,
        set_code=set_code,
        num_cards_expected=num_cards_expected
    )

def get_box_update_data(
    type: str = Form(None),
    sku: str = Form(None),
    set_code: str = Form(None),
    num_cards_expected: int = Form(None)
) -> UpdateBoxRequest:
    """Form dependency for box updates"""
    return UpdateBoxRequest(
        type=type,
        sku=sku,
        set_code=set_code,
        num_cards_expected=num_cards_expected
    )

def get_open_box_data(
    product_id: str = Form(...),
    file_ids: list[str] = Form(None),
    num_cards_actual: int = Form(None),
    date_opened: str = Form(None)
) -> CreateOpenBoxRequest:
    """Form dependency for opening boxes"""
    return CreateOpenBoxRequest(
        product_id=product_id,
        file_ids=file_ids,
        num_cards_actual=num_cards_actual,
        date_opened=date_opened
    )
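
FastAPI resolves this whole service graph from a route signature alone: asking for get_task_service pulls in get_product_service and get_pricing_service, which pull in get_tcgplayer_service and get_file_service, which pull in get_db. For tests, any node in the chain can be swapped with dependency_overrides. A sketch, assuming app.main imports cleanly and using a hypothetical FakeFileService that is not part of the commit:

from fastapi.testclient import TestClient
from app.main import app
from app.dependencies import get_file_service

class FakeFileService:
    # Hypothetical stub: only what GET /api/files needs.
    def get_files(self, status=None):
        return []

app.dependency_overrides[get_file_service] = lambda: FakeFileService()
client = TestClient(app)  # not used as a context manager, so startup events do not run
print(client.get("/api/files").status_code)  # expected 200 with an empty files list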

app/main.py (new file, 93 lines)

from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from app.routes.routes import router
from app.db.database import init_db, check_db_connection, get_db
import logging
import sys

# Import your dependency functions
from app.dependencies import (
    get_task_service,
    get_tcgplayer_service,
    get_pricing_service,
    get_file_service,
    get_product_service,
    get_storage_service,
    get_inventory_service,
)

logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler('app.log')
    ]
)

logger = logging.getLogger(__name__)

# Create FastAPI instance
app = FastAPI(
    title="Card Management API",
    description="API for managing card collections and TCGPlayer integration",
    version="1.0.0",
    debug=True
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Modify this in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(router)

@app.on_event("startup")
async def startup_event():
    try:
        # Check database connection
        if not check_db_connection():
            logger.error("Database connection failed")
            raise Exception("Database connection failed")

        # Initialize database
        init_db()

        # Get database session
        db = next(get_db())

        # Use dependency injection to get services
        file_service = get_file_service(db)
        storage_service = get_storage_service(db)
        inventory_service = get_inventory_service(db)
        tcgplayer_service = get_tcgplayer_service(db, file_service)
        pricing_service = get_pricing_service(db, file_service, tcgplayer_service)
        product_service = get_product_service(db, file_service, tcgplayer_service, storage_service)
        task_service = get_task_service(db, product_service, pricing_service)

        # Start task service
        await task_service.start()

        logger.info("Application started successfully")

    except Exception as e:
        logger.error(f"Startup failed: {str(e)}")
        raise

@app.on_event("shutdown")
async def shutdown_event():
    logger.info("Application shutting down")
    pass

@app.get("/")
async def root():
    return {"message": "Card Management API"}

if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)

app/routes/routes.py (new file, 314 lines)

from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, BackgroundTasks
from fastapi.responses import StreamingResponse
from typing import Optional, List
from io import BytesIO
import logging
from datetime import datetime
import os
import json
from pydantic import BaseModel

from app.schemas.file import (
    FileSchema,
    CreateFileRequest,
    CreateFileResponse,
    GetFileResponse,
    DeleteFileResponse,
    GetFileQueryParams
)
from app.schemas.box import (
    CreateBoxResponse,
    CreateBoxRequest,
    BoxSchema,
    UpdateBoxRequest,
    CreateOpenBoxRequest,
    CreateOpenBoxResponse,
    OpenBoxSchema
)
from app.services.file import FileService
from app.services.box import BoxService
from app.services.task import TaskService
from app.services.pricing import PricingService
from app.dependencies import (
    get_file_service,
    get_box_service,
    get_task_service,
    get_create_file_metadata,
    get_box_data,
    get_box_update_data,
    get_open_box_data,
    get_pricing_service
)

logger = logging.getLogger(__name__)

router = APIRouter(prefix="/api", tags=["cards"])
MAX_FILE_SIZE = 100 * 1024 * 1024  # 100 MB

async def validate_file_upload(file: UploadFile) -> bytes:
    """Validate uploaded file and return its contents."""
    if not file.filename:
        raise HTTPException(status_code=400, detail="No filename provided")

    content = await file.read()
    if len(content) > MAX_FILE_SIZE:
        raise HTTPException(status_code=413, detail="File too large")

    return content

@router.post("/files", response_model=CreateFileResponse, status_code=201)
async def create_file(
    background_tasks: BackgroundTasks,
    file: UploadFile = File(...),
    metadata: CreateFileRequest = Depends(get_create_file_metadata),
    file_service: FileService = Depends(get_file_service),
    task_service: TaskService = Depends(get_task_service)
) -> CreateFileResponse:
    """Create a new file entry with the uploaded file."""
    try:
        content = await validate_file_upload(file)
        logger.debug(f"File received: {file.filename}")
        logger.debug(f"Metadata: {metadata}")

        metadata.filename = metadata.filename or file.filename

        if not file_service.validate_file(content, metadata):
            raise HTTPException(status_code=400, detail="Invalid file content")

        created_file = file_service.create_file(content, metadata)

        if metadata.source == 'manabox':
            background_tasks.add_task(task_service.process_manabox_file, created_file)

        return CreateFileResponse(
            status_code=201,
            success=True,
            files=[FileSchema.from_orm(created_file)]
        )

    except HTTPException as http_ex:
        raise http_ex
    except Exception as e:
        logger.error(f"File upload failed: {str(e)}")
        raise HTTPException(
            status_code=500,
            detail="Internal server error occurred during file upload"
        )
    finally:
        await file.close()

@router.get("/files/{file_id:path}", response_model=GetFileResponse)
@router.get("/files", response_model=GetFileResponse)
async def get_file(
    file_id: Optional[str] = None,
    query: GetFileQueryParams = Depends(),
    file_service: FileService = Depends(get_file_service)
) -> GetFileResponse:
    """
    Get file(s) by optional ID and/or status.
    If file_id is provided, returns that specific file.
    If status is provided, returns all files with that status.
    If neither is provided, returns all files.
    """
    try:
        if file_id:
            file = file_service.get_file(file_id)
            files = [file]
        else:
            files = file_service.get_files(status=query.status)

        return GetFileResponse(
            status_code=200,
            success=True,
            files=[FileSchema.from_orm(f) for f in files]
        )
    except Exception as e:
        logger.error(f"Get file(s) failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.delete("/files/{file_id}", response_model=DeleteFileResponse)
async def delete_file(
    file_id: str,
    file_service: FileService = Depends(get_file_service)
) -> DeleteFileResponse:
    """Delete a file by ID."""
    try:
        file = file_service.delete_file(file_id)
        return DeleteFileResponse(
            status_code=200,
            success=True,
            files=[FileSchema.from_orm(file)]
        )
    except Exception as e:
        logger.error(f"Delete file failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.post("/boxes", response_model=CreateBoxResponse, status_code=201)
async def create_box(
    box_data: CreateBoxRequest = Depends(get_box_data),
    box_service: BoxService = Depends(get_box_service)
) -> CreateBoxResponse:
    """Create a new box."""
    try:
        result, success = box_service.create_box(box_data)
        if not success:
            raise HTTPException(status_code=400, detail="Box creation failed, box already exists")
        return CreateBoxResponse(
            status_code=201,
            success=True,
            box=[BoxSchema.from_orm(result)]
        )
    except Exception as e:
        logger.error(f"Create box failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.put("/boxes/{box_id}", response_model=CreateBoxResponse)
async def update_box(
    box_id: str,
    box_data: UpdateBoxRequest = Depends(get_box_update_data),
    box_service: BoxService = Depends(get_box_service)
) -> CreateBoxResponse:
    """Update an existing box."""
    try:
        result = box_service.update_box(box_id, box_data)
        return CreateBoxResponse(
            status_code=200,
            success=True,
            box=[BoxSchema.from_orm(result)]
        )
    except Exception as e:
        logger.error(f"Update box failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.delete("/boxes/{box_id}", response_model=CreateBoxResponse)
async def delete_box(
    box_id: str,
    box_service: BoxService = Depends(get_box_service)
) -> CreateBoxResponse:
    """Delete a box by ID."""
    try:
        result = box_service.delete_box(box_id)
        return CreateBoxResponse(
            status_code=200,
            success=True,
            box=[BoxSchema.from_orm(result)]
        )
    except Exception as e:
        logger.error(f"Delete box failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.post("/boxes/{box_id}/open", response_model=CreateOpenBoxResponse, status_code=201)
async def open_box(
    box_id: str,
    box_data: CreateOpenBoxRequest = Depends(get_open_box_data),
    box_service: BoxService = Depends(get_box_service)
) -> CreateOpenBoxResponse:
    """Open a box by ID."""
    try:
        result = box_service.open_box(box_id, box_data)
        return CreateOpenBoxResponse(
            status_code=201,
            success=True,
            open_box=[OpenBoxSchema.from_orm(result)]
        )
    except Exception as e:
        logger.error(f"Open box failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.delete("/boxes/{box_id}/open", response_model=CreateOpenBoxResponse, status_code=200)
async def delete_open_box(
    box_id: str,
    box_service: BoxService = Depends(get_box_service)
) -> CreateOpenBoxResponse:
    """Delete an open box by ID."""
    try:
        result = box_service.delete_open_box(box_id)
        return CreateOpenBoxResponse(
            status_code=200,  # matches the route's declared status code
            success=True,
            open_box=[OpenBoxSchema.from_orm(result)]
        )
    except Exception as e:
        logger.error(f"Delete open box failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.post("/tcgplayer/inventory/add", response_class=StreamingResponse)
async def create_inventory_add_file(
    request: dict,  # Just use a dict instead
    pricing_service: PricingService = Depends(get_pricing_service),
):
    """Create a new inventory add file for download."""
    try:
        # Get IDs directly from the dict
        open_box_ids = request.get('open_box_ids', [])
        content = pricing_service.generate_tcgplayer_inventory_update_file_with_pricing(open_box_ids)

        stream = BytesIO(content)
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")

        return StreamingResponse(
            iter([stream.getvalue()]),
            media_type="text/csv",
            headers={
                'Content-Disposition': f'attachment; filename="inventory_add_{timestamp}.csv"'
            }
        )
    except Exception as e:
        logger.error(f"Create inventory add file failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))

@router.get("/tcgplayer/inventory/update", response_class=StreamingResponse)
async def create_inventory_update_file(
    pricing_service: PricingService = Depends(get_pricing_service),
):
    """Create a new inventory update file for download."""
    try:
        content = pricing_service.generate_tcgplayer_inventory_update_file_with_pricing()

        stream = BytesIO(content)
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")

        return StreamingResponse(
            iter([stream.getvalue()]),
            media_type="text/csv",
            headers={
                'Content-Disposition': f'attachment; filename="inventory_update_{timestamp}.csv"'
            }
        )
    except Exception as e:
        logger.error(f"Create inventory update file failed: {str(e)}")
        raise HTTPException(status_code=400, detail=str(e))


class CookieUpdate(BaseModel):
    cookies: dict

# cookies
@router.post("/cookies", response_model=dict)
async def update_cookies(
    cookie_data: CookieUpdate
):
    try:
        # Create cookies directory if it doesn't exist
        os.makedirs('cookies', exist_ok=True)

        # Save cookies with timestamp
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        cookie_path = f'cookies/tcg_cookies.json'

        # Save new cookies
        with open(cookie_path, 'w') as f:
            json.dump(cookie_data.cookies, f, indent=2)

        # Update the "latest" cookies file
        with open('cookies/tcg_cookies_latest.json', 'w') as f:
            json.dump(cookie_data.cookies, f, indent=2)

        return {"message": "Cookies updated successfully"}

    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to update cookies: {str(e)}"
        )
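
The POST /api/files route takes a multipart upload plus the form fields that get_create_file_metadata reads (type, source, optional service and filename). A client-side sketch against a locally running instance; the URL, file name, and field values below are illustrative and not part of the commit:

import requests

with open("manabox_export.csv", "rb") as f:  # any local ManaBox CSV export
    response = requests.post(
        "http://localhost:8000/api/files",
        files={"file": ("manabox_export.csv", f, "text/csv")},
        data={"type": "upload", "source": "manabox", "service": "data"},
    )
print(response.status_code, response.json())  # 201 plus the created file record on success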

app/schemas/base.py (new file, 11 lines)

from pydantic import BaseModel
from datetime import datetime


# Base schemas with shared attributes
class BaseSchema(BaseModel):
    date_created: datetime
    date_modified: datetime

    class Config:
        from_attributes = True  # Allows conversion from SQLAlchemy models
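
from_attributes = True is what lets the routes hand SQLAlchemy rows straight to these schemas (FileSchema.from_orm(...) and friends). A minimal sketch of that conversion using an illustrative subclass that is not part of the commit:

from datetime import datetime
from app.schemas.base import BaseSchema
from app.db.models import Marketplace

class MarketplaceSchema(BaseSchema):  # illustrative subclass
    id: str

row = Marketplace(id="tcgplayer", date_created=datetime.now(), date_modified=datetime.now())
print(MarketplaceSchema.model_validate(row))  # pydantic v2 reads the attributes off the ORM object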

app/schemas/box.py (new file, 66 lines)

from pydantic import BaseModel, Field, ConfigDict
from app.schemas.base import BaseSchema
from typing import Optional
from datetime import datetime

# BOX
class BoxSchema(BaseSchema):
    product_id: str = Field(..., title="Product ID")
    type: str = Field(..., title="Box Type (collector, play, draft)")
    set_code: str = Field(..., title="Set Code")
    sku: Optional[str] = Field(None, title="SKU")
    num_cards_expected: Optional[int] = Field(None, title="Number of cards expected")

    model_config = ConfigDict(from_attributes=True)

# CREATE
# REQUEST
class CreateBoxRequest(BaseModel):
    type: str = Field(..., title="Box Type (collector, play, draft)")
    set_code: str = Field(..., title="Set Code")
    sku: Optional[str] = Field(None, title="SKU")
    num_cards_expected: Optional[int] = Field(None, title="Number of cards expected")

# RESPONSE
class CreateBoxResponse(BaseModel):
    status_code: int = Field(..., title="status_code")
    success: bool = Field(..., title="success")
    box: list[BoxSchema] = Field(..., title="box")

# UPDATE
# REQUEST
class UpdateBoxRequest(BaseModel):
    type: Optional[str] = Field(None, title="Box Type (collector, play, draft)")
    set_code: Optional[str] = Field(None, title="Set Code")
    sku: Optional[str] = Field(None, title="SKU")
    num_cards_expected: Optional[int] = Field(None, title="Number of cards expected")

# GET
# RESPONSE
class GetBoxResponse(BaseModel):
    status_code: int = Field(..., title="status_code")
    success: bool = Field(..., title="success")
    boxes: list[BoxSchema] = Field(..., title="boxes")


# OPEN BOX
class OpenBoxSchema(BaseModel):
    id: str = Field(..., title="id")
    num_cards_actual: Optional[int] = Field(None, title="Number of cards actual")
    date_opened: Optional[datetime] = Field(None, title="Date Opened")

    model_config = ConfigDict(from_attributes=True)

# CREATE
# REQUEST
class CreateOpenBoxRequest(BaseModel):
    product_id: str = Field(..., title="Product ID")
    file_ids: list[str] = Field(None, title="File IDs")
    num_cards_actual: Optional[int] = Field(None, title="Number of cards actual")
    date_opened: Optional[str] = Field(None, title="Date Opened")

# RESPONSE
class CreateOpenBoxResponse(BaseModel):
    status_code: int = Field(..., title="status_code")
    success: bool = Field(..., title="success")
    open_box: list[OpenBoxSchema] = Field(..., title="open_box")
51
app/schemas/file.py
Normal file
@@ -0,0 +1,51 @@
from pydantic import BaseModel, Field, ConfigDict
from typing import Optional
from datetime import datetime


# FILE
class FileSchema(BaseModel):
    id: str = Field(..., title="id")
    filename: str = Field(..., title="filename")
    type: str = Field(..., title="type")
    filesize_kb: float = Field(..., title="filesize_kb")
    source: str = Field(..., title="source")
    status: str = Field(..., title="status")
    service: Optional[str] = Field(None, title="service")
    date_created: datetime = Field(..., title="date_created")
    date_modified: datetime = Field(..., title="date_modified")

    # This enables ORM mode
    model_config = ConfigDict(from_attributes=True)

# CREATE
# REQUEST
class CreateFileRequest(BaseModel):
    source: str = Field(..., title="source")
    type: str = Field(..., title="type")
    # optional
    service: Optional[str] = Field(None, title="Service")
    filename: Optional[str] = Field(None, title="Filename")

# RESPONSE
class CreateFileResponse(BaseModel):
    status_code: int = Field(..., title="status_code")
    success: bool = Field(..., title="success")
    files: list[FileSchema] = Field(..., title="files")

# GET
# RESPONSE
class GetFileResponse(BaseModel):
    status_code: int = Field(..., title="status_code")
    success: bool = Field(..., title="success")
    files: list[FileSchema] = Field(..., title="files")

# QUERY PARAMS
class GetFileQueryParams(BaseModel):
    status: Optional[str] = Field(None, title="status")

# DELETE
# RESPONSE
class DeleteFileResponse(BaseModel):
    status_code: int = Field(..., title="status_code")
    success: bool = Field(..., title="success")
    files: list[FileSchema] = Field(..., title="files")
5
app/schemas/inventory.py
Normal file
@@ -0,0 +1,5 @@
from pydantic import BaseModel, Field


class UpdateInventoryResponse(BaseModel):
    success: bool = Field(..., title="Success")
19
app/schemas/order.py
Normal file
@@ -0,0 +1,19 @@
from pydantic import BaseModel, Field, ConfigDict
from typing import Optional
from datetime import datetime


# ORDER
class OrderSchema(BaseModel):
    id: str = Field(..., title="id")
    filename: str = Field(..., title="filename")
    type: str = Field(..., title="type")
    filesize_kb: float = Field(..., title="filesize_kb")
    source: str = Field(..., title="source")
    status: str = Field(..., title="status")
    service: Optional[str] = Field(None, title="service")
    date_created: datetime = Field(..., title="date_created")
    date_modified: datetime = Field(..., title="date_modified")

    # This enables ORM mode
    model_config = ConfigDict(from_attributes=True)
0
app/services/__init__.py
Normal file
202
app/services/box.py
Normal file
@@ -0,0 +1,202 @@
from datetime import datetime
from typing import Dict, List
from uuid import uuid4
from sqlalchemy import or_
from sqlalchemy.orm import Session
import logging

from app.db.models import (
    Box,
    File,
    StagedFileProduct,
    Product,
    OpenBoxCard,
    OpenBox,
    TCGPlayerGroups,
    Inventory
)
from app.db.utils import db_transaction
from app.schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest
from app.services.inventory import InventoryService

logger = logging.getLogger(__name__)

VALID_BOX_TYPES = {"collector", "play", "draft", "set", "commander"}

class BoxService:
    def __init__(self, db: Session, inventory_service: InventoryService):
        self.db = db
        self.inventory_service = inventory_service

    def get_staged_product_data(self, file_ids: List[str]) -> List[StagedFileProduct]:
        """Retrieve staged product data for given file IDs."""
        return self.db.query(StagedFileProduct).filter(
            StagedFileProduct.file_id.in_(file_ids)
        ).all()

    def aggregate_staged_product_data(self, staged_product_data: List[StagedFileProduct]) -> Dict[Product, int]:
        """Aggregate staged product data by product and quantity."""
        product_data = {}
        for row in staged_product_data:
            product = self.db.query(Product).filter(Product.id == row.product_id).first()
            if product:
                product_data[product] = product_data.get(product, 0) + row.quantity
        return product_data

    def add_products_to_open_box(self, open_box: OpenBox, product_data: Dict[Product, int]) -> None:
        """Add products to an open box."""
        for product, quantity in product_data.items():
            open_box_card = OpenBoxCard(
                id=str(uuid4()),
                open_box_id=open_box.id,
                card_id=product.id,
                quantity=quantity
            )
            self.db.add(open_box_card)

    def validate_box_type(self, box_type: str) -> bool:
        """Validate if the box type is supported."""
        return box_type in VALID_BOX_TYPES

    def validate_set_code(self, set_code: str) -> bool:
        """Validate if the set code exists in TCGPlayer groups."""
        return self.db.query(TCGPlayerGroups).filter(
            TCGPlayerGroups.abbreviation == set_code
        ).first() is not None

    def create_box(self, create_box_data: CreateBoxRequest) -> tuple[Box, bool]:
        """Create a new box, or return an existing match. Returns (box, created)."""
        if not self.validate_box_type(create_box_data.type):
            raise ValueError("Invalid box type")
        if not self.validate_set_code(create_box_data.set_code):
            raise ValueError("Invalid set code")

        existing_box = self.db.query(Box).filter(
            Box.type == create_box_data.type,
            Box.set_code == create_box_data.set_code,
            or_(Box.sku == create_box_data.sku, Box.sku.is_(None))
        ).first()

        if existing_box:
            return existing_box, False
        else:
            with db_transaction(self.db):
                product = Product(
                    id=str(uuid4()),
                    type='box',
                    product_line='mtg'
                )
                box = Box(
                    product_id=product.id,
                    type=create_box_data.type,
                    set_code=create_box_data.set_code,
                    sku=create_box_data.sku,
                    num_cards_expected=create_box_data.num_cards_expected
                )
                self.db.add(product)
                self.db.add(box)

            return box, True

    def update_box(self, box_id: str, update_box_data: UpdateBoxRequest) -> Box:
        """Update an existing box."""
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise ValueError("Box not found")

        update_data = update_box_data.dict(exclude_unset=True)

        # Validate box type if it's being updated
        if "type" in update_data and update_data["type"] is not None:
            if not self.validate_box_type(update_data["type"]):
                raise ValueError(f"Invalid box type: {update_data['type']}")

        # Validate set code if it's being updated
        if "set_code" in update_data and update_data["set_code"] is not None:
            if not self.validate_set_code(update_data["set_code"]):
                raise ValueError(f"Invalid set code: {update_data['set_code']}")

        with db_transaction(self.db):
            for field, value in update_data.items():
                if value is not None:  # Only update non-None values
                    setattr(box, field, value)

        return box

    def delete_box(self, box_id: str) -> Box:
        """Delete a box."""
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        product = self.db.query(Product).filter(Product.id == box_id).first()
        if not box:
            raise ValueError("Box not found")

        with db_transaction(self.db):
            self.db.delete(box)
            self.db.delete(product)
        return box

    def open_box(self, box_id: str, box_data: CreateOpenBoxRequest) -> OpenBox:
        """Open a box and process its contents."""
        box = self.db.query(Box).filter(Box.product_id == box_id).first()
        if not box:
            raise ValueError("Box not found")

        with db_transaction(self.db):
            open_box = OpenBox(
                id=str(uuid4()),
                product_id=box_id,
                num_cards_actual=box_data.num_cards_actual,
                date_opened=datetime.strptime(box_data.date_opened, "%Y-%m-%d") if box_data.date_opened else datetime.now()
            )
            self.db.add(open_box)

            staged_product_data = self.get_staged_product_data(box_data.file_ids)
            product_data = self.aggregate_staged_product_data(staged_product_data)
            self.inventory_service.process_staged_products(product_data)
            self.add_products_to_open_box(open_box, product_data)

            # Update file box IDs
            self.db.query(File).filter(File.id.in_(box_data.file_ids)).update(
                {"box_id": open_box.id}, synchronize_session=False
            )

        return open_box

    def delete_open_box(self, box_id: str) -> OpenBox:
        # Fetch open box and related cards in one query
        open_box = (
            self.db.query(OpenBox)
            .filter(OpenBox.id == box_id)
            .first()
        )
        if not open_box:
            raise ValueError("Open box not found")

        # Get all open box cards and related inventory items in one query
        open_box_cards = (
            self.db.query(OpenBoxCard, Inventory)
            .join(
                Inventory,
                OpenBoxCard.card_id == Inventory.product_id
            )
            .filter(OpenBoxCard.open_box_id == open_box.id)
            .all()
        )

        # Process inventory adjustments
        for open_box_card, inventory_item in open_box_cards:
            if open_box_card.quantity > inventory_item.quantity:
                raise ValueError("Open box quantity exceeds inventory quantity")

            inventory_item.quantity -= open_box_card.quantity
            if inventory_item.quantity == 0:
                self.db.delete(inventory_item)

            # Delete the open box card
            self.db.delete(open_box_card)

        # Execute all database operations in a single transaction
        with db_transaction(self.db):
            self.db.delete(open_box)

        return open_box
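As a quick orientation to the open-box flow above, a minimal wiring sketch. It assumes an open SQLAlchemy Session `db` and existing box/file ids; the request schema is the CreateOpenBoxRequest from app/schemas/box.py in this commit.

# sketch: opening a box from previously staged scan files
# assumes `db` is an open SQLAlchemy Session and the ids below exist
from app.schemas.box import CreateOpenBoxRequest
from app.services.box import BoxService
from app.services.inventory import InventoryService

box_service = BoxService(db, InventoryService(db))
request = CreateOpenBoxRequest(
    product_id="<box product id>",
    file_ids=["<scan file id>"],
    num_cards_actual=451,
    date_opened="2025-02-07",
)
opened = box_service.open_box(request.product_id, request)
print(opened.id, opened.date_opened)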
156
app/services/file.py
Normal file
@@ -0,0 +1,156 @@
from sqlalchemy.orm import Session
from typing import Optional, List, Dict, Any
from uuid import uuid4
import csv
import logging
import os
from io import StringIO

from app.db.utils import db_transaction
from app.db.models import File, StagedFileProduct
from app.schemas.file import CreateFileRequest

logger = logging.getLogger(__name__)

class FileConfig:
    """Configuration constants for file processing"""
    TEMP_DIR = os.path.join(os.getcwd(), 'app', 'temp')

    MANABOX_HEADERS = [
        'Name', 'Set code', 'Set name', 'Collector number', 'Foil',
        'Rarity', 'Quantity', 'ManaBox ID', 'Scryfall ID', 'Purchase price',
        'Misprint', 'Altered', 'Condition', 'Language', 'Purchase price currency'
    ]

    SOURCES = {
        "manabox": {
            "required_headers": MANABOX_HEADERS,
            "allowed_extensions": ['.csv'],
            "allowed_types": ['scan_export_common', 'scan_export_rare']
        }
    }

class FileValidationError(Exception):
    """Custom exception for file validation errors"""
    pass

class FileService:
    def __init__(self, db: Session):
        self.db = db

    def get_config(self, source: str) -> Dict[str, Any]:
        """Get configuration for a specific source"""
        config = FileConfig.SOURCES.get(source)
        if not config:
            raise FileValidationError(f"Unsupported source: {source}")
        return config

    def validate_file_extension(self, filename: str, config: Dict[str, Any]) -> bool:
        """Validate file extension against allowed extensions"""
        return any(filename.endswith(ext) for ext in config["allowed_extensions"])

    def validate_file_type(self, metadata: CreateFileRequest, config: Dict[str, Any]) -> bool:
        """Validate file type against allowed types"""
        return metadata.type in config["allowed_types"]

    def validate_csv(self, content: bytes, required_headers: Optional[List[str]] = None) -> bool:
        """Validate CSV content and headers"""
        try:
            csv_text = content.decode('utf-8')
            csv_file = StringIO(csv_text)
            csv_reader = csv.reader(csv_file)

            if required_headers:
                headers = next(csv_reader, None)
                if not headers or not all(header in headers for header in required_headers):
                    return False
            return True

        except (UnicodeDecodeError, csv.Error) as e:
            logger.error(f"CSV validation error: {str(e)}")
            return False

    def validate_file_content(self, content: bytes, metadata: CreateFileRequest, config: Dict[str, Any]) -> bool:
        """Validate file content based on file type"""
        extension = os.path.splitext(metadata.filename)[1].lower()
        if extension == '.csv':
            return self.validate_csv(content, config.get("required_headers"))
        return False

    def validate_file(self, content: bytes, metadata: CreateFileRequest) -> bool:
        """Validate file against all criteria"""
        config = self.get_config(metadata.source)

        if not self.validate_file_extension(metadata.filename, config):
            raise FileValidationError("Invalid file extension")

        if not self.validate_file_type(metadata, config):
            raise FileValidationError("Invalid file type")

        if not self.validate_file_content(content, metadata, config):
            raise FileValidationError("Invalid file content or headers")

        return True

    def create_file(self, content: bytes, metadata: CreateFileRequest) -> File:
        """Create a new file record and save the file"""
        with db_transaction(self.db):
            file = File(
                id=str(uuid4()),
                filename=metadata.filename,
                filepath=os.path.join(FileConfig.TEMP_DIR, metadata.filename),
                type=metadata.type,
                source=metadata.source,
                filesize_kb=round(len(content) / 1024, 2),
                status='pending',
                service=metadata.service
            )
            self.db.add(file)

        os.makedirs(FileConfig.TEMP_DIR, exist_ok=True)
        with open(file.filepath, 'wb') as f:
            f.write(content)

        return file

    def get_file(self, file_id: str) -> File:
        """Get a file by ID"""
        file = self.db.query(File).filter(File.id == file_id).first()
        if not file:
            raise FileValidationError(f"File with id {file_id} not found")
        return file

    def get_files(self, status: Optional[str] = None) -> List[File]:
        """Get all files, optionally filtered by status"""
        query = self.db.query(File)
        if status:
            query = query.filter(File.status == status)
        return query.all()

    def get_staged_products(self, file_id: str) -> List[StagedFileProduct]:
        """Get staged products for a file"""
        return self.db.query(StagedFileProduct).filter(
            StagedFileProduct.file_id == file_id
        ).all()

    def delete_file(self, file_id: str) -> File:
        """Mark a file as deleted and remove associated staged products"""
        file = self.get_file(file_id)
        staged_products = self.get_staged_products(file_id)

        with db_transaction(self.db):
            file.status = 'deleted'
            for staged_product in staged_products:
                self.db.delete(staged_product)

        return file

    def get_file_content(self, file_id: str) -> bytes:
        """Get the content of a file"""
        file = self.get_file(file_id)
        try:
            with open(file.filepath, 'rb') as f:
                return f.read()
        except IOError as e:
            logger.error(f"Error reading file {file_id}: {str(e)}")
            raise FileValidationError(f"Could not read file content for {file_id}")
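Tying FileConfig and FileService together, a minimal sketch of validating and persisting a ManaBox scan export. It assumes an open Session `db` and a local scan.csv with the required headers.

# sketch: validate and persist a ManaBox scan export via FileService
# assumes `db` is an open SQLAlchemy Session and scan.csv exists locally
from app.schemas.file import CreateFileRequest
from app.services.file import FileService, FileValidationError

service = FileService(db)
metadata = CreateFileRequest(
    source="manabox",
    type="scan_export_rare",
    filename="scan.csv",
)
with open("scan.csv", "rb") as f:
    content = f.read()

try:
    service.validate_file(content, metadata)   # checks extension, type, CSV headers
    record = service.create_file(content, metadata)
    print(record.id, record.status)            # -> <uuid> pending
except FileValidationError as e:
    print(f"rejected: {e}")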
90
app/services/inventory.py
Normal file
@@ -0,0 +1,90 @@
from typing import Dict

from sqlalchemy.orm import Session
from sqlalchemy.exc import SQLAlchemyError

from app.db.models import Product, Inventory
from app.schemas.inventory import UpdateInventoryResponse
from app.db.utils import db_transaction


class InventoryService:
    """Service class for managing product inventory operations."""

    def __init__(self, db: Session) -> None:
        """
        Initialize the InventoryService.

        Args:
            db: SQLAlchemy database session
        """
        self.db = db

    def add_inventory(self, product: Product, quantity: int) -> Inventory:
        """
        Add or update inventory for a product.

        Args:
            product: Product model instance
            quantity: Quantity to add to inventory

        Returns:
            Updated Inventory model instance
        """
        inventory = self.db.query(Inventory).filter(
            Inventory.product_id == product.id
        ).first()

        if inventory is None:
            inventory = Inventory(
                product_id=product.id,
                quantity=quantity
            )
            self.db.add(inventory)
        else:
            inventory.quantity += quantity

        return inventory

    def process_staged_products(
        self,
        product_data: Dict[Product, int]
    ) -> UpdateInventoryResponse:
        """
        Process multiple products and update their inventory.

        Args:
            product_data: Dictionary mapping Products to their quantities

        Returns:
            Response indicating success status
        """
        try:
            with db_transaction(self.db):
                for product, quantity in product_data.items():
                    self.add_inventory(product, quantity)
            return UpdateInventoryResponse(success=True)
        except SQLAlchemyError:
            return UpdateInventoryResponse(success=False)

    def add_sealed_box_to_inventory(
        self,
        product: Product,
        quantity: int
    ) -> UpdateInventoryResponse:
        """
        Add sealed box inventory for a single product.

        Args:
            product: Product model instance
            quantity: Quantity to add to inventory

        Returns:
            Response indicating success status
        """
        try:
            with db_transaction(self.db):
                self.add_inventory(product, quantity)
            return UpdateInventoryResponse(success=True)
        except SQLAlchemyError:
            return UpdateInventoryResponse(success=False)
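A tiny sketch of the add-or-update semantics described in add_inventory's docstring; it assumes an open Session `db` and at least one existing Product row.

# sketch: add_inventory upserts the quantity for a product
from app.db.models import Product
from app.services.inventory import InventoryService

service = InventoryService(db)
product = db.query(Product).first()

service.process_staged_products({product: 3})   # creates the Inventory row with quantity 3
service.process_staged_products({product: 2})   # same row, quantity becomes 5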
219
app/services/pricing.py
Normal file
@@ -0,0 +1,219 @@
from sqlalchemy.orm import Session
from app.db.models import File, CardTCGPlayer, Price
from app.services.util._dataframe import TCGPlayerPricingRow, DataframeUtil
from app.services.file import FileService
from app.services.tcgplayer import TCGPlayerService
from uuid import uuid4
from app.db.utils import db_transaction
from typing import List, Dict
import pandas as pd
import logging

logger = logging.getLogger(__name__)


class PricingService:
    def __init__(self, db: Session, file_service: FileService, tcgplayer_service: TCGPlayerService):
        self.db = db
        self.file_service = file_service
        self.tcgplayer_service = tcgplayer_service
        self.df_util = DataframeUtil()

    # Takes a TCGPlayer pricing export (all set ids) and loads it into the price table.
    # Can be run as needed or scheduled.
    def get_pricing_export_content(self, file: File = None) -> bytes:
        if file:
            file_content = self.file_service.get_file_content(file.id)
        else:
            file = self.tcgplayer_service.get_pricing_export_for_all_products()
            file_content = self.file_service.get_file_content(file.id)
        return file_content

    def load_pricing_csv_content_to_db(self, file_content: bytes):
        try:
            if not file_content:
                raise ValueError("No file content provided")

            price_types = {
                "tcg_market_price": "tcg_market_price",
                "tcg_direct_low": "tcg_direct_low",
                "tcg_low_price_with_shipping": "tcg_low_price_with_shipping",
                "tcg_low_price": "tcg_low_price",
                "tcg_marketplace_price": "listed_price"
            }

            required_columns = ["tcgplayer_id"] + list(price_types.keys())
            df = self.df_util.csv_bytes_to_df(file_content)

            # Validate columns
            missing_columns = set(required_columns) - set(df.columns)
            if missing_columns:
                raise ValueError(f"Missing required columns: {missing_columns}")

            # Process in true batches
            for i in range(0, len(df), 1000):
                batch = df.iloc[i:i+1000]
                pricing_rows = [TCGPlayerPricingRow(row) for _, row in batch.iterrows()]

                # Query cards for this batch only
                tcgplayer_ids = [row.tcgplayer_id for row in pricing_rows]
                batch_cards = self.db.query(CardTCGPlayer).filter(
                    CardTCGPlayer.tcgplayer_id.in_(tcgplayer_ids)
                ).all()

                existing_cards = {card.tcgplayer_id: card for card in batch_cards}

                new_prices = []
                for row in pricing_rows:
                    if row.tcgplayer_id not in existing_cards:
                        continue

                    card = existing_cards[row.tcgplayer_id]
                    row_prices = [
                        Price(
                            id=str(uuid4()),
                            product_id=card.product_id,
                            marketplace_id=None,
                            type=price_type,
                            price=getattr(row, col_name)
                        )
                        for col_name, price_type in price_types.items()
                        if getattr(row, col_name, None) is not None and getattr(row, col_name) > 0
                    ]
                    new_prices.extend(row_prices)

                # Save each batch separately
                if new_prices:
                    with db_transaction(self.db):
                        self.db.bulk_save_objects(new_prices)

        except Exception as e:
            logger.error(f"Failed to load pricing CSV content: {str(e)}")
            raise

    def cron_load_prices(self, file: File = None):
        file_content = self.get_pricing_export_content(file)
        self.load_pricing_csv_content_to_db(file_content)

    def get_all_prices_for_products(self, product_ids: List[str]) -> Dict[str, Dict[str, float]]:
        all_prices = self.db.query(Price).filter(
            Price.product_id.in_(product_ids)
        ).all()

        price_lookup = {}
        for price in all_prices:
            if price.product_id not in price_lookup:
                price_lookup[price.product_id] = {}
            price_lookup[price.product_id][price.type] = price.price
        return price_lookup

    def apply_price_to_df_columns(self, row: pd.Series, price_lookup: Dict[str, Dict[str, float]]) -> pd.Series:
        product_prices = price_lookup.get(row['product_id'], {})
        for price_type, price in product_prices.items():
            row[price_type] = price
        return row

    def default_pricing_algo(self, row: pd.Series) -> pd.Series:
        """Default pricing algorithm with tiered pricing rules"""
        tcg_low = row.get('tcg_low_price')
        tcg_low_shipping = row.get('tcg_low_price_with_shipping')

        if pd.isna(tcg_low) or pd.isna(tcg_low_shipping):
            logger.warning(f"Missing pricing data for row: {row}")
            row['new_price'] = None
            return row

        # Apply pricing rules
        if tcg_low < 0.35:
            new_price = 0.35
        elif tcg_low < 5 or tcg_low_shipping < 5:
            new_price = round(tcg_low * 1.25, 2)
        elif tcg_low_shipping > 25:
            new_price = round(tcg_low_shipping * 1.025, 2)
        else:
            new_price = round(tcg_low_shipping * 1.10, 2)

        row['new_price'] = new_price
        return row

    def apply_pricing_algo(self, row: pd.Series, pricing_algo: callable = None) -> pd.Series:
        """Apply a pricing algorithm to a row; defaults to default_pricing_algo"""
        if pricing_algo is None:
            pricing_algo = self.default_pricing_algo
        return pricing_algo(row)

    def generate_tcgplayer_inventory_update_file_with_pricing(self, open_box_ids: List[str] = None) -> bytes:
        desired_columns = [
            'TCGplayer Id', 'Product Line', 'Set Name', 'Product Name',
            'Title', 'Number', 'Rarity', 'Condition', 'TCG Market Price',
            'TCG Direct Low', 'TCG Low Price With Shipping', 'TCG Low Price',
            'Total Quantity', 'Add to Quantity', 'TCG Marketplace Price', 'Photo URL'
        ]

        if open_box_ids:
            # Get initial dataframe
            update_type = 'add'
            df = self.tcgplayer_service.open_box_cards_to_tcgplayer_inventory_df(open_box_ids)
        else:
            update_type = 'update'
            df = self.tcgplayer_service.get_inventory_df('live')
            # remove rows with total quantity of 0
            df = df[df['total_quantity'] != 0]
        tcgplayer_ids = df['tcgplayer_id'].unique().tolist()

        # Make a single query to get all matching records
        product_id_mapping = {
            card.tcgplayer_id: card.product_id
            for card in self.db.query(CardTCGPlayer)
            .filter(CardTCGPlayer.tcgplayer_id.in_(tcgplayer_ids))
            .all()
        }

        # Map the ids using the dictionary
        df['product_id'] = df['tcgplayer_id'].map(product_id_mapping)

        price_lookup = self.get_all_prices_for_products(df['product_id'].unique())

        # Apply price columns
        df = df.apply(lambda row: self.apply_price_to_df_columns(row, price_lookup), axis=1)

        # Apply pricing algorithm
        df = df.apply(self.apply_pricing_algo, axis=1)

        # if update type is update, remove rows where new_price == listed_price
        if update_type == 'update':
            df = df[df['new_price'] != df['listed_price']]

        # Set marketplace price
        df['TCG Marketplace Price'] = df['new_price']

        column_mapping = {
            'tcgplayer_id': 'TCGplayer Id',
            'product_line': 'Product Line',
            'set_name': 'Set Name',
            'product_name': 'Product Name',
            'title': 'Title',
            'number': 'Number',
            'rarity': 'Rarity',
            'condition': 'Condition',
            'tcg_market_price': 'TCG Market Price',
            'tcg_direct_low': 'TCG Direct Low',
            'tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
            'tcg_low_price': 'TCG Low Price',
            'total_quantity': 'Total Quantity',
            'add_to_quantity': 'Add to Quantity',
            'photo_url': 'Photo URL'
        }
        df = df.rename(columns=column_mapping)

        # Now do the column selection
        df = df[desired_columns]

        # remove any rows with no price
        #df = df[df['TCG Marketplace Price'] != 0]
        #df = df[df['TCG Marketplace Price'].notna()]

        # Convert to CSV bytes
        csv_bytes = self.df_util.df_to_csv_bytes(df)

        return csv_bytes
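To make the pricing tiers above concrete, a small standalone sketch that applies the same rules as default_pricing_algo to plain floats; the sample price points are illustrative only.

# sketch: the same tiering rules as default_pricing_algo, on plain floats
def preview_price(tcg_low: float, tcg_low_shipping: float) -> float:
    if tcg_low < 0.35:
        return 0.35                                  # floor price
    elif tcg_low < 5 or tcg_low_shipping < 5:
        return round(tcg_low * 1.25, 2)              # cheap cards: 25% over low
    elif tcg_low_shipping > 25:
        return round(tcg_low_shipping * 1.025, 2)    # expensive cards: thin margin
    else:
        return round(tcg_low_shipping * 1.10, 2)     # mid-range: 10% over shipped low

print(preview_price(0.20, 1.50))    # 0.35
print(preview_price(2.00, 3.25))    # 2.5
print(preview_price(12.00, 14.00))  # 15.4
print(preview_price(30.00, 32.00))  # 32.8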
183
app/services/product.py
Normal file
@@ -0,0 +1,183 @@
from logging import getLogger
from typing import Optional
from uuid import uuid4
from pandas import DataFrame
from sqlalchemy.orm import Session

from app.db.utils import db_transaction
from app.db.models import CardManabox, CardTCGPlayer, StagedFileProduct, TCGPlayerGroups
from app.services.util._dataframe import ManaboxRow, DataframeUtil
from app.services.file import FileService
from app.services.tcgplayer import TCGPlayerService
from app.services.storage import StorageService

logger = getLogger(__name__)


class ProductService:
    def __init__(
        self,
        db: Session,
        file_service: FileService,
        tcgplayer_service: TCGPlayerService,
        storage_service: StorageService,
    ):
        self.db = db
        self.file_service = file_service
        self.tcgplayer_service = tcgplayer_service
        self.storage_service = storage_service
        self.df_util = DataframeUtil()

    def create_staged_file_product(
        self, file_id: str, card_manabox: CardManabox, row: ManaboxRow
    ) -> StagedFileProduct:
        """Create a staged file product entry.

        Args:
            file_id: The ID of the file being processed
            card_manabox: The Manabox card details
            row: The row data from the Manabox file

        Returns:
            The created staged file product
        """
        staged_product = StagedFileProduct(
            id=str(uuid4()),
            file_id=file_id,
            product_id=card_manabox.product_id,
            quantity=row.quantity,
        )
        with db_transaction(self.db):
            self.db.add(staged_product)
        return staged_product

    def create_card_manabox(
        self, manabox_row: ManaboxRow, card_tcgplayer: CardTCGPlayer
    ) -> CardManabox:
        """Create a Manabox card entry.

        Args:
            manabox_row: The row data from the Manabox file
            card_tcgplayer: The TCGPlayer card details

        Returns:
            The created Manabox card
        """
        if not card_tcgplayer:
            group = (
                self.db.query(TCGPlayerGroups)
                .filter(TCGPlayerGroups.abbreviation == manabox_row.set_code)
                .first()
            )
            card_tcgplayer = self.tcgplayer_service.get_card_tcgplayer_from_manabox_row(
                manabox_row, group.group_id
            )

        card_manabox = CardManabox(
            product_id=card_tcgplayer.product_id,
            name=manabox_row.name,
            set_code=manabox_row.set_code,
            set_name=manabox_row.set_name,
            collector_number=manabox_row.collector_number,
            foil=manabox_row.foil,
            rarity=manabox_row.rarity,
            manabox_id=manabox_row.manabox_id,
            scryfall_id=manabox_row.scryfall_id,
            condition=manabox_row.condition,
            language=manabox_row.language,
        )

        with db_transaction(self.db):
            self.db.add(card_manabox)
        return card_manabox

    def card_manabox_lookup_create_if_not_exist(
        self, manabox_row: ManaboxRow
    ) -> Optional[CardManabox]:
        """Lookup a Manabox card or create it if it doesn't exist.

        Args:
            manabox_row: The row data from the Manabox file

        Returns:
            The existing or newly created Manabox card, or None if it cannot be resolved
        """
        card_manabox = (
            self.db.query(CardManabox)
            .filter(
                CardManabox.name == manabox_row.name,
                CardManabox.set_code == manabox_row.set_code,
                CardManabox.set_name == manabox_row.set_name,
                CardManabox.collector_number == manabox_row.collector_number,
                CardManabox.foil == manabox_row.foil,
                CardManabox.rarity == manabox_row.rarity,
                CardManabox.manabox_id == manabox_row.manabox_id,
                CardManabox.scryfall_id == manabox_row.scryfall_id,
                CardManabox.condition == manabox_row.condition,
                CardManabox.language == manabox_row.language,
            )
            .first()
        )

        if not card_manabox:
            logger.debug(f"card_manabox not found for row: {manabox_row.__dict__}")
            group = (
                self.db.query(TCGPlayerGroups)
                .filter(TCGPlayerGroups.abbreviation == manabox_row.set_code)
                .first()
            )
            if not group:
                logger.error(f"Group not found for set code: {manabox_row.set_code}")
                logger.error(f"Row data: {manabox_row.__dict__}")
                return None

            card_tcgplayer = self.tcgplayer_service.get_card_tcgplayer_from_manabox_row(
                manabox_row, group.group_id
            )
            if not card_tcgplayer:
                logger.error(f"Card not found for row: {manabox_row.__dict__}")
                return None
            card_manabox = self.create_card_manabox(manabox_row, card_tcgplayer)

        return card_manabox

    def process_manabox_df(self, df: DataFrame, file_id: str) -> None:
        """Process a Manabox dataframe.

        Args:
            df: The Manabox dataframe to process
            file_id: The ID of the file being processed
        """
        for _, row in df.iterrows():
            manabox_row = ManaboxRow(row)
            card_manabox = self.card_manabox_lookup_create_if_not_exist(manabox_row)
            if not card_manabox:
                continue
            self.create_staged_file_product(file_id, card_manabox, row)

    def bg_process_manabox_file(self, file_id: str) -> None:
        """Process a Manabox file in the background.

        Args:
            file_id: The ID of the file to process

        Raises:
            Exception: If there's an error during processing
        """
        try:
            manabox_file = self.file_service.get_file(file_id)
            manabox_df = self.df_util.file_to_df(manabox_file)
            self.process_manabox_df(manabox_df, file_id)

            with db_transaction(self.db):
                manabox_file.status = "completed"

        except Exception as e:
            with db_transaction(self.db):
                manabox_file.status = "error"
            raise e

        try:
            self.storage_service.store_staged_products_for_file(file_id)
        except Exception as e:
            logger.error(f"Error creating storage records: {str(e)}")
            raise e
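For orientation, a minimal sketch of driving this background pipeline end to end (in the app it is triggered via TaskService.process_manabox_file). Service wiring is shown explicitly; the open Session `db` and the local scan.csv are assumptions.

# sketch: staging a ManaBox scan end to end (assumes an open Session `db`)
from app.services.file import FileService
from app.services.tcgplayer import TCGPlayerService
from app.services.storage import StorageService
from app.services.product import ProductService
from app.schemas.file import CreateFileRequest

file_service = FileService(db)
product_service = ProductService(
    db,
    file_service,
    TCGPlayerService(db, file_service),
    StorageService(db),
)

# 1. persist the upload, 2. stage its rows, 3. assign storage blocks
metadata = CreateFileRequest(source="manabox", type="scan_export_common", filename="scan.csv")
with open("scan.csv", "rb") as f:
    record = file_service.create_file(f.read(), metadata)
product_service.bg_process_manabox_file(record.id)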
256
app/services/storage.py
Normal file
@@ -0,0 +1,256 @@
from uuid import uuid4
from typing import List, TypedDict
from sqlalchemy.orm import Session

from app.db.utils import db_transaction
from app.db.models import (
    Warehouse,
    User,
    StagedFileProduct,
    StorageBlock,
    ProductBlock,
    File,
    CardTCGPlayer
)

class ProductAttributes(TypedDict):
    """Attributes for a product to be stored."""
    product_id: str
    card_number: str

class StorageService:
    """Service for managing product storage and warehouse operations."""

    def __init__(self, db: Session) -> None:
        """Initialize the storage service.

        Args:
            db: SQLAlchemy database session
        """
        self.db = db

    def get_or_create_user(self, username: str) -> User:
        """Get an existing user or create a new one if not found.

        Args:
            username: Username to look up or create

        Returns:
            The existing or newly created User
        """
        user = self.db.query(User).filter(User.username == username).first()
        if user is None:
            user = User(
                id=str(uuid4()),
                username=username
            )
            with db_transaction(self.db):
                self.db.add(user)
        return user

    def get_or_create_warehouse(self) -> Warehouse:
        """Get the default warehouse or create it if it doesn't exist.

        Returns:
            The existing or newly created Warehouse
        """
        warehouse = self.db.query(Warehouse).first()
        user = self.get_or_create_user('admin')
        if warehouse is None:
            warehouse = Warehouse(
                id=str(uuid4()),
                user_id=user.id
            )
            with db_transaction(self.db):
                self.db.add(warehouse)
        return warehouse

    def get_staged_product(self, file_id: str) -> List[StagedFileProduct]:
        """Get all staged products for a given file.

        Args:
            file_id: ID of the file to get staged products for

        Returns:
            List of staged products
        """
        return self.db.query(StagedFileProduct).filter(
            StagedFileProduct.file_id == file_id
        ).all()

    def get_storage_block_name(self, warehouse: Warehouse, file_id: str) -> str:
        """Generate a unique name for a new storage block.

        Args:
            warehouse: Warehouse the block belongs to
            file_id: ID of the file being processed

        Returns:
            Unique storage block name

        Raises:
            ValueError: If no file is found with the given ID
        """
        current_file = self.db.query(File).filter(File.id == file_id).first()
        if not current_file:
            raise ValueError(f"No file found with id {file_id}")

        storage_block_type = 'rare' if 'rare' in current_file.type else 'common'
        prefix = storage_block_type[0]

        latest_block = (
            self.db.query(StorageBlock)
            .filter(
                StorageBlock.warehouse_id == warehouse.id,
                StorageBlock.type == storage_block_type
            )
            .order_by(StorageBlock.date_created.desc())
            .first()
        )

        start_number = 1 if not latest_block else int(latest_block.name[1:]) + 1

        while True:
            new_name = f"{prefix}{start_number}"
            exists = (
                self.db.query(StorageBlock)
                .filter(
                    StorageBlock.warehouse_id == warehouse.id,
                    StorageBlock.name == new_name
                )
                .first()
            )

            if not exists:
                return new_name
            start_number += 1

    def create_storage_block(self, warehouse: Warehouse, file_id: str) -> StorageBlock:
        """Create a new storage block for the given warehouse and file.

        Args:
            warehouse: Warehouse to create the block in
            file_id: ID of the file being processed

        Returns:
            Newly created StorageBlock

        Raises:
            ValueError: If no file is found with the given ID
        """
        current_file = self.db.query(File).filter(File.id == file_id).first()
        if not current_file:
            raise ValueError(f"No file found with id {file_id}")

        storage_block_type = 'rare' if 'rare' in current_file.type else 'common'

        storage_block = StorageBlock(
            id=str(uuid4()),
            warehouse_id=warehouse.id,
            name=self.get_storage_block_name(warehouse, file_id),
            type=storage_block_type
        )
        with db_transaction(self.db):
            self.db.add(storage_block)
        return storage_block

    def add_staged_product_to_product_block(
        self,
        staged_product: StagedFileProduct,
        storage_block: StorageBlock,
        product_attributes: ProductAttributes,
        block_index: int
    ) -> ProductBlock:
        """Create a new ProductBlock for a single unit of a staged product.

        Args:
            staged_product: The staged product to store
            storage_block: The block to store the product in
            product_attributes: Additional product attributes
            block_index: Index within the storage block

        Returns:
            Newly created ProductBlock
        """
        product_block = ProductBlock(
            id=str(uuid4()),
            product_id=staged_product.product_id,
            block_id=storage_block.id,
            block_index=block_index
        )

        with db_transaction(self.db):
            self.db.add(product_block)

        return product_block

    def get_staged_product_attributes_for_storage(
        self,
        staged_product: StagedFileProduct
    ) -> List[ProductAttributes]:
        """Get attributes for each unit of a staged product.

        Args:
            staged_product: The staged product to get attributes for

        Returns:
            List of attributes for each unit of the product
        """
        result = (
            self.db.query(
                StagedFileProduct.product_id,
                StagedFileProduct.quantity,
                CardTCGPlayer.number
            )
            .join(CardTCGPlayer, CardTCGPlayer.product_id == StagedFileProduct.product_id)
            .filter(StagedFileProduct.id == staged_product.id)
            .first()
        )

        if not result:
            return []

        return [
            ProductAttributes(
                product_id=result.product_id,
                card_number=result.number
            )
            for _ in range(result.quantity)
        ]

    def store_staged_products_for_file(self, file_id: str) -> StorageBlock:
        """Store all staged products for a file in a new storage block.

        Args:
            file_id: ID of the file containing staged products

        Returns:
            The newly created StorageBlock containing all products
        """
        warehouse = self.get_or_create_warehouse()
        storage_block = self.create_storage_block(warehouse, file_id)
        staged_products = self.get_staged_product(file_id)

        # Collect all product attributes first
        all_product_attributes = []
        for staged_product in staged_products:
            product_attributes_list = self.get_staged_product_attributes_for_storage(staged_product)
            for attrs in product_attributes_list:
                all_product_attributes.append((staged_product, attrs))

        # Sort by card number as integer to determine block indices
        sorted_attributes = sorted(
            all_product_attributes,
            key=lambda x: int(''.join(filter(str.isdigit, x[1]['card_number'])))
        )

        # Add products with correct block indices
        for block_index, (staged_product, product_attributes) in enumerate(sorted_attributes, 1):
            self.add_staged_product_to_product_block(
                staged_product=staged_product,
                storage_block=storage_block,
                product_attributes=product_attributes,
                block_index=block_index
            )

        return storage_block
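To illustrate the block naming and indexing conventions above (a one-letter prefix from the block type, then a 1-based index assigned after sorting by the numeric part of the collector number), a small self-contained sketch; the card numbers are made up.

# sketch: how block names and block indices come out of the logic above
storage_block_type = 'rare'            # file.type contained "rare"
prefix = storage_block_type[0]         # 'r'
print(f"{prefix}1", f"{prefix}2")      # first two rare blocks: r1, r2

# block_index ordering: sort staged card numbers numerically, then enumerate from 1
card_numbers = ["112", "7a", "45"]
ordered = sorted(card_numbers, key=lambda n: int(''.join(filter(str.isdigit, n))))
for block_index, number in enumerate(ordered, 1):
    print(block_index, number)         # 1 7a / 2 45 / 3 112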
37
app/services/task.py
Normal file
@@ -0,0 +1,37 @@
from apscheduler.schedulers.background import BackgroundScheduler
import logging
from typing import Dict, Callable
from sqlalchemy.orm import Session
from app.services.product import ProductService
from app.db.models import File
from app.services.pricing import PricingService


class TaskService:
    def __init__(self, db: Session, product_service: ProductService, pricing_service: PricingService):
        self.scheduler = BackgroundScheduler()
        self.logger = logging.getLogger(__name__)
        self.tasks: Dict[str, Callable] = {}
        self.db = db
        self.product_service = product_service
        self.pricing_service = pricing_service

    async def start(self):
        self.scheduler.start()
        self.logger.info("Task scheduler started.")
        self.register_scheduled_tasks()
        # self.pricing_service.generate_tcgplayer_inventory_update_file_with_pricing(['e20cc342-23cb-4593-89cb-56a0cb3ed3f3'])

    def register_scheduled_tasks(self):
        self.scheduler.add_job(self.hourly_pricing, 'cron', minute='45')
        self.logger.info("Scheduled tasks registered.")

    def hourly_pricing(self):
        self.logger.info("Running hourly pricing task")
        self.pricing_service.cron_load_prices()
        self.logger.info("Finished hourly pricing task")

    async def process_manabox_file(self, file: File):
        self.logger.info("Processing ManaBox file")
        self.product_service.bg_process_manabox_file(file.id)
        self.logger.info("Finished processing ManaBox file")
592
app/services/tcgplayer.py
Normal file
@@ -0,0 +1,592 @@
|
|||||||
|
from app.db.models import TCGPlayerGroups, CardTCGPlayer, Product, Card, File, Inventory, OpenBox, OpenBoxCard
|
||||||
|
import requests
|
||||||
|
from app.services.util._dataframe import TCGPlayerPricingRow, DataframeUtil, ManaboxRow
|
||||||
|
from app.services.file import FileService
|
||||||
|
from app.services.inventory import InventoryService
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
from app.db.utils import db_transaction
|
||||||
|
from uuid import uuid4 as uuid
|
||||||
|
import browser_cookie3
|
||||||
|
import webbrowser
|
||||||
|
from typing import Optional, Dict ,List
|
||||||
|
from enum import Enum
|
||||||
|
import logging
|
||||||
|
from dataclasses import dataclass
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
from datetime import datetime
|
||||||
|
import time
|
||||||
|
from typing import List, Dict, Optional
|
||||||
|
import pandas as pd
|
||||||
|
from sqlalchemy.exc import SQLAlchemyError
|
||||||
|
from app.schemas.file import CreateFileRequest
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class Browser(Enum):
|
||||||
|
"""Supported browser types for cookie extraction"""
|
||||||
|
BRAVE = "brave"
|
||||||
|
CHROME = "chrome"
|
||||||
|
FIREFOX = "firefox"
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class TCGPlayerConfig:
|
||||||
|
"""Configuration for TCGPlayer API interactions"""
|
||||||
|
tcgplayer_base_url: str = "https://store.tcgplayer.com"
|
||||||
|
tcgplayer_login_path: str = "/oauth/login"
|
||||||
|
staged_inventory_download_path: str = "/Admin/Pricing/DownloadStagedInventoryExportCSV?type=Pricing"
|
||||||
|
live_inventory_download_path = "/Admin/Pricing/DownloadMyExportCSV?type=Pricing"
|
||||||
|
pricing_export_path: str = "/admin/pricing/downloadexportcsv"
|
||||||
|
max_retries: int = 1
|
||||||
|
|
||||||
|
class TCGPlayerService:
|
||||||
|
def __init__(self, db: Session,
|
||||||
|
file_service: FileService,
|
||||||
|
config: TCGPlayerConfig=TCGPlayerConfig(),
|
||||||
|
browser_type: Browser=Browser.BRAVE):
|
||||||
|
self.db = db
|
||||||
|
self.config = config
|
||||||
|
self.browser_type = browser_type
|
||||||
|
self.cookies = None
|
||||||
|
self.previous_request_time = None
|
||||||
|
self.df_util = DataframeUtil()
|
||||||
|
self.file_service = file_service
|
||||||
|
|
||||||
|
def _insert_groups(self, groups):
|
||||||
|
for group in groups:
|
||||||
|
db_group = TCGPlayerGroups(
|
||||||
|
id=str(uuid()),
|
||||||
|
group_id=group['groupId'],
|
||||||
|
name=group['name'],
|
||||||
|
abbreviation=group['abbreviation'],
|
||||||
|
is_supplemental=group['isSupplemental'],
|
||||||
|
published_on=group['publishedOn'],
|
||||||
|
modified_on=group['modifiedOn'],
|
||||||
|
category_id=group['categoryId']
|
||||||
|
)
|
||||||
|
self.db.add(db_group)
|
||||||
|
|
||||||
|
def populate_tcgplayer_groups(self):
|
||||||
|
group_endpoint = "https://tcgcsv.com/tcgplayer/1/groups"
|
||||||
|
response = requests.get(group_endpoint)
|
||||||
|
response.raise_for_status()
|
||||||
|
groups = response.json()['results']
|
||||||
|
# manually add broken groups
|
||||||
|
manual_groups = [
|
||||||
|
{
|
||||||
|
"groupId": 2422,
|
||||||
|
"name": "Modern Horizons 2 Timeshifts",
|
||||||
|
"abbreviation": "H2R",
|
||||||
|
"isSupplemental": "0",
|
||||||
|
"publishedOn": "2018-11-08T00:00:00",
|
||||||
|
"modifiedOn": "2018-11-08T00:00:00",
|
||||||
|
"categoryId": 1
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"groupId": 52,
|
||||||
|
"name": "Store Championships",
|
||||||
|
"abbreviation": "SCH",
|
||||||
|
"isSupplemental": "1",
|
||||||
|
"publishedOn": "2007-07-14T00:00:00",
|
||||||
|
"modifiedOn": "2007-07-14T00:00:00",
|
||||||
|
"categoryId": 1
|
||||||
|
}
|
||||||
|
]
|
||||||
|
groups.extend(manual_groups)
|
||||||
|
# Insert groups into db
|
||||||
|
with db_transaction(self.db):
|
||||||
|
self._insert_groups(groups)
|
||||||
|
|
||||||
|
def get_cookies_from_file(self) -> Dict:
|
||||||
|
# check if cookies file exists
|
||||||
|
if not os.path.exists('cookies/tcg_cookies.json'):
|
||||||
|
raise ValueError("Cookies file not found")
|
||||||
|
with open('cookies/tcg_cookies.json', 'r') as f:
|
||||||
|
logger.debug("Loading cookies from file")
|
||||||
|
cookies = json.load(f)
|
||||||
|
return cookies
|
||||||
|
|
||||||
|
def _get_browser_cookies(self) -> Optional[Dict]:
|
||||||
|
"""Retrieve cookies from the specified browser"""
|
||||||
|
try:
|
||||||
|
cookie_getter = getattr(browser_cookie3, self.browser_type.value, None)
|
||||||
|
if not cookie_getter:
|
||||||
|
raise ValueError(f"Unsupported browser type: {self.browser_type.value}")
|
||||||
|
return cookie_getter()
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to get browser cookies: {str(e)}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def is_in_docker(self) -> bool:
|
||||||
|
"""Check if we're running inside a Docker container using multiple methods"""
|
||||||
|
# Method 1: Check cgroup
|
||||||
|
try:
|
||||||
|
with open('/proc/1/cgroup', 'r') as f:
|
||||||
|
content = f.read().lower()
|
||||||
|
if any(container_id in content for container_id in ['docker', 'containerd', 'kubepods']):
|
||||||
|
logger.debug("Docker detected via cgroup")
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(f"Could not read cgroup file: {e}")
|
||||||
|
|
||||||
|
# Method 2: Check /.dockerenv file
|
||||||
|
if os.path.exists('/.dockerenv'):
|
||||||
|
logger.debug("Docker detected via /.dockerenv file")
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Method 3: Check environment variables
|
||||||
|
docker_env = any(os.environ.get(var, False) for var in [
|
||||||
|
'DOCKER_CONTAINER',
|
||||||
|
'IN_DOCKER',
|
||||||
|
'KUBERNETES_SERVICE_HOST', # For k8s
|
||||||
|
'DOCKER_HOST'
|
||||||
|
])
|
||||||
|
if docker_env:
|
||||||
|
logger.debug("Docker detected via environment variables")
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Method 4: Check container runtime
|
||||||
|
try:
|
||||||
|
with open('/proc/self/mountinfo', 'r') as f:
|
||||||
|
content = f.read().lower()
|
||||||
|
if any(rt in content for rt in ['docker', 'containerd', 'kubernetes']):
|
||||||
|
logger.debug("Docker detected via mountinfo")
|
||||||
|
return True
|
||||||
|
except Exception as e:
|
||||||
|
logger.debug(f"Could not read mountinfo: {e}")
|
||||||
|
|
||||||
|
logger.debug("No Docker environment detected")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _send_request(self, url: str, method: str, data=None, except_302=False) -> requests.Response:
|
||||||
|
"""Send a request with the specified cookies"""
|
||||||
|
# Rate limiting logic
|
||||||
|
if self.previous_request_time:
|
||||||
|
time_diff = (datetime.now() - self.previous_request_time).total_seconds()
|
||||||
|
if time_diff < 10:
|
||||||
|
logger.info(f"Waiting 10 seconds before next request...")
|
||||||
|
time.sleep(10 - time_diff)
|
||||||
|
|
||||||
|
headers = self._set_headers(method)
|
||||||
|
|
||||||
|
# Move cookie initialization outside and make it more explicit
|
||||||
|
if not self.cookies:
|
||||||
|
if self.is_in_docker():
|
||||||
|
logger.debug("Running in Docker - using cookies from file")
|
||||||
|
self.cookies = self.get_cookies_from_file()
|
||||||
|
else:
|
||||||
|
logger.debug("Not in Docker - using browser cookies")
|
||||||
|
self.cookies = self._get_browser_cookies()
|
||||||
|
|
||||||
|
if not self.cookies:
|
||||||
|
raise ValueError("Failed to retrieve cookies")
|
||||||
|
|
||||||
|
try:
|
||||||
|
#logger.info(f"debug: request url {url}, method {method}, data {data}")
|
||||||
|
response = requests.request(method, url, headers=headers, cookies=self.cookies, data=data)
|
||||||
|
response.raise_for_status()
|
||||||
|
|
||||||
|
if response.status_code == 302 and not except_302:
|
||||||
|
logger.warning("Redirecting to login page...")
|
||||||
|
self._refresh_authentication()
|
||||||
|
                # retry once after re-authenticating, preserving the original request body
                return self._send_request(url, method, data=data, except_302=True)
|
||||||
|
|
||||||
|
elif response.status_code == 302 and except_302:
|
||||||
|
raise ValueError("Redirected to login page after authentication refresh")
|
||||||
|
|
||||||
|
self.previous_request_time = datetime.now()
|
||||||
|
|
||||||
|
return response
|
||||||
|
|
||||||
|
except requests.RequestException as e:
|
||||||
|
logger.error(f"Request failed: {str(e)}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _set_headers(self, method: str) -> Dict:
|
||||||
|
base_headers = {
|
||||||
|
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8',
|
||||||
|
'accept-language': 'en-US,en;q=0.8',
|
||||||
|
'priority': 'u=0, i',
|
||||||
|
'referer': 'https://store.tcgplayer.com/admin/pricing',
|
||||||
|
'sec-ch-ua': '"Not A(Brand";v="8", "Chromium";v="132", "Brave";v="132"',
|
||||||
|
'sec-ch-ua-mobile': '?0',
|
||||||
|
'sec-ch-ua-platform': '"macOS"',
|
||||||
|
'sec-fetch-dest': 'document',
|
||||||
|
'sec-fetch-mode': 'navigate',
|
||||||
|
'sec-fetch-site': 'same-origin',
|
||||||
|
'sec-fetch-user': '?1',
|
||||||
|
'sec-gpc': '1',
|
||||||
|
'upgrade-insecure-requests': '1',
|
||||||
|
'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36'
|
||||||
|
}
|
||||||
|
|
||||||
|
if method == 'POST':
|
||||||
|
post_headers = {
|
||||||
|
'cache-control': 'max-age=0',
|
||||||
|
'content-type': 'application/x-www-form-urlencoded',
|
||||||
|
'origin': 'https://store.tcgplayer.com'
|
||||||
|
}
|
||||||
|
base_headers.update(post_headers)
|
||||||
|
|
||||||
|
return base_headers
|
||||||
|
|
||||||
|
def _set_pricing_export_payload(self, set_name_ids: List[str]) -> Dict:
|
||||||
|
data = {
|
||||||
|
"PricingType": "Pricing",
|
||||||
|
"CategoryId": "1",
|
||||||
|
"SetNameIds": set_name_ids,
|
||||||
|
"ConditionIds": ["1"],
|
||||||
|
"RarityIds": ["0"],
|
||||||
|
"LanguageIds": ["1"],
|
||||||
|
"PrintingIds": ["0"],
|
||||||
|
"CompareAgainstPrice": False,
|
||||||
|
"PriceToCompare": 3,
|
||||||
|
"ValueToCompare": 1,
|
||||||
|
"PriceValueToCompare": None,
|
||||||
|
"MyInventory": False,
|
||||||
|
"ExcludeListos": False,
|
||||||
|
"ExportLowestListingNotMe": False
|
||||||
|
}
|
||||||
|
payload = "model=" + urllib.parse.quote(json.dumps(data))
|
||||||
|
return payload
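        # Illustrative note (example values, not from the original source): for
        # set_name_ids=["2422"], json.dumps(data) yields a JSON object like
        #   {"PricingType": "Pricing", "CategoryId": "1", "SetNameIds": ["2422"], ...}
        # which urllib.parse.quote percent-encodes, so the returned payload begins with
        #   model=%7B%22PricingType%22%3A%20%22Pricing%22%2C%20...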
|
||||||
|
|
||||||
|
def _refresh_authentication(self) -> None:
|
||||||
|
"""Open browser for user to refresh authentication"""
|
||||||
|
login_url = f"{self.config.tcgplayer_base_url}{self.config.tcgplayer_login_path}"
|
||||||
|
logger.info("Opening browser for authentication refresh...")
|
||||||
|
webbrowser.open(login_url)
|
||||||
|
input('Please login and press Enter to continue...')
|
||||||
|
# Clear existing cookies to force refresh
|
||||||
|
self.cookies = None
|
||||||
|
|
||||||
|
def get_inventory_df(self, version: str) -> pd.DataFrame:
|
||||||
|
if version == 'staged':
|
||||||
|
inventory_download_url = f"{self.config.tcgplayer_base_url}{self.config.staged_inventory_download_path}"
|
||||||
|
elif version == 'live':
|
||||||
|
inventory_download_url = f"{self.config.tcgplayer_base_url}{self.config.live_inventory_download_path}"
|
||||||
|
else:
|
||||||
|
raise ValueError("Invalid inventory version")
|
||||||
|
response = self._send_request(inventory_download_url, 'GET')
|
||||||
|
df = self.df_util.csv_bytes_to_df(response.content)
|
||||||
|
return df
|
||||||
|
|
||||||
|
def _get_export_csv(self, set_name_ids: List[str]) -> bytes:
|
||||||
|
"""
|
||||||
|
Download export CSV and save to specified path
|
||||||
|
Returns True if successful, False otherwise
|
||||||
|
"""
|
||||||
|
logger.info(f"Downloading pricing export from tcgplayer with ids {set_name_ids}")
|
||||||
|
payload = self._set_pricing_export_payload(set_name_ids)
|
||||||
|
export_csv_download_url = f"{self.config.tcgplayer_base_url}{self.config.pricing_export_path}"
|
||||||
|
response = self._send_request(export_csv_download_url, method='POST', data=payload)
|
||||||
|
return response.content
|
||||||
|
|
||||||
|
def create_tcgplayer_card(self, row: TCGPlayerPricingRow, group_id: int):
|
||||||
|
# if card already exists, return none
|
||||||
|
card_exists = self.db.query(CardTCGPlayer).filter(
|
||||||
|
CardTCGPlayer.tcgplayer_id == row.tcgplayer_id,
|
||||||
|
CardTCGPlayer.group_id == group_id
|
||||||
|
).first()
|
||||||
|
if card_exists:
|
||||||
|
return card_exists
|
||||||
|
# create product
|
||||||
|
product = Product(
|
||||||
|
id=str(uuid()),
|
||||||
|
type = 'card',
|
||||||
|
product_line = 'mtg'
|
||||||
|
)
|
||||||
|
# create card
|
||||||
|
card = Card(
|
||||||
|
product_id=product.id,
|
||||||
|
)
|
||||||
|
# create Cardtcgplayer
|
||||||
|
tcgcard = CardTCGPlayer(
|
||||||
|
product_id=product.id,
|
||||||
|
group_id=group_id,
|
||||||
|
tcgplayer_id=row.tcgplayer_id,
|
||||||
|
product_line=row.product_line,
|
||||||
|
set_name=row.set_name,
|
||||||
|
product_name=row.product_name,
|
||||||
|
title=row.title,
|
||||||
|
number=row.number,
|
||||||
|
rarity=row.rarity,
|
||||||
|
condition=row.condition
|
||||||
|
)
|
||||||
|
with db_transaction(self.db):
|
||||||
|
self.db.add(product)
|
||||||
|
self.db.add(card)
|
||||||
|
self.db.add(tcgcard)
|
||||||
|
return tcgcard
|
||||||
|
|
||||||
|
def create_tcgplayer_cards_batch(self, rows: list[TCGPlayerPricingRow], set_to_group: dict) -> list[CardTCGPlayer]:
|
||||||
|
# Get existing cards in a single query
|
||||||
|
existing_cards = {
|
||||||
|
(card.tcgplayer_id, card.group_id): card
|
||||||
|
for card in self.db.query(CardTCGPlayer).filter(
|
||||||
|
CardTCGPlayer.tcgplayer_id.in_([row.tcgplayer_id for row in rows]),
|
||||||
|
CardTCGPlayer.group_id.in_([set_to_group[row.set_name] for row in rows])
|
||||||
|
).all()
|
||||||
|
}
|
||||||
|
|
||||||
|
# Pre-allocate lists for better memory efficiency
|
||||||
|
new_products = []
|
||||||
|
new_cards = []
|
||||||
|
new_tcgcards = []
|
||||||
|
|
||||||
|
for row in rows:
|
||||||
|
# Get the correct group_id for this row's set
|
||||||
|
group_id = set_to_group[row.set_name]
|
||||||
|
|
||||||
|
if (row.tcgplayer_id, group_id) in existing_cards:
|
||||||
|
continue
|
||||||
|
|
||||||
|
product_id = str(uuid())
|
||||||
|
|
||||||
|
new_products.append(Product(
|
||||||
|
id=product_id,
|
||||||
|
type='card',
|
||||||
|
product_line='mtg'
|
||||||
|
))
|
||||||
|
|
||||||
|
new_cards.append(Card(
|
||||||
|
product_id=product_id,
|
||||||
|
))
|
||||||
|
|
||||||
|
new_tcgcards.append(CardTCGPlayer(
|
||||||
|
product_id=product_id,
|
||||||
|
group_id=group_id, # Use the correct group_id for this specific row
|
||||||
|
tcgplayer_id=row.tcgplayer_id,
|
||||||
|
product_line=row.product_line,
|
||||||
|
set_name=row.set_name,
|
||||||
|
product_name=row.product_name,
|
||||||
|
title=row.title,
|
||||||
|
number=row.number,
|
||||||
|
rarity=row.rarity,
|
||||||
|
condition=row.condition
|
||||||
|
))
|
||||||
|
|
||||||
|
# Batch create price objects
|
||||||
|
# row_prices = [
|
||||||
|
# Price(
|
||||||
|
# id=str(uuid()),
|
||||||
|
# product_id=product_id,
|
||||||
|
# marketplace_id=None,
|
||||||
|
# type=price_type,
|
||||||
|
# price=getattr(row, col_name)
|
||||||
|
# )
|
||||||
|
# for col_name, price_type in price_types.items()
|
||||||
|
# if getattr(row, col_name, None) is not None and getattr(row, col_name) > 0
|
||||||
|
# ]
|
||||||
|
# new_prices.extend(row_prices)
|
||||||
|
|
||||||
|
if new_products:
|
||||||
|
with db_transaction(self.db):
|
||||||
|
self.db.bulk_save_objects(new_products)
|
||||||
|
self.db.bulk_save_objects(new_cards)
|
||||||
|
self.db.bulk_save_objects(new_tcgcards)
|
||||||
|
# if new_prices:
|
||||||
|
# self.db.bulk_save_objects(new_prices)
|
||||||
|
|
||||||
|
return new_tcgcards
|
||||||
|
|
||||||
|
def load_export_csv_to_card_tcgplayer(self, export_csv: bytes, file_id: str = None, batch_size: int = 1000) -> None:
|
||||||
|
try:
|
||||||
|
if not export_csv:
|
||||||
|
raise ValueError("No export CSV provided")
|
||||||
|
|
||||||
|
df = self.df_util.csv_bytes_to_df(export_csv)
|
||||||
|
|
||||||
|
logger.debug(f"Loaded {len(df)} rows from export CSV")
|
||||||
|
|
||||||
|
# Get all group_ids upfront in a single query
|
||||||
|
set_to_group = dict(
|
||||||
|
self.db.query(TCGPlayerGroups.name, TCGPlayerGroups.group_id).all()
|
||||||
|
)
|
||||||
|
|
||||||
|
# Process in batches
|
||||||
|
for i in range(0, len(df), batch_size):
|
||||||
|
batch_df = df.iloc[i:i + batch_size]
|
||||||
|
batch_rows = [TCGPlayerPricingRow(row) for _, row in batch_df.iterrows()]
|
||||||
|
|
||||||
|
# Filter rows with valid group_ids
|
||||||
|
valid_rows = [
|
||||||
|
row for row in batch_rows
|
||||||
|
if row.set_name in set_to_group
|
||||||
|
]
|
||||||
|
|
||||||
|
# logger.debug(f"Processing batch {i // batch_size + 1}: {len(valid_rows)} valid rows")
|
||||||
|
|
||||||
|
if valid_rows:
|
||||||
|
# Pass the entire set_to_group mapping
|
||||||
|
self.create_tcgplayer_cards_batch(valid_rows, set_to_group)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to load export CSV: {e}")
|
||||||
|
# set file upload to failed
|
||||||
|
if file_id:
|
||||||
|
with db_transaction(self.db):
|
||||||
|
file = self.db.query(File).filter(File.id == file_id).first()
|
||||||
|
if file:
|
||||||
|
file.status = 'failed'
|
||||||
|
self.db.add(file)
|
||||||
|
raise
|
||||||
|
        else:
            # Only mark the file upload as completed when processing succeeded,
            # so a failed run keeps the 'failed' status set in the except branch.
            if file_id:
                with db_transaction(self.db):
                    file = self.db.query(File).filter(File.id == file_id).first()
                    if file:
                        file.status = 'completed'
                        self.db.add(file)
|
||||||
|
|
||||||
|
|
||||||
|
def get_card_tcgplayer_from_manabox_row(self, card: ManaboxRow, group_id: int) -> CardTCGPlayer:
|
||||||
|
# Expanded rarity mapping
|
||||||
|
mb_to_tcg_rarity_mapping = {
|
||||||
|
"common": "C",
|
||||||
|
"uncommon": "U",
|
||||||
|
"rare": "R",
|
||||||
|
"mythic": "M",
|
||||||
|
"special": "S"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Mapping from Manabox condition+foil to TCGPlayer condition
|
||||||
|
mb_to_tcg_condition_mapping = {
|
||||||
|
("near_mint", "foil"): "Near Mint Foil",
|
||||||
|
("near_mint", "normal"): "Near Mint",
|
||||||
|
("near_mint", "etched"): "Near Mint Foil"
|
||||||
|
}
|
||||||
|
|
||||||
|
# Get TCGPlayer condition from Manabox condition+foil combination
|
||||||
|
tcg_condition = mb_to_tcg_condition_mapping.get((card.condition, card.foil))
|
||||||
|
if tcg_condition is None:
|
||||||
|
logger.error(f"Unsupported condition/foil combination: {card.condition}, {card.foil}")
|
||||||
|
logger.error(f"Card details: name={card.name}, set_name={card.set_name}, collector_number={card.collector_number}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Get TCGPlayer rarity from Manabox rarity
|
||||||
|
tcg_rarity = mb_to_tcg_rarity_mapping.get(card.rarity)
|
||||||
|
if tcg_rarity is None:
|
||||||
|
logger.error(f"Unsupported rarity: {card.rarity}")
|
||||||
|
logger.error(f"Card details: name={card.name}, set_name={card.set_name}, collector_number={card.collector_number}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# First query for matching products without rarity filter
|
||||||
|
# debug
|
||||||
|
# log everything in this query
|
||||||
|
# remove letters from card.collector_number FOR JOIN ONLY
|
||||||
|
join_collector_number = ''.join(filter(str.isdigit, card.collector_number))
|
||||||
|
# logger.debug(f"Querying for card: {card.name}, {card.set_code}, {card.collector_number}, {tcg_condition}, {group_id}")
|
||||||
|
base_query = self.db.query(CardTCGPlayer).filter(
|
||||||
|
CardTCGPlayer.number == join_collector_number,
|
||||||
|
CardTCGPlayer.condition == tcg_condition,
|
||||||
|
CardTCGPlayer.group_id == group_id,
|
||||||
|
CardTCGPlayer.rarity != "T" # TOKENS ARE NOT SUPPORTED CUZ BROKE LOL
|
||||||
|
)
|
||||||
|
|
||||||
|
# logger.debug(f"Base query: {base_query.statement.compile(compile_kwargs={'literal_binds': True})}")
|
||||||
|
|
||||||
|
# Get all potential matches
|
||||||
|
products = base_query.all()
|
||||||
|
|
||||||
|
# If no products found, return None
|
||||||
|
if not products:
|
||||||
|
logger.error(f"No matching TCGPlayer product found for card {card.name} ({card.set_code} {card.collector_number})")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Look for an exact match including rarity, unless the TCGPlayer product is a land
|
||||||
|
for product in products:
|
||||||
|
if product.rarity == "L" or product.rarity == tcg_rarity:
|
||||||
|
return product
|
||||||
|
|
||||||
|
# ignore rarity, just make sure only one product is returned
|
||||||
|
if len(products) > 1:
|
||||||
|
# try to match on name before failing
|
||||||
|
for product in products:
|
||||||
|
if product.product_name == card.name:
|
||||||
|
return product
|
||||||
|
elif len(products) == 1:
|
||||||
|
return products[0]
|
||||||
|
|
||||||
|
logger.error(f"Multiple matching TCGPlayer products found for card {card.name} ({card.set_code} {card.collector_number})")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# If we got here, we found products but none matched our rarity criteria
|
||||||
|
# logger.error(f"No matching TCGPlayer product with correct rarity found for card {card.name} {card.rarity} {group_id} ({card.set_name} {card.collector_number})")
|
||||||
|
# return None
|
||||||
|
|
||||||
|
def get_pricing_export_for_all_products(self) -> File:
|
||||||
|
"""
|
||||||
|
"""
|
||||||
|
DEBUG = False
|
||||||
|
if DEBUG:
|
||||||
|
logger.debug("DEBUG: Using existing pricing export file")
|
||||||
|
file = self.db.query(File).filter(File.type == 'tcgplayer_pricing_export').first()
|
||||||
|
if file:
|
||||||
|
return file
|
||||||
|
try:
|
||||||
|
all_group_ids = self.db.query(TCGPlayerGroups.group_id).all()
|
||||||
|
all_group_ids = [str(group_id) for group_id, in all_group_ids]
|
||||||
|
export_csv = self._get_export_csv(all_group_ids)
|
||||||
|
export_csv_file = self.file_service.create_file(export_csv, CreateFileRequest(
|
||||||
|
source="tcgplayer",
|
||||||
|
type="tcgplayer_pricing_export",
|
||||||
|
filename="tcgplayer_pricing_export.csv"
|
||||||
|
))
|
||||||
|
return export_csv_file
|
||||||
|
except SQLAlchemyError as e:
|
||||||
|
raise RuntimeError(f"Failed to retrieve group IDs: {str(e)}")
|
||||||
|
|
||||||
|
def load_tcgplayer_cards(self) -> File:
|
||||||
|
try:
|
||||||
|
# Get pricing export
|
||||||
|
export_csv_file = self.get_pricing_export_for_all_products()
|
||||||
|
export_csv = self.file_service.get_file_content(export_csv_file.id)
|
||||||
|
|
||||||
|
# load to card tcgplayer
|
||||||
|
self.load_export_csv_to_card_tcgplayer(export_csv, export_csv_file.id)
|
||||||
|
|
||||||
|
return export_csv_file
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to load prices: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def open_box_cards_to_tcgplayer_inventory_df(self, open_box_ids: List[str]) -> pd.DataFrame:
|
||||||
|
tcgcards = (self.db.query(OpenBoxCard, CardTCGPlayer)
|
||||||
|
.filter(OpenBoxCard.open_box_id.in_(open_box_ids))
|
||||||
|
.join(CardTCGPlayer, OpenBoxCard.card_id == CardTCGPlayer.product_id)
|
||||||
|
.all())
|
||||||
|
|
||||||
|
if not tcgcards:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Create dataframe
|
||||||
|
df = pd.DataFrame([(tcg.product_id, tcg.tcgplayer_id, tcg.product_line, tcg.set_name, tcg.product_name,
|
||||||
|
tcg.title, tcg.number, tcg.rarity, tcg.condition, obc.quantity)
|
||||||
|
for obc, tcg in tcgcards],
|
||||||
|
columns=['product_id', 'tcgplayer_id', 'product_line', 'set_name', 'product_name',
|
||||||
|
'title', 'number', 'rarity', 'condition', 'quantity'])
|
||||||
|
|
||||||
|
# Add empty columns
|
||||||
|
df['Total Quantity'] = ''
|
||||||
|
df['Add to Quantity'] = df['quantity']
|
||||||
|
df['TCG Marketplace Price'] = ''
|
||||||
|
df['Photo URL'] = ''
|
||||||
|
|
||||||
|
# Rename columns
|
||||||
|
df = df.rename(columns={
|
||||||
|
'tcgplayer_id': 'TCGplayer Id',
|
||||||
|
'product_line': 'Product Line',
|
||||||
|
'set_name': 'Set Name',
|
||||||
|
'product_name': 'Product Name',
|
||||||
|
'title': 'Title',
|
||||||
|
'number': 'Number',
|
||||||
|
'rarity': 'Rarity',
|
||||||
|
'condition': 'Condition'
|
||||||
|
})
|
||||||
|
|
||||||
|
return df
|
||||||
|
|
||||||
|
|
||||||
|
|
500 app/services/unholy_pricing.py Normal file
@@ -0,0 +1,500 @@
from dataclasses import dataclass
from decimal import Decimal, ROUND_HALF_UP
from enum import Enum
from typing import Optional, Dict, List, Any
import pandas as pd
import logging
from db.models import Product, Price
from sqlalchemy.orm import Session
from uuid import uuid4 as uuid
from datetime import datetime
from concurrent.futures import ThreadPoolExecutor, as_completed
from sqlalchemy import text
from services.util._dataframe import DataframeUtil


logger = logging.getLogger(__name__)


class PriceType(str, Enum):
    TCG_MARKET = 'tcg_market_price'
    TCG_DIRECT_LOW = 'tcg_direct_low'
    TCG_LOW_WITH_SHIPPING = 'tcg_low_price_with_shipping'
    TCG_LOW = 'tcg_low_price'
    TCG_MARKETPLACE = 'tcg_marketplace_price'
    MY_PRICE = 'my_price'


class PricingStrategy(str, Enum):
    DEFAULT = 'default'
    AGGRESSIVE = 'aggressive'
    CONSERVATIVE = 'conservative'


@dataclass
class PriceRange:
    min_price: Decimal
    max_price: Decimal
    multiplier: Decimal
    ceiling_price: Optional[Decimal] = None
    include_shipping: bool = False

    def __post_init__(self):
        # Convert all values to Decimal for precise calculations
        self.min_price = Decimal(str(self.min_price))
        self.max_price = Decimal(str(self.max_price))
        self.multiplier = Decimal(str(self.multiplier))
        if self.ceiling_price is not None:
            self.ceiling_price = Decimal(str(self.ceiling_price))

    def contains_price(self, price: Decimal) -> bool:
        """Check if a price falls within this range, inclusive of min, exclusive of max."""
        return self.min_price <= price < self.max_price

    def calculate_price(self, base_price: Decimal) -> Decimal:
        """Calculate the final price for this range, respecting ceiling."""
        calculated = base_price * self.multiplier
        if self.ceiling_price is not None:
            calculated = min(calculated, self.ceiling_price)
        return calculated.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP)
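    # Worked example (an illustrative sketch added for clarity; values assume the
    # 5-to-10 dollar tier defined in PricingConfiguration below, multiplier 1.15,
    # ceiling 9.99):
    #   PriceRange(5, 10, 1.15, ceiling_price=Decimal('9.99')).calculate_price(Decimal('7.40'))
    # returns Decimal('8.51'); a base price of 9.00 would be capped at the 9.99 ceiling.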
|
||||||
|
|
||||||
|
class PricingConfiguration:
|
||||||
|
"""Centralized configuration for pricing rules and thresholds."""
|
||||||
|
|
||||||
|
# Price thresholds
|
||||||
|
FLOOR_PRICE = Decimal('0.35')
|
||||||
|
MAX_PRICE = Decimal('100000.00') # Safety cap for maximum price
|
||||||
|
SHIPPING_THRESHOLD = Decimal('5.00')
|
||||||
|
|
||||||
|
# Multipliers
|
||||||
|
FLOOR_MULT = Decimal('1.25')
|
||||||
|
NEAR_FLOOR_MULT = Decimal('1.25')
|
||||||
|
UNDER_FIVE_MULT = Decimal('1.25')
|
||||||
|
FIVE_TO_TEN_MULT = Decimal('1.15')
|
||||||
|
TEN_TO_TWENTYFIVE_MULT = Decimal('1.10')
|
||||||
|
TWENTYFIVE_TO_FIFTY_MULT = Decimal('1.05')
|
||||||
|
FIFTY_PLUS_MULT = Decimal('1.025')
|
||||||
|
|
||||||
|
# Price variance thresholds
|
||||||
|
MAX_PRICE_VARIANCE = Decimal('0.50') # Maximum allowed variance between prices as a ratio
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def get_price_ranges(cls) -> list[PriceRange]:
|
||||||
|
"""Get the list of price ranges with their respective rules."""
|
||||||
|
return [
|
||||||
|
PriceRange(
|
||||||
|
min_price=Decimal('0'),
|
||||||
|
max_price=cls.FLOOR_PRICE,
|
||||||
|
multiplier=cls.FLOOR_MULT,
|
||||||
|
include_shipping=False
|
||||||
|
),
|
||||||
|
PriceRange(
|
||||||
|
min_price=cls.FLOOR_PRICE,
|
||||||
|
max_price=Decimal('5'),
|
||||||
|
multiplier=cls.UNDER_FIVE_MULT,
|
||||||
|
ceiling_price=Decimal('4.99'),
|
||||||
|
include_shipping=False
|
||||||
|
),
|
||||||
|
PriceRange(
|
||||||
|
min_price=Decimal('5'),
|
||||||
|
max_price=Decimal('10'),
|
||||||
|
multiplier=cls.FIVE_TO_TEN_MULT,
|
||||||
|
ceiling_price=Decimal('9.99'),
|
||||||
|
include_shipping=True
|
||||||
|
),
|
||||||
|
PriceRange(
|
||||||
|
min_price=Decimal('10'),
|
||||||
|
max_price=Decimal('25'),
|
||||||
|
multiplier=cls.TEN_TO_TWENTYFIVE_MULT,
|
||||||
|
ceiling_price=Decimal('24.99'),
|
||||||
|
include_shipping=True
|
||||||
|
),
|
||||||
|
PriceRange(
|
||||||
|
min_price=Decimal('25'),
|
||||||
|
max_price=Decimal('50'),
|
||||||
|
multiplier=cls.TWENTYFIVE_TO_FIFTY_MULT,
|
||||||
|
ceiling_price=Decimal('49.99'),
|
||||||
|
include_shipping=True
|
||||||
|
),
|
||||||
|
PriceRange(
|
||||||
|
min_price=Decimal('50'),
|
||||||
|
max_price=cls.MAX_PRICE,
|
||||||
|
multiplier=cls.FIFTY_PLUS_MULT,
|
||||||
|
include_shipping=True
|
||||||
|
)
|
||||||
|
]
|
||||||
|
|
||||||
|
class PriceCalculationResult:
|
||||||
|
"""Represents the result of a price calculation."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
product: Product,
|
||||||
|
calculated_price: Optional[Decimal],
|
||||||
|
base_prices: Dict[str, Decimal],
|
||||||
|
error: Optional[str] = None
|
||||||
|
):
|
||||||
|
self.product = product
|
||||||
|
self.calculated_price = calculated_price
|
||||||
|
self.base_prices = base_prices
|
||||||
|
self.error = error
|
||||||
|
|
||||||
|
@property
|
||||||
|
def success(self) -> bool:
|
||||||
|
return self.calculated_price is not None and self.error is None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def max_base_price(self) -> Optional[Decimal]:
|
||||||
|
"""Returns the highest base price."""
|
||||||
|
return max(self.base_prices.values()) if self.base_prices else None
|
||||||
|
|
||||||
|
|
||||||
|
class PricingService:
|
||||||
|
CHUNK_SIZE = 5000 # Configurable batch size
|
||||||
|
MAX_WORKERS = 4 # Configurable worker count
|
||||||
|
|
||||||
|
def __init__(self, db: Session):
|
||||||
|
self.db = db
|
||||||
|
self.df_util = DataframeUtil()
|
||||||
|
self.config = PricingConfiguration
|
||||||
|
self.price_ranges = self.config.get_price_ranges()
|
||||||
|
|
||||||
|
    def get_product_by_id(self, product_id: str) -> Optional[Product]:
        """Get a product by its ID, returning None if it does not exist."""
        return self.db.query(Product)\
            .filter(Product.id == str(product_id))\
            .first()
|
||||||
|
|
||||||
|
def get_latest_price_for_product(self, product: Product, price_type: PriceType) -> Optional[Price]:
|
||||||
|
"""Get the most recent price of a specific type for a product."""
|
||||||
|
prices = self.db.query(Price)\
|
||||||
|
.filter(
|
||||||
|
Price.product_id == str(product.id),
|
||||||
|
Price.type == price_type.value
|
||||||
|
)\
|
||||||
|
.order_by(Price.date_created.desc())\
|
||||||
|
.all()
|
||||||
|
return prices[0] if prices else None
|
||||||
|
|
||||||
|
def get_historical_prices_for_product(
|
||||||
|
self, product: Product, price_type: Optional[PriceType] = None
|
||||||
|
) -> dict[PriceType, list[Price]]:
|
||||||
|
"""Get historical prices for a product, optionally filtered by type."""
|
||||||
|
query = self.db.query(Price).filter(Price.product_id == str(product.id))
|
||||||
|
|
||||||
|
if price_type:
|
||||||
|
query = query.filter(Price.type == price_type.value) # Fixed: Use enum value
|
||||||
|
|
||||||
|
prices = query.order_by(Price.date_created.desc()).all()
|
||||||
|
|
||||||
|
if price_type:
|
||||||
|
return {price_type: prices}
|
||||||
|
|
||||||
|
# Group prices by type
|
||||||
|
result = {t: [] for t in PriceType}
|
||||||
|
for price in prices:
|
||||||
|
result[PriceType(price.type)].append(price) # Fixed: Convert string to enum
|
||||||
|
return result
|
||||||
|
|
||||||
|
def _validate_price_data(self, prices: dict[str, Optional[Price]]) -> Optional[str]:
|
||||||
|
"""Validate price data and return error message if invalid."""
|
||||||
|
# Filter out None values and get valid prices
|
||||||
|
valid_prices = {k: v for k, v in prices.items() if v is not None}
|
||||||
|
|
||||||
|
if not valid_prices:
|
||||||
|
return "No valid price data available"
|
||||||
|
|
||||||
|
for price in valid_prices.values():
|
||||||
|
if price.price < 0:
|
||||||
|
return f"Negative price found: {price.price}"
|
||||||
|
if price.price > self.config.MAX_PRICE:
|
||||||
|
return f"Price exceeds maximum allowed: {price.price}"
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _check_price_variance(self, prices: Dict[str, Decimal]) -> bool:
|
||||||
|
"""Check if the variance between prices is within acceptable limits."""
|
||||||
|
if not prices:
|
||||||
|
return True
|
||||||
|
|
||||||
|
min_price = min(prices.values())
|
||||||
|
max_price = max(prices.values())
|
||||||
|
|
||||||
|
if min_price == 0:
|
||||||
|
return False
|
||||||
|
|
||||||
|
variance_ratio = max_price / min_price
|
||||||
|
return variance_ratio <= (1 + self.config.MAX_PRICE_VARIANCE)
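        # Worked example (an illustrative sketch added for clarity): with
        # MAX_PRICE_VARIANCE = 0.50 the allowed max/min ratio is 1.5, so
        # {'tcg_low_price': Decimal('2.00'), 'tcg_market_price': Decimal('2.90')} passes
        # (ratio 1.45), while {'tcg_low_price': Decimal('2.00'), 'tcg_market_price': Decimal('3.20')}
        # is rejected (ratio 1.60).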
|
||||||
|
|
||||||
|
def _get_relevant_prices(self, product: Product) -> dict[str, Optional[Price]]:
|
||||||
|
"""Get all relevant prices for a product."""
|
||||||
|
return {
|
||||||
|
PriceType.TCG_LOW.value: self.get_latest_price_for_product(product, PriceType.TCG_LOW),
|
||||||
|
PriceType.TCG_DIRECT_LOW.value: self.get_latest_price_for_product(product, PriceType.TCG_DIRECT_LOW),
|
||||||
|
PriceType.TCG_MARKET.value: self.get_latest_price_for_product(product, PriceType.TCG_MARKET),
|
||||||
|
PriceType.TCG_LOW_WITH_SHIPPING.value: self.get_latest_price_for_product(product, PriceType.TCG_LOW_WITH_SHIPPING)
|
||||||
|
}
|
||||||
|
|
||||||
|
def _get_base_prices(
|
||||||
|
self, prices: dict[str, Price], include_shipping: bool = False
|
||||||
|
) -> Dict[str, Decimal]:
|
||||||
|
"""Get base prices, excluding None values."""
|
||||||
|
base_prices = {}
|
||||||
|
|
||||||
|
# Add core prices if they exist
|
||||||
|
if tcg_low := prices.get(PriceType.TCG_LOW.value):
|
||||||
|
base_prices[PriceType.TCG_LOW.value] = Decimal(str(tcg_low.price))
|
||||||
|
if tcg_direct := prices.get(PriceType.TCG_DIRECT_LOW.value):
|
||||||
|
base_prices[PriceType.TCG_DIRECT_LOW.value] = Decimal(str(tcg_direct.price))
|
||||||
|
if tcg_market := prices.get(PriceType.TCG_MARKET.value):
|
||||||
|
base_prices[PriceType.TCG_MARKET.value] = Decimal(str(tcg_market.price))
|
||||||
|
|
||||||
|
# Add shipping price if requested and available
|
||||||
|
if include_shipping:
|
||||||
|
if tcg_shipping := prices.get(PriceType.TCG_LOW_WITH_SHIPPING.value):
|
||||||
|
base_prices[PriceType.TCG_LOW_WITH_SHIPPING.value] = Decimal(str(tcg_shipping.price))
|
||||||
|
|
||||||
|
return base_prices
|
||||||
|
|
||||||
|
def _get_price_range(self, price: Decimal) -> Optional[PriceRange]:
|
||||||
|
"""Get the appropriate price range for a given price."""
|
||||||
|
for price_range in self.price_ranges:
|
||||||
|
if price_range.contains_price(price):
|
||||||
|
return price_range
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _handle_floor_price_cases(
|
||||||
|
self, base_prices: Dict[str, Decimal]
|
||||||
|
) -> Optional[Decimal]:
|
||||||
|
"""Handle special cases for prices near or below floor price."""
|
||||||
|
if all(price < self.config.FLOOR_PRICE for price in base_prices.values()):
|
||||||
|
return self.config.FLOOR_PRICE
|
||||||
|
|
||||||
|
if any(price < self.config.FLOOR_PRICE for price in base_prices.values()):
|
||||||
|
max_price = max(base_prices.values())
|
||||||
|
return max_price * self.config.NEAR_FLOOR_MULT
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def calculate_price(
|
||||||
|
self, product_id: str, strategy: PricingStrategy = PricingStrategy.DEFAULT
|
||||||
|
) -> PriceCalculationResult:
|
||||||
|
"""Calculate the final price for a product using the specified pricing strategy."""
|
||||||
|
# get product
|
||||||
|
product = self.get_product_by_id(str(product_id)) # Fixed: Ensure string UUID
|
||||||
|
if not product:
|
||||||
|
logger.error(f"Product not found: {product_id}")
|
||||||
|
return PriceCalculationResult(product, None, {}, "Product not found")
|
||||||
|
|
||||||
|
# Get all relevant prices
|
||||||
|
prices = self._get_relevant_prices(product)
|
||||||
|
|
||||||
|
# Validate price data
|
||||||
|
if error := self._validate_price_data(prices):
|
||||||
|
logger.error(f"Invalid price data: {error}")
|
||||||
|
logger.error(f"product: {product.id}")
|
||||||
|
return PriceCalculationResult(product, None, {}, error)
|
||||||
|
|
||||||
|
# Get initial base prices without shipping
|
||||||
|
base_prices = self._get_base_prices(prices, include_shipping=False)
|
||||||
|
|
||||||
|
# Check price variance
|
||||||
|
if not self._check_price_variance(base_prices):
|
||||||
|
logger.error(f"Price variance exceeds acceptable threshold")
|
||||||
|
logger.error(f"Base prices: {base_prices}")
|
||||||
|
logger.error(f"product: {product.id}")
|
||||||
|
return PriceCalculationResult(
|
||||||
|
product, None, base_prices,
|
||||||
|
"Price variance exceeds acceptable threshold"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Handle floor price cases
|
||||||
|
if floor_price := self._handle_floor_price_cases(base_prices):
|
||||||
|
return PriceCalculationResult(product, floor_price, base_prices)
|
||||||
|
|
||||||
|
# Get max base price and its range
|
||||||
|
max_base_price = max(base_prices.values())
|
||||||
|
price_range = self._get_price_range(max_base_price)
|
||||||
|
|
||||||
|
if not price_range:
|
||||||
|
logger.error(f"No valid price range found for price")
|
||||||
|
logger.error(f"Base prices: {base_prices}, max_base_price: {max_base_price}")
|
||||||
|
logger.error(f"product: {product.id}")
|
||||||
|
return PriceCalculationResult(
|
||||||
|
product, None, base_prices,
|
||||||
|
f"No valid price range found for price: {max_base_price}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Include shipping prices if necessary
|
||||||
|
if price_range.include_shipping:
|
||||||
|
base_prices = self._get_base_prices(prices, include_shipping=True)
|
||||||
|
max_base_price = max(base_prices.values())
|
||||||
|
|
||||||
|
# Recheck price range with shipping
|
||||||
|
price_range = self._get_price_range(max_base_price)
|
||||||
|
|
||||||
|
if not price_range:
|
||||||
|
logger.error(f"No valid price range found for price with shipping")
|
||||||
|
logger.error(f"Base prices: {base_prices}, max_base_price: {max_base_price}")
|
||||||
|
logger.error(f"product: {product.id}")
|
||||||
|
return PriceCalculationResult(
|
||||||
|
product, None, base_prices,
|
||||||
|
f"No valid price range found for price with shipping: {max_base_price}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Calculate final price using the price range
|
||||||
|
calculated_price = price_range.calculate_price(max_base_price)
|
||||||
|
|
||||||
|
# Apply strategy-specific adjustments
|
||||||
|
if strategy == PricingStrategy.AGGRESSIVE:
|
||||||
|
calculated_price *= Decimal('0.95')
|
||||||
|
elif strategy == PricingStrategy.CONSERVATIVE:
|
||||||
|
calculated_price *= Decimal('1.05')
|
||||||
|
|
||||||
|
debug_base_prices_with_name_string = ", ".join([f"{k}: {v}" for k, v in base_prices.items()])
|
||||||
|
|
||||||
|
logger.debug(f"Set price for to {calculated_price.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP)} based on {debug_base_prices_with_name_string}")
|
||||||
|
|
||||||
|
return PriceCalculationResult(
|
||||||
|
product,
|
||||||
|
calculated_price.quantize(Decimal('0.01'), rounding=ROUND_HALF_UP),
|
||||||
|
base_prices
|
||||||
|
)
|
||||||
|
|
||||||
|
def _bulk_generate_uuids(self, size: int) -> List[str]:
|
||||||
|
"""Generate UUIDs in bulk for better performance."""
|
||||||
|
return [str(uuid()) for _ in range(size)]
|
||||||
|
|
||||||
|
def _prepare_price_records(self, df: pd.DataFrame, price_type: str, uuids: List[str]) -> List[Dict]:
|
||||||
|
"""Prepare price records in bulk using vectorized operations."""
|
||||||
|
records = []
|
||||||
|
df['price_id'] = uuids[:len(df)]
|
||||||
|
df['type'] = price_type # price_type should already be a string value
|
||||||
|
df['date_created'] = datetime.utcnow()
|
||||||
|
|
||||||
|
return df[['price_id', 'product_id', 'type', 'price', 'date_created']].to_dict('records')
|
||||||
|
|
||||||
|
def _calculate_suggested_prices_batch(self, product_ids: List[str]) -> Dict[str, float]:
|
||||||
|
"""Calculate suggested prices in parallel for a batch of products."""
|
||||||
|
with ThreadPoolExecutor(max_workers=self.MAX_WORKERS) as executor:
|
||||||
|
future_to_id = {
|
||||||
|
executor.submit(self.calculate_price, str(pid)): pid # Fixed: Ensure string UUID
|
||||||
|
for pid in product_ids
|
||||||
|
}
|
||||||
|
|
||||||
|
results = {}
|
||||||
|
for future in as_completed(future_to_id):
|
||||||
|
product_id = future_to_id[future]
|
||||||
|
try:
|
||||||
|
result = future.result()
|
||||||
|
if result.success:
|
||||||
|
results[str(product_id)] = float(result.calculated_price) # Fixed: Ensure string UUID
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to calculate price for product {product_id}: {e}")
|
||||||
|
|
||||||
|
return results
|
||||||
|
|
||||||
|
def _bulk_insert_prices(self, records: List[Dict]) -> None:
|
||||||
|
"""Efficiently insert price records in bulk."""
|
||||||
|
if not records:
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
df = pd.DataFrame(records)
|
||||||
|
df.to_sql('prices', self.db.bind,
|
||||||
|
if_exists='append',
|
||||||
|
index=False,
|
||||||
|
method='multi',
|
||||||
|
chunksize=self.CHUNK_SIZE)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to bulk insert prices: {e}")
|
||||||
|
raise
|
||||||
|
|
||||||
|
def process_pricing_export(self, export_csv: bytes) -> None:
|
||||||
|
"""Process pricing export with optimized bulk operations."""
|
||||||
|
try:
|
||||||
|
# Convert CSV to DataFrame
|
||||||
|
df = self.df_util.csv_bytes_to_df(export_csv)
|
||||||
|
df.columns = df.columns.str.lower().str.replace(' ', '_')
|
||||||
|
|
||||||
|
# Get product mappings efficiently - SQLite compatible with chunking
|
||||||
|
SQLITE_MAX_VARS = 999 # SQLite parameter limit
|
||||||
|
tcgplayer_ids = df['tcgplayer_id'].tolist()
|
||||||
|
all_product_dfs = []
|
||||||
|
|
||||||
|
for i in range(0, len(tcgplayer_ids), SQLITE_MAX_VARS):
|
||||||
|
chunk_ids = tcgplayer_ids[i:i + SQLITE_MAX_VARS]
|
||||||
|
placeholders = ','.join([':id_' + str(j) for j in range(len(chunk_ids))])
|
||||||
|
product_query = f"""
|
||||||
|
SELECT tcgplayer_id, product_id
|
||||||
|
FROM card_tcgplayer
|
||||||
|
WHERE tcgplayer_id IN ({placeholders})
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Create a dictionary of parameters
|
||||||
|
params = {f'id_{j}': id_val for j, id_val in enumerate(chunk_ids)}
|
||||||
|
|
||||||
|
chunk_df = pd.read_sql(
|
||||||
|
text(product_query),
|
||||||
|
self.db.bind,
|
||||||
|
params=params
|
||||||
|
)
|
||||||
|
all_product_dfs.append(chunk_df)
|
||||||
|
|
||||||
|
# Combine all chunks
|
||||||
|
product_df = pd.concat(all_product_dfs) if all_product_dfs else pd.DataFrame()
|
||||||
|
|
||||||
|
# Merge dataframes efficiently
|
||||||
|
merged_df = pd.merge(
|
||||||
|
df,
|
||||||
|
product_df,
|
||||||
|
on='tcgplayer_id',
|
||||||
|
how='inner'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Define price columns mapping - using enum values directly
|
||||||
|
price_columns = {
|
||||||
|
'tcg_market_price': PriceType.TCG_MARKET.value,
|
||||||
|
'tcg_direct_low': PriceType.TCG_DIRECT_LOW.value,
|
||||||
|
'tcg_low_price_with_shipping': PriceType.TCG_LOW_WITH_SHIPPING.value,
|
||||||
|
'tcg_low_price': PriceType.TCG_LOW.value,
|
||||||
|
'tcg_marketplace_price': PriceType.TCG_MARKETPLACE.value
|
||||||
|
}
|
||||||
|
|
||||||
|
# Process each price type in chunks
|
||||||
|
for price_col, price_type in price_columns.items():
|
||||||
|
valid_prices_df = merged_df[merged_df[price_col].notna()].copy()
|
||||||
|
|
||||||
|
for chunk_start in range(0, len(valid_prices_df), self.CHUNK_SIZE):
|
||||||
|
chunk_df = valid_prices_df.iloc[chunk_start:chunk_start + self.CHUNK_SIZE].copy()
|
||||||
|
uuids = self._bulk_generate_uuids(len(chunk_df))
|
||||||
|
|
||||||
|
chunk_df['price'] = chunk_df[price_col]
|
||||||
|
chunk_df['product_id'] = chunk_df['product_id'].astype(str) # Fixed: Ensure string UUIDs
|
||||||
|
records = self._prepare_price_records(chunk_df, price_type, uuids)
|
||||||
|
self._bulk_insert_prices(records)
|
||||||
|
|
||||||
|
# Handle suggested prices separately with parallel processing
|
||||||
|
product_ids = merged_df['product_id'].unique()
|
||||||
|
suggested_prices = {}
|
||||||
|
|
||||||
|
for chunk_start in range(0, len(product_ids), self.CHUNK_SIZE):
|
||||||
|
chunk_ids = product_ids[chunk_start:chunk_start + self.CHUNK_SIZE]
|
||||||
|
chunk_prices = self._calculate_suggested_prices_batch(chunk_ids)
|
||||||
|
suggested_prices.update(chunk_prices)
|
||||||
|
|
||||||
|
# Create suggested price records
|
||||||
|
if suggested_prices:
|
||||||
|
suggested_df = pd.DataFrame([
|
||||||
|
{'product_id': str(pid), 'price': price} # Fixed: Ensure string UUIDs
|
||||||
|
for pid, price in suggested_prices.items()
|
||||||
|
])
|
||||||
|
|
||||||
|
uuids = self._bulk_generate_uuids(len(suggested_df))
|
||||||
|
records = self._prepare_price_records(suggested_df, 'suggested_price', uuids)
|
||||||
|
self._bulk_insert_prices(records)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Failed to process pricing export: {e}")
|
||||||
|
logger.error(f"Error occurred during price processing: {str(e)}")
|
||||||
|
raise
|
72 app/services/util/_dataframe.py Normal file
@@ -0,0 +1,72 @@
import pandas as pd
from io import StringIO
from app.db.models import File


class ManaboxRow:
    def __init__(self, row: pd.Series):
        # Integer field
        try:
            self.manabox_id = int(row['manabox_id'])
        except (ValueError, TypeError):
            raise ValueError(f"manabox_id must be convertible to integer, got: {row['manabox_id']}")

        # String fields with None/NaN handling
        self.name = str(row['name']) if pd.notna(row['name']) else ''
        self.set_code = str(row['set_code']) if pd.notna(row['set_code']) else ''
        self.set_name = str(row['set_name']) if pd.notna(row['set_name']) else ''
        self.collector_number = str(row['collector_number']) if pd.notna(row['collector_number']) else ''
        self.foil = str(row['foil']) if pd.notna(row['foil']) else ''
        self.rarity = str(row['rarity']) if pd.notna(row['rarity']) else ''
        self.scryfall_id = str(row['scryfall_id']) if pd.notna(row['scryfall_id']) else ''
        self.condition = str(row['condition']) if pd.notna(row['condition']) else ''
        self.language = str(row['language']) if pd.notna(row['language']) else ''
        self.quantity = str(row['quantity']) if pd.notna(row['quantity']) else ''


class TCGPlayerPricingRow:
    def __init__(self, row: pd.Series):
        self.tcgplayer_id = row['tcgplayer_id']
        self.product_line = row['product_line']
        self.set_name = row['set_name']
        self.product_name = row['product_name']
        self.title = row['title']
        self.number = row['number']
        self.rarity = row['rarity']
        self.condition = row['condition']
        self.tcg_market_price = row['tcg_market_price']
        self.tcg_direct_low = row['tcg_direct_low']
        self.tcg_low_price_with_shipping = row['tcg_low_price_with_shipping']
        self.tcg_low_price = row['tcg_low_price']
        self.total_quantity = row['total_quantity']
        self.add_to_quantity = row['add_to_quantity']
        self.tcg_marketplace_price = row['tcg_marketplace_price']
        self.photo_url = row['photo_url']


class DataframeUtil:
    def __init__(self):
        pass

    def format_df_columns(self, df: pd.DataFrame) -> pd.DataFrame:
        df.columns = df.columns.str.lower()
        df.columns = df.columns.str.replace(' ', '_')
        return df

    def file_to_df(self, file: File) -> pd.DataFrame:
        with open(file.filepath, 'rb') as f:
            content = f.read()
        content = content.decode('utf-8')
        df = pd.read_csv(StringIO(content))
        df = self.format_df_columns(df)
        return df

    def csv_bytes_to_df(self, content: bytes) -> pd.DataFrame:
        content = content.decode('utf-8')
        df = pd.read_csv(StringIO(content))
        df = self.format_df_columns(df)
        return df

    def df_to_csv_bytes(self, df: pd.DataFrame) -> bytes:
        csv = df.to_csv(index=False)
        return csv.encode('utf-8')
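

# Illustrative usage (a sketch, not part of the original module):
#   util = DataframeUtil()
#   df = util.csv_bytes_to_df(b"Foo Bar,Baz\n1,2\n")   # columns become ['foo_bar', 'baz']
#   csv_bytes = util.df_to_csv_bytes(df)               # b'foo_bar,baz\n1,2\n'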
|
212 app/tests/box_test.py Normal file
@@ -0,0 +1,212 @@
from fastapi.testclient import TestClient
|
||||||
|
from fastapi import BackgroundTasks
|
||||||
|
import pytest
|
||||||
|
import os
|
||||||
|
from app.main import app
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
client = TestClient(app)
|
||||||
|
|
||||||
|
test_boxes = []
|
||||||
|
|
||||||
|
def test_create_box():
|
||||||
|
# Send as form data, not JSON
|
||||||
|
response = client.post("/api/boxes",
|
||||||
|
data={
|
||||||
|
"type": "play",
|
||||||
|
"set_code": "BLB",
|
||||||
|
"sku": "1234",
|
||||||
|
"num_cards_expected": 504
|
||||||
|
}
|
||||||
|
)
|
||||||
|
test_boxes.append(response.json()["box"][0]["product_id"])
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
assert response.json()["box"][0]["type"] == "play"
|
||||||
|
assert response.json()["box"][0]["set_code"] == "BLB"
|
||||||
|
assert response.json()["box"][0]["sku"] == "1234"
|
||||||
|
assert response.json()["box"][0]["num_cards_expected"] == 504
|
||||||
|
|
||||||
|
def test_update_box():
|
||||||
|
# Create a box first
|
||||||
|
create_response = client.post("/api/boxes",
|
||||||
|
data={
|
||||||
|
"type": "collector",
|
||||||
|
"set_code": "MKM",
|
||||||
|
"sku": "3456",
|
||||||
|
"num_cards_expected": 504
|
||||||
|
}
|
||||||
|
)
|
||||||
|
box_id = create_response.json()["box"][0]["product_id"]
|
||||||
|
test_boxes.append(box_id)
|
||||||
|
|
||||||
|
# Update the box
|
||||||
|
response = client.put(f"/api/boxes/{box_id}",
|
||||||
|
data={
|
||||||
|
"num_cards_expected": 500
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
assert response.json()["box"][0]["type"] == "collector"
|
||||||
|
assert response.json()["box"][0]["set_code"] == "MKM"
|
||||||
|
assert response.json()["box"][0]["sku"] == "3456"
|
||||||
|
assert response.json()["box"][0]["num_cards_expected"] == 500
|
||||||
|
|
||||||
|
def test_delete_box():
|
||||||
|
# Create a box first
|
||||||
|
create_response = client.post("/api/boxes",
|
||||||
|
data={
|
||||||
|
"type": "set",
|
||||||
|
"set_code": "LCI",
|
||||||
|
"sku": "7890",
|
||||||
|
"num_cards_expected": 504
|
||||||
|
}
|
||||||
|
)
|
||||||
|
box_id = create_response.json()["box"][0]["product_id"]
|
||||||
|
|
||||||
|
# Delete the box
|
||||||
|
response = client.delete(f"/api/boxes/{box_id}")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
assert response.json()["box"][0]["type"] == "set"
|
||||||
|
assert response.json()["box"][0]["set_code"] == "LCI"
|
||||||
|
assert response.json()["box"][0]["sku"] == "7890"
|
||||||
|
assert response.json()["box"][0]["num_cards_expected"] == 504
|
||||||
|
|
||||||
|
# Constants for reused values
|
||||||
|
TEST_FILE_PATH = os.path.join(os.getcwd(), "tests/test_files", "manabox_test_file.csv")
|
||||||
|
DEFAULT_METADATA = {
|
||||||
|
"source": "manabox",
|
||||||
|
"type": "scan_export_common"
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_file_size_kb(file_path):
|
||||||
|
"""Helper to consistently calculate file size in KB"""
|
||||||
|
with open(file_path, "rb") as f:
|
||||||
|
return round(len(f.read()) / 1024, 2)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_open_box():
|
||||||
|
"""Test creating a new manabox file"""
|
||||||
|
# Open file within the test scope
|
||||||
|
with open(TEST_FILE_PATH, "rb") as test_file:
|
||||||
|
files = {"file": test_file}
|
||||||
|
|
||||||
|
# Make request
|
||||||
|
response = client.post("/api/files", data=DEFAULT_METADATA, files=files)
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
assert response.status_code == 201
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
file_data = response.json()["files"][0]
|
||||||
|
assert file_data["source"] == DEFAULT_METADATA["source"]
|
||||||
|
assert file_data["type"] == DEFAULT_METADATA["type"]
|
||||||
|
assert file_data["status"] == "pending"
|
||||||
|
assert file_data["service"] == None
|
||||||
|
assert file_data["filename"] == "manabox_test_file.csv"
|
||||||
|
assert file_data["filesize_kb"] == get_file_size_kb(TEST_FILE_PATH)
|
||||||
|
assert file_data["id"] is not None
|
||||||
|
|
||||||
|
# Execute background tasks if they were added
|
||||||
|
background_tasks = BackgroundTasks()
|
||||||
|
for task in background_tasks.tasks:
|
||||||
|
await task()
|
||||||
|
|
||||||
|
# Create a box first
|
||||||
|
create_response = client.post("/api/boxes",
|
||||||
|
data={
|
||||||
|
"type": "play",
|
||||||
|
"set_code": "OTJ",
|
||||||
|
"sku": "2314",
|
||||||
|
"num_cards_expected": 504
|
||||||
|
}
|
||||||
|
)
|
||||||
|
box_id = create_response.json()["box"][0]["product_id"]
|
||||||
|
test_boxes.append(box_id)
|
||||||
|
|
||||||
|
# Open the box
|
||||||
|
response = client.post(f"/api/boxes/{box_id}/open",
|
||||||
|
data={
|
||||||
|
"product_id": box_id,
|
||||||
|
"file_ids": [file_data["id"]],
|
||||||
|
"num_cards_actual": 500
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
assert response.status_code == 201
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_delete_open_box():
|
||||||
|
with open(TEST_FILE_PATH, "rb") as test_file:
|
||||||
|
files = {"file": test_file}
|
||||||
|
|
||||||
|
# Make request
|
||||||
|
response = client.post("/api/files", data=DEFAULT_METADATA, files=files)
|
||||||
|
file_id = response.json()["files"][0]["id"]
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
assert response.status_code == 201
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
file_data = response.json()["files"][0]
|
||||||
|
assert file_data["source"] == DEFAULT_METADATA["source"]
|
||||||
|
assert file_data["type"] == DEFAULT_METADATA["type"]
|
||||||
|
assert file_data["status"] == "pending"
|
||||||
|
assert file_data["service"] == None
|
||||||
|
assert file_data["filename"] == "manabox_test_file.csv"
|
||||||
|
assert file_data["filesize_kb"] == get_file_size_kb(TEST_FILE_PATH)
|
||||||
|
assert file_data["id"] is not None
|
||||||
|
|
||||||
|
# Execute background tasks if they were added
|
||||||
|
background_tasks = BackgroundTasks()
|
||||||
|
for task in background_tasks.tasks:
|
||||||
|
await task()
|
||||||
|
|
||||||
|
# Create a box first
|
||||||
|
create_response = client.post("/api/boxes",
|
||||||
|
data={
|
||||||
|
"type": "play",
|
||||||
|
"set_code": "INR",
|
||||||
|
"sku": "1423",
|
||||||
|
"num_cards_expected": 504
|
||||||
|
}
|
||||||
|
)
|
||||||
|
box_id = create_response.json()["box"][0]["product_id"]
|
||||||
|
|
||||||
|
# Open the box
|
||||||
|
open_response = client.post(f"/api/boxes/{box_id}/open",
|
||||||
|
data={
|
||||||
|
"product_id": box_id,
|
||||||
|
"file_ids": [file_id],
|
||||||
|
"num_cards_actual": 500
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Check if the box is opened
|
||||||
|
assert open_response.status_code == 201
|
||||||
|
assert open_response.json()["success"] == True
|
||||||
|
|
||||||
|
# Get the open box ID
|
||||||
|
open_box_id = open_response.json()["open_box"][0]["id"]
|
||||||
|
|
||||||
|
# Delete the open box
|
||||||
|
response = client.delete(f"/api/boxes/{open_box_id}/open")
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
|
||||||
|
def test_cleanup():
|
||||||
|
cleanup = True
|
||||||
|
# Delete all boxes created during testing
|
||||||
|
if cleanup:
|
||||||
|
for box_id in test_boxes:
|
||||||
|
client.delete(f"/api/boxes/{box_id}")
|
||||||
|
|
123 app/tests/file_test.py Normal file
@@ -0,0 +1,123 @@
from fastapi.testclient import TestClient
|
||||||
|
from fastapi import BackgroundTasks
|
||||||
|
import pytest
|
||||||
|
from unittest.mock import Mock, patch
|
||||||
|
import asyncio
|
||||||
|
import os
|
||||||
|
from app.main import app
|
||||||
|
from app.services.file import FileService
|
||||||
|
from app.services.task import TaskService
|
||||||
|
|
||||||
|
client = TestClient(app)
|
||||||
|
|
||||||
|
# Constants for reused values
|
||||||
|
TEST_FILE_PATH = os.path.join(os.getcwd(), "tests/test_files", "manabox_test_file.csv")
|
||||||
|
DEFAULT_METADATA = {
|
||||||
|
"source": "manabox",
|
||||||
|
"type": "scan_export_rare"
|
||||||
|
}
|
||||||
|
|
||||||
|
def get_file_size_kb(file_path):
|
||||||
|
"""Helper to consistently calculate file size in KB"""
|
||||||
|
with open(file_path, "rb") as f:
|
||||||
|
return round(len(f.read()) / 1024, 2)
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_create_manabox_file():
|
||||||
|
"""Test creating a new manabox file"""
|
||||||
|
# Open file within the test scope
|
||||||
|
with open(TEST_FILE_PATH, "rb") as test_file:
|
||||||
|
files = {"file": test_file}
|
||||||
|
|
||||||
|
# Make request
|
||||||
|
response = client.post("/api/files", data=DEFAULT_METADATA, files=files)
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
assert response.status_code == 201
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
file_data = response.json()["files"][0]
|
||||||
|
assert file_data["source"] == DEFAULT_METADATA["source"]
|
||||||
|
assert file_data["type"] == DEFAULT_METADATA["type"]
|
||||||
|
assert file_data["status"] == "pending"
|
||||||
|
assert file_data["service"] == None
|
||||||
|
assert file_data["filename"] == "manabox_test_file.csv"
|
||||||
|
assert file_data["filesize_kb"] == get_file_size_kb(TEST_FILE_PATH)
|
||||||
|
assert file_data["id"] is not None
|
||||||
|
|
||||||
|
# Execute background tasks if they were added
|
||||||
|
background_tasks = BackgroundTasks()
|
||||||
|
for task in background_tasks.tasks:
|
||||||
|
await task()
|
||||||
|
|
||||||
|
def test_get_file():
|
||||||
|
"""Test retrieving a specific file"""
|
||||||
|
# Create a file first
|
||||||
|
with open(TEST_FILE_PATH, "rb") as test_file:
|
||||||
|
files = {"file": test_file}
|
||||||
|
create_response = client.post("/api/files", data=DEFAULT_METADATA, files=files)
|
||||||
|
file_id = create_response.json()["files"][0]["id"]
|
||||||
|
|
||||||
|
# Get the file
|
||||||
|
response = client.get(f"/api/files/{file_id}")
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
file_data = response.json()["files"][0]
|
||||||
|
assert file_data["source"] == DEFAULT_METADATA["source"]
|
||||||
|
assert file_data["type"] == DEFAULT_METADATA["type"]
|
||||||
|
assert file_data["status"] == "completed"
|
||||||
|
assert file_data["service"] == None
|
||||||
|
assert file_data["filename"] == "manabox_test_file.csv"
|
||||||
|
assert file_data["filesize_kb"] == get_file_size_kb(TEST_FILE_PATH)
|
||||||
|
assert file_data["id"] == file_id
|
||||||
|
|
||||||
|
def test_delete_file():
|
||||||
|
"""Test file deletion"""
|
||||||
|
# Create a file first
|
||||||
|
with open(TEST_FILE_PATH, "rb") as test_file:
|
||||||
|
files = {"file": test_file}
|
||||||
|
create_response = client.post("/api/files", data=DEFAULT_METADATA, files=files)
|
||||||
|
file_id = create_response.json()["files"][0]["id"]
|
||||||
|
|
||||||
|
# Delete the file
|
||||||
|
response = client.delete(f"/api/files/{file_id}")
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
file_data = response.json()["files"][0]
|
||||||
|
assert file_data["source"] == DEFAULT_METADATA["source"]
|
||||||
|
assert file_data["type"] == DEFAULT_METADATA["type"]
|
||||||
|
assert file_data["status"] == "deleted"
|
||||||
|
assert file_data["service"] == None
|
||||||
|
assert file_data["filename"] == "manabox_test_file.csv"
|
||||||
|
assert file_data["filesize_kb"] == get_file_size_kb(TEST_FILE_PATH)
|
||||||
|
assert file_data["id"] == file_id
|
||||||
|
|
||||||
|
def test_get_prepared_files():
|
||||||
|
"""Test retrieving files filtered by status"""
|
||||||
|
# Create a test file first
|
||||||
|
with open(TEST_FILE_PATH, "rb") as test_file:
|
||||||
|
files = {"file": test_file}
|
||||||
|
create_response = client.post("/api/files", data=DEFAULT_METADATA, files=files)
|
||||||
|
file_id = create_response.json()["files"][0]["id"]
|
||||||
|
|
||||||
|
# Get prepared files
|
||||||
|
response = client.get("/api/files?status=completed")
|
||||||
|
|
||||||
|
# Check response
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["success"] == True
|
||||||
|
|
||||||
|
# get file from id
|
||||||
|
file_data = [file for file in response.json()["files"] if file["id"] == file_id][0]
|
||||||
|
assert file_data["source"] == DEFAULT_METADATA["source"]
|
||||||
|
assert file_data["type"] == DEFAULT_METADATA["type"]
|
||||||
|
assert file_data["status"] == "completed"
|
||||||
|
assert file_data["service"] == None
|
||||||
|
assert file_data["filename"] == "manabox_test_file.csv"
|
||||||
|
assert file_data["filesize_kb"] == get_file_size_kb(TEST_FILE_PATH)
|
504 app/tests/test_files/manabox_test_file.csv Normal file
@@ -0,0 +1,504 @@
Name,Set code,Set name,Collector number,Foil,Rarity,Quantity,ManaBox ID,Scryfall ID,Purchase price,Misprint,Altered,Condition,Language,Purchase price currency
|
||||||
|
"Tinybones, Bauble Burglar",FDN,Foundations,72,normal,rare,1,101414,ff3d85bc-ef2d-4251-baf4-a14bd0cee61e,0.66,false,false,near_mint,en,USD
|
||||||
|
Scrawling Crawler,FDN,Foundations,132,normal,rare,1,100912,a1176dcf-40ee-4342-aa74-791b8352e99a,4.81,false,false,near_mint,en,USD
|
||||||
|
"Giada, Font of Hope",FDN,Foundations,141,normal,rare,1,100804,8ae6fc26-cfad-4da8-98d9-49c27c24d293,1.33,false,false,near_mint,en,USD
|
||||||
|
Blasphemous Edict,FDN,Foundations,57,normal,rare,1,100168,11040ecd-3153-4029-b42b-1441bc51ec34,6.9,false,false,near_mint,en,USD
|
||||||
|
"Drakuseth, Maw of Flames",FDN,Foundations,193,normal,rare,1,100092,029b1edb-e1de-4f1c-81df-8d17f4920318,0.33,false,false,near_mint,en,USD
|
||||||
|
"Koma, World-Eater",FDN,Foundations,347,normal,rare,1,100792,8889e1ca-eec1-408b-b11e-98cc0a357a97,4.69,false,false,near_mint,en,USD
|
||||||
|
"Ghalta, Primal Hunger",FDN,Foundations,222,normal,rare,1,100635,6a9c39e4-a8cf-42dd-8d0e-45634b335546,0.54,false,false,near_mint,en,USD
|
||||||
|
Sire of Seven Deaths,FDN,Foundations,1,normal,mythic,1,100812,8d8432a7-1c8a-4cfb-947c-ecf9791063eb,18.63,false,false,near_mint,en,USD
|
||||||
|
Hero's Downfall,FDN,Foundations,319,normal,uncommon,1,101639,10cedc6d-075a-4f9b-a858-e2c29809ee33,0.39,false,false,near_mint,en,USD
|
||||||
|
"Etali, Primal Storm",FDN,Foundations,194,normal,rare,1,101037,b6af9894-95b5-4c8e-902f-a9ba70f02e4a,0.32,false,false,near_mint,en,USD
|
||||||
|
High Fae Trickster,FDN,Foundations,307,normal,rare,1,100918,a21180a4-208f-4c13-a704-58403ddaf12f,3.39,false,false,near_mint,en,USD
|
||||||
|
Mocking Sprite,FDN,Foundations,159,foil,common,1,101624,f6792f63-b651-497d-8aa5-cddf4cedeca8,0.09,false,false,near_mint,en,USD
|
||||||
|
Bake into a Pie,FDN,Foundations,169,foil,common,1,101494,2ab0e660-86a3-4b92-82fa-77dcb5db947d,0.06,false,false,near_mint,en,USD
|
||||||
|
Boltwave,FDN,Foundations,79,foil,uncommon,1,100810,8d1ec351-5e70-4eb2-b590-6bff94ef8178,4.27,false,false,near_mint,en,USD
|
||||||
|
Jungle Hollow,FDN,Foundations,263,foil,common,1,101224,dc758e14-d370-45e4-bbc5-938fb4d21127,0.08,false,false,near_mint,en,USD
|
||||||
|
Ambush Wolf,FDN,Foundations,98,foil,common,1,101492,2903832c-318e-42ab-bf58-c682ec2f7afd,0.03,false,false,near_mint,en,USD
|
||||||
|
An Offer You Can't Refuse,FDN,Foundations,160,foil,uncommon,1,100948,a829747f-cf9b-4d81-ba66-9f0630ed4565,1.51,false,false,near_mint,en,USD
|
||||||
|
Sower of Chaos,FDN,Foundations,95,foil,common,1,101556,7ff50606-491c-4946-8d03-719b01cfad77,0.02,false,false,near_mint,en,USD
|
||||||
|
Guarded Heir,FDN,Foundations,14,foil,uncommon,1,100505,525ba5c7-3ce5-4e52-b8b5-96c9040a6738,0.06,false,false,near_mint,en,USD
|
||||||
|
Wind-Scarred Crag,FDN,Foundations,271,foil,common,1,100684,759e99df-11a8-4aee-b6bc-344e84e10d94,0.08,false,false,near_mint,en,USD
|
||||||
|
Think Twice,FDN,Foundations,165,foil,common,1,101202,d88faaa1-eb41-40f7-991c-5c06e1138f3d,0.03,false,false,near_mint,en,USD
|
||||||
|
Grow from the Ashes,FDN,Foundations,225,foil,common,1,101502,42525f8a-aee7-4811-8f05-471b559c2c4a,0.07,false,false,near_mint,en,USD
|
||||||
|
Spitfire Lagac,FDN,Foundations,208,foil,common,1,101496,30f600cd-b696-4f49-9cbc-5a33aa43d04c,0.05,false,false,near_mint,en,USD
|
||||||
|
Abyssal Harvester,FDN,Foundations,54,foil,rare,1,101342,f2e0f538-5825-47e9-883c-3ec6fd5b25ea,3.18,false,false,near_mint,en,USD
|
||||||
|
Sanguine Syphoner,FDN,Foundations,68,foil,common,1,101582,b1daf5bb-c8e9-4e79-a532-ca92a9a885cd,0.19,false,false,near_mint,en,USD
|
||||||
|
Goldvein Pick,FDN,Foundations,253,foil,common,1,101572,a241317d-2277-467e-a8f9-aa71c944e244,0.06,false,false,near_mint,en,USD
|
||||||
|
Goblin Negotiation,FDN,Foundations,88,foil,uncommon,1,101335,f2016585-e26c-4d13-b09f-af6383c192f7,0.14,false,false,near_mint,en,USD
|
||||||
|
Banishing Light,FDN,Foundations,138,foil,common,1,101613,e38dc3b3-1629-491b-8afd-0e7a9a857713,0.05,false,false,near_mint,en,USD
|
||||||
|
Dauntless Veteran,FDN,Foundations,8,foil,uncommon,1,100704,7a136f26-ac66-407f-b389-357222d2c4a2,0.06,false,false,near_mint,en,USD
|
||||||
|
Run Away Together,FDN,Foundations,162,foil,common,1,101614,e598eb7b-10dc-49e6-ac60-2fefa987173e,0.02,false,false,near_mint,en,USD
|
||||||
|
"Tatyova, Benthic Druid",FDN,Foundations,247,foil,uncommon,1,101301,eabc978a-0666-472d-bdc6-d4b29d29eca4,0.14,false,false,near_mint,en,USD
|
||||||
|
"Balmor, Battlemage Captain",FDN,Foundations,237,foil,uncommon,1,100142,0b45ab13-9bb6-48af-8b37-d97b25801ac8,0.13,false,false,near_mint,en,USD
|
||||||
|
Involuntary Employment,FDN,Foundations,203,foil,common,1,101622,f3ad3d62-2f24-4562-b3fa-809213dbc4a4,0.03,false,false,near_mint,en,USD
|
||||||
|
"Dwynen, Gilt-Leaf Daen",FDN,Foundations,217,foil,uncommon,1,100086,01c00d7b-7fac-4f8c-a1ea-de2cf4d06627,0.23,false,false,near_mint,en,USD
|
||||||
|
Swiftfoot Boots,FDN,Foundations,258,foil,uncommon,1,100414,41040541-b129-4cf4-9411-09b1d9d32c19,2.03,false,false,near_mint,en,USD
|
||||||
|
Soul-Shackled Zombie,FDN,Foundations,70,foil,common,1,101609,deea5690-6eb2-4353-b917-cbbf840e4e71,0.05,false,false,near_mint,en,USD
|
||||||
|
Fake Your Own Death,FDN,Foundations,174,foil,common,1,101539,693635a6-df50-44c5-9598-0c79b45d4df4,0.09,false,false,near_mint,en,USD
|
||||||
|
Gnarlid Colony,FDN,Foundations,224,foil,common,1,101508,47565d10-96bf-4fb0-820f-f20a44a76b6f,0.05,false,false,near_mint,en,USD
|
||||||
|
Apothecary Stomper,FDN,Foundations,99,foil,common,1,101537,680b7b0c-0e1b-46ce-9917-9fc6e05aa148,0.02,false,false,near_mint,en,USD
|
||||||
|
Rugged Highlands,FDN,Foundations,265,foil,common,1,101400,fd6eaf8e-8881-4d7b-bafc-75e4ca5cbef6,0.05,false,false,near_mint,en,USD
|
||||||
|
Firebrand Archer,FDN,Foundations,196,foil,common,1,101630,fe0312f1-4c98-4b7f-8a34-0059ea80edef,0.13,false,false,near_mint,en,USD
|
||||||
|
Scoured Barrens,FDN,Foundations,266,foil,common,1,100277,2632a4b2-9ca6-4b67-9a99-14f52ad3dc41,0.12,false,false,near_mint,en,USD
|
||||||
|
Courageous Goblin,FDN,Foundations,82,foil,common,1,101566,8db6819c-666a-409d-85a5-b9ac34d8dd2f,0.02,false,false,near_mint,en,USD
|
||||||
|
Jungle Hollow,FDN,Foundations,263,normal,common,1,101224,dc758e14-d370-45e4-bbc5-938fb4d21127,0.07,false,false,near_mint,en,USD
|
||||||
|
Wind-Scarred Crag,FDN,Foundations,271,normal,common,1,100684,759e99df-11a8-4aee-b6bc-344e84e10d94,0.04,false,false,near_mint,en,USD
|
||||||
|
Dismal Backwater,FDN,Foundations,261,normal,common,1,101220,dbb0df36-8467-4a41-8e1c-6c3584d4fd10,0.06,false,false,near_mint,en,USD
|
||||||
|
Bloodfell Caves,FDN,Foundations,259,normal,common,1,100806,8b90dc92-cb66-41d9-89f9-2b6e3cfc8082,0.05,false,false,near_mint,en,USD
|
||||||
|
Rugged Highlands,FDN,Foundations,265,normal,common,1,101400,fd6eaf8e-8881-4d7b-bafc-75e4ca5cbef6,0.05,false,false,near_mint,en,USD
|
||||||
|
Scavenging Ooze,FDN,Foundations,232,normal,rare,1,100808,8c504c23-1e9a-411b-9cfe-4180d0c744f6,0.15,false,false,near_mint,en,USD
|
||||||
|
"Kiora, the Rising Tide",FDN,Foundations,45,normal,rare,1,100762,83f20a32-9f5d-4a68-8995-549e57554da2,1.57,false,false,near_mint,en,USD
|
||||||
|
Curator of Destinies,FDN,Foundations,34,normal,rare,1,100908,9ff79da7-c3f7-4541-87a0-503544c699b5,0.12,false,false,near_mint,en,USD
|
||||||
|
"Loot, Exuberant Explorer",FDN,Foundations,106,normal,rare,1,100131,09980ce6-425b-4e03-94d0-0f02043cb361,4.8,false,false,near_mint,en,USD
|
||||||
|
Micromancer,FDN,Foundations,158,normal,uncommon,1,101274,e6af54ea-b57a-4e50-8e46-1747cca14430,0.07,false,false,near_mint,en,USD
|
||||||
|
"Ruby, Daring Tracker",FDN,Foundations,245,normal,uncommon,1,101405,fe3e7dd2-b66d-4218-9fde-f84bec26b7bf,0.05,false,false,near_mint,en,USD
|
||||||
|
Mild-Mannered Librarian,FDN,Foundations,228,normal,uncommon,1,100515,5389663a-fe25-41b9-8c92-1f4d7721ffc2,0.03,false,false,near_mint,en,USD
|
||||||
|
Guarded Heir,FDN,Foundations,14,normal,uncommon,1,100505,525ba5c7-3ce5-4e52-b8b5-96c9040a6738,0.05,false,false,near_mint,en,USD
|
||||||
|
Garruk's Uprising,FDN,Foundations,220,normal,uncommon,1,100447,4805c303-e73b-443b-a09f-49d2c2c88bb5,0.25,false,false,near_mint,en,USD
|
||||||
|
Vampire Nighthawk,FDN,Foundations,186,normal,uncommon,1,101474,0a1934ab-3171-4fc6-8033-ad998899ba73,0.12,false,false,near_mint,en,USD
|
||||||
|
Soulstone Sanctuary,FDN,Foundations,133,normal,rare,1,100596,642553a7-6d0f-483d-a873-3a703786db42,1.9,false,false,near_mint,en,USD
|
||||||
|
"Balmor, Battlemage Captain",FDN,Foundations,237,normal,uncommon,1,100142,0b45ab13-9bb6-48af-8b37-d97b25801ac8,0.07,false,false,near_mint,en,USD
|
||||||
|
Adventuring Gear,FDN,Foundations,249,normal,uncommon,1,100358,361f9b99-5b5d-40da-b4b9-5ad90f6280ee,0.06,false,false,near_mint,en,USD
|
||||||
|
Grappling Kraken,FDN,Foundations,39,normal,uncommon,1,101165,d1f5cab3-3fc0-448d-8252-cd55abf5b596,0.12,false,false,near_mint,en,USD
|
||||||
|
Quakestrider Ceratops,FDN,Foundations,110,normal,uncommon,1,100120,067f72c2-ead6-4879-bc9d-696c9f87c0b2,0.11,false,false,near_mint,en,USD
|
||||||
|
Genesis Wave,FDN,Foundations,221,normal,rare,1,101177,d46f7ddb-f986-4f1f-b096-ae1a02d0bdc8,0.29,false,false,near_mint,en,USD
|
||||||
|
"Lathril, Blade of the Elves",FDN,Foundations,242,normal,rare,1,100811,8d4e5480-a287-4a25-b855-a26dae555b1c,0.25,false,false,near_mint,en,USD
|
||||||
|
Elvish Archdruid,FDN,Foundations,219,normal,rare,1,100341,341da856-7414-403b-b2e3-4bebd58a5aa4,0.4,false,false,near_mint,en,USD
|
||||||
|
Imprisoned in the Moon,FDN,Foundations,156,normal,uncommon,1,101313,ee28e147-6622-4399-a314-c14a5c912dd0,0.18,false,false,near_mint,en,USD
|
||||||
|
Inspiring Call,FDN,Foundations,226,normal,uncommon,1,100400,3e241642-5172-4437-b694-f6aa159d5cd9,0.15,false,false,near_mint,en,USD
|
||||||
|
Essence Scatter,FDN,Foundations,153,normal,uncommon,1,101226,dd05c850-f91e-4ffb-b4cc-8418d49dad90,0.04,false,false,near_mint,en,USD
|
||||||
|
Exemplar of Light,FDN,Foundations,11,normal,rare,1,100832,920c8fc5-fdd2-446a-a676-5c363f96928f,2.82,false,false,near_mint,en,USD
|
||||||
|
Meteor Golem,FDN,Foundations,256,normal,uncommon,1,101167,d291ea1e-36bc-46b3-b3ae-084fa0ba69eb,0.05,false,false,near_mint,en,USD
|
||||||
|
Swiftfoot Boots,FDN,Foundations,258,normal,uncommon,1,100414,41040541-b129-4cf4-9411-09b1d9d32c19,1.19,false,false,near_mint,en,USD
|
||||||
|
Brazen Scourge,FDN,Foundations,191,normal,uncommon,1,101616,eb84b86c-3276-4fc1-a09d-47de388cb729,0.02,false,false,near_mint,en,USD
|
||||||
|
Sylvan Scavenging,FDN,Foundations,113,normal,rare,1,101100,c35b683c-d3b2-46a1-876a-81b34e8ba2fc,0.25,false,false,near_mint,en,USD
|
||||||
|
Claws Out,FDN,Foundations,6,normal,uncommon,1,100429,4396049c-b976-4b7f-8ecd-564e24ebd631,0.1,false,false,near_mint,en,USD
|
||||||
|
Snakeskin Veil,FDN,Foundations,233,normal,uncommon,1,100645,6cc4c21d-9bdc-4490-9203-17f51db0ddd1,0.08,false,false,near_mint,en,USD
|
||||||
|
Skyship Buccaneer,FDN,Foundations,50,normal,uncommon,1,100587,62958fc3-55dc-4b97-a070-490d6ed27820,0.02,false,false,near_mint,en,USD
|
||||||
|
Arcane Epiphany,FDN,Foundations,29,normal,uncommon,1,100116,06431793-5dfe-4cbf-990b-4bcc960d1f31,0.03,false,false,near_mint,en,USD
|
||||||
|
Brass's Bounty,FDN,Foundations,190,normal,rare,1,100610,65fe7127-b0ec-400f-97f1-6e17ab8e319d,0.14,false,false,near_mint,en,USD
|
||||||
|
Fiendish Panda,FDN,Foundations,120,normal,uncommon,1,100483,4e434d74-cad0-45f5-bc8d-f34aa5e1d879,0.09,false,false,near_mint,en,USD
|
||||||
|
Frenzied Goblin,FDN,Foundations,199,normal,uncommon,1,101602,d5592573-2889-40b1-b1d5-c2802482549a,0.03,false,false,near_mint,en,USD
|
||||||
|
Lunar Insight,FDN,Foundations,46,normal,rare,1,100958,a9a159f6-fecf-4bdd-b2f8-a9665a5cc32d,0.25,false,false,near_mint,en,USD
|
||||||
|
Twinblade Blessing,FDN,Foundations,26,normal,uncommon,1,101310,ecf01cbe-9fcb-4f35-bc6b-2280620b06ff,0.1,false,false,near_mint,en,USD
|
||||||
|
"Tatyova, Benthic Druid",FDN,Foundations,247,normal,uncommon,1,101301,eabc978a-0666-472d-bdc6-d4b29d29eca4,0.06,false,false,near_mint,en,USD
|
||||||
|
Dragon Trainer,FDN,Foundations,84,normal,uncommon,1,100830,91bd75a1-cb54-4e38-9ce1-e8f32a73c6eb,0.04,false,false,near_mint,en,USD
|
||||||
|
Raise the Past,FDN,Foundations,22,normal,rare,1,100641,6c6be129-56da-4fe7-a6bd-6a1d402c09e1,2.27,false,false,near_mint,en,USD
|
||||||
|
Divine Resilience,FDN,Foundations,10,normal,uncommon,1,101347,f3a08245-a535-4d24-b8c0-78759bb9c4b0,0.11,false,false,near_mint,en,USD
|
||||||
|
Bulk Up,FDN,Foundations,80,normal,uncommon,1,100857,977dcc50-da10-4281-b522-9240c1204f5d,0.2,false,false,near_mint,en,USD
|
||||||
|
Diregraf Ghoul,FDN,Foundations,171,normal,uncommon,1,100439,4682012c-d7e0-4257-b538-3de497507464,0.03,false,false,near_mint,en,USD
|
||||||
|
Drake Hatcher,FDN,Foundations,35,normal,rare,1,101071,bcaf4196-6bf3-47fa-b5c7-0e77f45cf820,0.12,false,false,near_mint,en,USD
|
||||||
|
Youthful Valkyrie,FDN,Foundations,149,normal,uncommon,1,100894,9d795f79-c3a5-4ea1-a5cf-1ce73d6837b6,0.14,false,false,near_mint,en,USD
|
||||||
|
Seeker's Folly,FDN,Foundations,69,normal,uncommon,1,101067,bc359da6-8b7f-45ec-b530-ce159fc35953,0.06,false,false,near_mint,en,USD
|
||||||
|
Heroic Reinforcements,FDN,Foundations,241,normal,uncommon,1,100631,6a05e8d5-c2ad-489a-888d-22622886b620,0.04,false,false,near_mint,en,USD
|
||||||
|
Inspiration from Beyond,FDN,Foundations,43,normal,uncommon,1,101033,b636fe95-664f-4fb1-aab9-28856edeccd6,0.04,false,false,near_mint,en,USD
|
||||||
|
"Dwynen, Gilt-Leaf Daen",FDN,Foundations,217,normal,uncommon,1,100086,01c00d7b-7fac-4f8c-a1ea-de2cf4d06627,0.14,false,false,near_mint,en,USD
|
||||||
|
Twinflame Tyrant,FDN,Foundations,97,normal,mythic,1,100228,1eb34f51-0bd2-43c3-af95-2ce8dabcc7bb,17.77,false,false,near_mint,en,USD
|
||||||
|
Sun-Blessed Healer,FDN,Foundations,25,normal,uncommon,1,100332,323d029e-9a88-4188-b3a4-38ef32cffc9f,0.09,false,false,near_mint,en,USD
|
||||||
|
Seismic Rupture,FDN,Foundations,205,normal,uncommon,1,100268,2519a51a-26a0-4884-9ba8-9db135c9ee49,0.02,false,false,near_mint,en,USD
|
||||||
|
Slumbering Cerberus,FDN,Foundations,94,normal,uncommon,1,100892,9d06faa8-201d-45db-b398-ad56f7b01848,0.03,false,false,near_mint,en,USD
|
||||||
|
Tragic Banshee,FDN,Foundations,73,normal,uncommon,1,100324,30df3e33-2f17-4067-99f1-5db6b0f41fd4,0.03,false,false,near_mint,en,USD
|
||||||
|
Stromkirk Bloodthief,FDN,Foundations,185,normal,uncommon,1,97176,485d6a5a-2054-47d5-91b8-71ce308ed4dc,0.04,false,false,near_mint,en,USD
|
||||||
|
Blanchwood Armor,FDN,Foundations,213,normal,uncommon,1,100237,1fd7ec1a-dafa-42ca-bc25-f6848fb03f60,0.07,false,false,near_mint,en,USD
|
||||||
|
Spectral Sailor,FDN,Foundations,164,normal,uncommon,1,100100,03a49535-c5f3-4a6f-b333-7ac7bffdc9ae,0.06,false,false,near_mint,en,USD
|
||||||
|
Extravagant Replication,FDN,Foundations,154,normal,rare,1,100634,6a41dfae-bc7e-4105-8f7e-fd0109197ad8,0.43,false,false,near_mint,en,USD
|
||||||
|
Electroduplicate,FDN,Foundations,85,normal,rare,1,100976,abb06b1c-5d4e-49b9-9c4a-e60ab656a257,0.3,false,false,near_mint,en,USD
|
||||||
|
Angel of Finality,FDN,Foundations,136,normal,uncommon,1,101057,baaabd52-3aa9-4e2f-9369-d4db8b405ba8,0.07,false,false,near_mint,en,USD
|
||||||
|
Battlesong Berserker,FDN,Foundations,78,normal,uncommon,1,100917,a1f8b199-5d62-485f-b1c3-b30aa550595b,0.03,false,false,near_mint,en,USD
|
||||||
|
Swiftblade Vindicator,FDN,Foundations,246,normal,rare,1,101372,f94618ec-000c-4371-b925-05ff82bfe221,0.12,false,false,near_mint,en,USD
|
||||||
|
Dauntless Veteran,FDN,Foundations,8,normal,uncommon,1,100704,7a136f26-ac66-407f-b389-357222d2c4a2,0.05,false,false,near_mint,en,USD
|
||||||
|
Hero's Downfall,FDN,Foundations,175,normal,uncommon,1,97185,ad2c01d9-8f54-46c0-9dc9-d4d4764ce1c9,0.1,false,false,near_mint,en,USD
|
||||||
|
Resolute Reinforcements,FDN,Foundations,145,normal,uncommon,1,100841,940f3989-77cc-49a9-92e0-095a75d80f0f,0.09,false,false,near_mint,en,USD
|
||||||
|
Zombify,FDN,Foundations,187,normal,uncommon,1,101225,dc798e6f-13c4-457c-b052-b7b65bc83cfe,0.09,false,false,near_mint,en,USD
|
||||||
|
Fiery Annihilation,FDN,Foundations,86,normal,uncommon,1,100523,54fe00aa-d284-48f9-b5a2-1bd4c5fa8e58,0.07,false,false,near_mint,en,USD
|
||||||
|
Clinquant Skymage,FDN,Foundations,33,normal,uncommon,1,100357,36012810-0e83-4640-8ba7-7262229f1b84,0.05,false,false,near_mint,en,USD
|
||||||
|
Consuming Aberration,FDN,Foundations,238,normal,rare,1,101066,bc2b28fd-66b0-457c-80ea-7caed2cc7926,0.16,false,false,near_mint,en,USD
|
||||||
|
Fishing Pole,FDN,Foundations,128,normal,uncommon,1,101128,c95ab836-3277-4223-9aaa-ef2c77256b65,0.07,false,false,near_mint,en,USD
|
||||||
|
Felling Blow,FDN,Foundations,105,normal,uncommon,1,100854,96948ae3-b15d-4d6d-aa73-9f52084cd903,0.05,false,false,near_mint,en,USD
|
||||||
|
Abrade,FDN,Foundations,188,normal,uncommon,1,100522,548947dc-a5ca-43b5-9531-bcef20fa4ae5,0.09,false,false,near_mint,en,USD
|
||||||
|
Spinner of Souls,FDN,Foundations,112,normal,rare,1,101358,f50a8dec-b079-4192-9098-6cdc1026c693,0.66,false,false,near_mint,en,USD
|
||||||
|
Vampire Gourmand,FDN,Foundations,74,normal,uncommon,1,100827,917514c0-9cd5-4b97-85b9-c4f753560ad4,0.09,false,false,near_mint,en,USD
|
||||||
|
Needletooth Pack,FDN,Foundations,108,normal,uncommon,1,100868,993c1679-e02b-44f2-b34e-12fd6b5142e9,0.05,false,false,near_mint,en,USD
|
||||||
|
Burnished Hart,FDN,Foundations,250,normal,uncommon,1,100609,65ebbff0-fbe6-4310-a33f-e00bb2534979,0.06,false,false,near_mint,en,USD
|
||||||
|
Arbiter of Woe,FDN,Foundations,55,normal,uncommon,1,101008,b2496c4a-df03-4583-bd76-f98ed5cb61ee,0.06,false,false,near_mint,en,USD
|
||||||
|
Good-Fortune Unicorn,FDN,Foundations,240,normal,uncommon,1,101300,eabbe163-2b15-42e3-89ce-7363e6250d3a,0.1,false,false,near_mint,en,USD
|
||||||
|
Reassembling Skeleton,FDN,Foundations,182,normal,uncommon,1,100291,28e84b1b-1c05-4e1b-93b8-9cc2ca73509d,0.08,false,false,near_mint,en,USD
|
||||||
|
Reclamation Sage,FDN,Foundations,231,normal,uncommon,1,100197,1918ea65-ab7f-4d40-97fd-a656c892a2a1,0.14,false,false,near_mint,en,USD
|
||||||
|
Leyline Axe,FDN,Foundations,129,normal,rare,1,101052,b9c03336-a321-4c06-94d1-809f328fabd8,3.17,false,false,near_mint,en,USD
|
||||||
|
An Offer You Can't Refuse,FDN,Foundations,160,normal,uncommon,1,100948,a829747f-cf9b-4d81-ba66-9f0630ed4565,0.99,false,false,near_mint,en,USD
|
||||||
|
Goblin Negotiation,FDN,Foundations,88,normal,uncommon,1,101335,f2016585-e26c-4d13-b09f-af6383c192f7,0.09,false,false,near_mint,en,USD
|
||||||
|
Empyrean Eagle,FDN,Foundations,239,normal,uncommon,1,100533,577e99a7-4a55-4314-8f08-2ae0c33b85c7,0.08,false,false,near_mint,en,USD
|
||||||
|
Solemn Simulacrum,FDN,Foundations,257,normal,rare,1,100514,5383f45e-3da2-40fb-beee-801448bbb60f,0.3,false,false,near_mint,en,USD
|
||||||
|
Crystal Barricade,FDN,Foundations,7,normal,rare,1,100822,905d3e02-ea06-45e7-9adb-c8e7583323a2,1.24,false,false,near_mint,en,USD
|
||||||
|
Hidetsugu's Second Rite,FDN,Foundations,202,normal,uncommon,1,100577,609421da-8d89-4365-b18b-778832d91482,0.04,false,false,near_mint,en,USD
|
||||||
|
Affectionate Indrik,FDN,Foundations,211,normal,uncommon,1,100310,2da8347d-06a4-46e0-a55e-cc2da4660263,0.02,false,false,near_mint,en,USD
|
||||||
|
Infernal Vessel,FDN,Foundations,63,normal,uncommon,1,101560,877b6330-2d0b-4f2f-a848-f10b06fb4ef5,0.06,false,false,near_mint,en,USD
|
||||||
|
"Zimone, Paradox Sculptor",FDN,Foundations,126,normal,mythic,1,100241,20ccbfdd-ddae-440c-9bc0-38b15a56fdd1,2.13,false,false,near_mint,en,USD
|
||||||
|
High-Society Hunter,FDN,Foundations,61,normal,rare,1,100501,51da4a4b-ea12-4169-a7cf-eb4427f13e84,0.64,false,false,near_mint,en,USD
|
||||||
|
Heraldic Banner,FDN,Foundations,254,normal,uncommon,1,100678,743ea709-dbb3-4db8-a2ce-544f47eb6339,0.24,false,false,near_mint,en,USD
|
||||||
|
Wardens of the Cycle,FDN,Foundations,125,normal,uncommon,1,100761,83ea9b2c-5723-4eff-88ac-6669975939e3,0.07,false,false,near_mint,en,USD
|
||||||
|
Preposterous Proportions,FDN,Foundations,109,normal,rare,1,100983,acb65189-60e4-42e0-9fb1-da6b716b91d7,0.94,false,false,near_mint,en,USD
|
||||||
|
Savannah Lions,FDN,Foundations,146,normal,uncommon,1,97184,9c9ac1bc-cdf3-4fa6-8319-a7ea164e9e47,0.04,false,false,near_mint,en,USD
|
||||||
|
Secluded Courtyard,FDN,Foundations,267,normal,uncommon,1,101161,d13373d2-139b-48c7-a8c9-828cefc4f150,0.12,false,false,near_mint,en,USD
|
||||||
|
Ajani's Pridemate,FDN,Foundations,135,normal,uncommon,1,100255,222c1a68-e34c-4103-b1be-17d4ceaef6ce,0.06,false,false,near_mint,en,USD
|
||||||
|
"Arahbo, the First Fang",FDN,Foundations,2,normal,rare,1,100503,524a5d93-26ed-436d-a437-dc9460acce98,1.0,false,false,near_mint,en,USD
|
||||||
|
Authority of the Consuls,FDN,Foundations,137,normal,rare,1,100425,42ce2d7f-5924-47c0-b5ed-dacf9f9617a0,5.3,false,false,near_mint,en,USD
|
||||||
|
Nine-Lives Familiar,FDN,Foundations,321,normal,rare,1,100060,6cc1623f-370d-42b5-88a2-039f31e9be0b,2.67,false,false,near_mint,en,USD
|
||||||
|
Ajani's Pridemate,FDN,Foundations,293,foil,uncommon,1,101180,d4cfb9bc-4273-4e5f-a7ac-2006a8345a4e,0.38,false,false,near_mint,en,USD
|
||||||
|
Helpful Hunter,FDN,Foundations,16,foil,common,1,97172,1b9a0e91-80b5-428f-8f08-931d0631be14,1.61,false,false,near_mint,en,USD
|
||||||
|
Felidar Savior,FDN,Foundations,12,foil,common,1,97191,cd092b14-d72f-4de0-8f19-1338661b9e3b,0.05,false,false,near_mint,en,USD
|
||||||
|
Thrill of Possibility,FDN,Foundations,210,normal,common,3,101561,882b348c-076b-41d8-b505-063480636669,0.03,false,false,near_mint,en,USD
|
||||||
|
Lightshell Duo,FDN,Foundations,157,normal,common,7,101063,bb75315c-ea8f-4eb0-899e-c73ef75fc396,0.04,false,false,near_mint,en,USD
|
||||||
|
Mischievous Pup,FDN,Foundations,144,normal,uncommon,2,100670,7214d984-6400-44d7-bde6-57d96b606e78,0.04,false,false,near_mint,en,USD
|
||||||
|
Swiftwater Cliffs,FDN,Foundations,268,normal,common,3,101389,fb88667d-7088-4889-960f-317486ebe856,0.03,false,false,near_mint,en,USD
|
||||||
|
Hare Apparent,FDN,Foundations,15,normal,common,3,100907,9fc6f0e9-eb5f-4bc0-b3d7-756644b66d12,3.62,false,false,near_mint,en,USD
|
||||||
|
Dazzling Angel,FDN,Foundations,9,normal,common,3,101468,027dc444-e544-4693-8653-3dcdda530162,0.1,false,false,near_mint,en,USD
|
||||||
|
Bigfin Bouncer,FDN,Foundations,31,normal,common,3,100882,9b1d5b76-b07e-45c6-800d-4cfce085164f,0.02,false,false,near_mint,en,USD
|
||||||
|
Ambush Wolf,FDN,Foundations,98,normal,common,4,101492,2903832c-318e-42ab-bf58-c682ec2f7afd,0.05,false,false,near_mint,en,USD
|
||||||
|
Healer's Hawk,FDN,Foundations,142,normal,common,3,101595,cc8e4563-04bb-46b5-835e-64ba11c0e972,0.09,false,false,near_mint,en,USD
|
||||||
|
Rune-Sealed Wall,FDN,Foundations,49,normal,uncommon,2,101212,da0f147b-95ed-4f32-9b46-6a633ae31976,0.15,false,false,near_mint,en,USD
|
||||||
|
Pilfer,FDN,Foundations,181,normal,common,4,101564,8c7c88b5-6d09-453b-b9c1-7dcbba8f1080,0.03,false,false,near_mint,en,USD
|
||||||
|
Stab,FDN,Foundations,71,normal,common,3,101538,6859a5ba-1c1c-4631-bba8-f9900b827178,0.04,false,false,near_mint,en,USD
|
||||||
|
Heartfire Immolator,FDN,Foundations,201,normal,uncommon,2,100390,3ca38f4d-01f5-4a02-9000-01261a440dbf,0.03,false,false,near_mint,en,USD
|
||||||
|
Marauding Blight-Priest,FDN,Foundations,178,normal,common,3,101528,5f70dafc-c638-4ec0-ab5b-62998f752720,0.12,false,false,near_mint,en,USD
|
||||||
|
Broken Wings,FDN,Foundations,214,normal,common,3,100584,61f9cbeb-cc9c-4562-be65-8a77053faefe,0.02,false,false,near_mint,en,USD
|
||||||
|
Firespitter Whelp,FDN,Foundations,197,normal,uncommon,2,100463,4b3a4c7d-3126-4bde-9dca-cb6a1e2f37c9,0.15,false,false,near_mint,en,USD
|
||||||
|
Make Your Move,FDN,Foundations,143,normal,common,3,101546,7368f861-3288-4645-90a7-ca35d6da3721,0.03,false,false,near_mint,en,USD
|
||||||
|
Treetop Snarespinner,FDN,Foundations,114,normal,common,4,101562,88e68fa3-159d-49a6-8ac6-afc9bd6f1718,0.06,false,false,near_mint,en,USD
|
||||||
|
Vengeful Bloodwitch,FDN,Foundations,76,normal,uncommon,2,97189,bd0c12dd-f138-45c0-9614-d83a1d8e8399,0.17,false,false,near_mint,en,USD
|
||||||
|
Evolving Wilds,FDN,Foundations,262,normal,common,4,100376,3a0b9356-5b91-4542-8802-f0f7275238e1,0.06,false,false,near_mint,en,USD
|
||||||
|
Bite Down,FDN,Foundations,212,normal,common,3,101625,f8d70b3b-f6f9-4b3c-ad70-0ce369e812b5,0.04,false,false,near_mint,en,USD
|
||||||
|
Elfsworn Giant,FDN,Foundations,103,normal,common,3,100497,5128a5be-ffa6-4998-8488-872d80b24cb2,0.06,false,false,near_mint,en,USD
|
||||||
|
Apothecary Stomper,FDN,Foundations,99,normal,common,3,101537,680b7b0c-0e1b-46ce-9917-9fc6e05aa148,0.05,false,false,near_mint,en,USD
|
||||||
|
Axgard Cavalry,FDN,Foundations,189,normal,common,3,101631,fe3cc41a-adae-4c9b-b4d3-03f3ca862fed,0.03,false,false,near_mint,en,USD
|
||||||
|
Wary Thespian,FDN,Foundations,235,normal,common,3,101574,a3d62d04-0974-4cb5-9a35-5e996c6456e2,0.01,false,false,near_mint,en,USD
|
||||||
|
Fleeting Flight,FDN,Foundations,13,normal,common,3,101513,55139100-9342-41fd-b10a-8e9932e605d4,0.04,false,false,near_mint,en,USD
|
||||||
|
Quick-Draw Katana,FDN,Foundations,130,normal,common,3,101540,69beec98-c89c-4673-953c-8b3ef3d81560,0.07,false,false,near_mint,en,USD
|
||||||
|
Goblin Surprise,FDN,Foundations,200,normal,common,3,101512,527dd5d4-5f72-40bb-8a9d-1f5ac3f81e2e,0.05,false,false,near_mint,en,USD
|
||||||
|
Sower of Chaos,FDN,Foundations,95,normal,common,4,101556,7ff50606-491c-4946-8d03-719b01cfad77,0.01,false,false,near_mint,en,USD
|
||||||
|
Involuntary Employment,FDN,Foundations,203,normal,common,4,101622,f3ad3d62-2f24-4562-b3fa-809213dbc4a4,0.06,false,false,near_mint,en,USD
|
||||||
|
Burst Lightning,FDN,Foundations,192,normal,common,3,100994,aec5d380-d354-4750-931a-6c91853e2edc,0.08,false,false,near_mint,en,USD
|
||||||
|
Banishing Light,FDN,Foundations,138,normal,common,4,101613,e38dc3b3-1629-491b-8afd-0e7a9a857713,0.03,false,false,near_mint,en,USD
|
||||||
|
Blossoming Sands,FDN,Foundations,260,normal,common,2,100364,37676ed8-588c-4bca-8065-874b74d84807,0.05,false,false,near_mint,en,USD
|
||||||
|
Felidar Savior,FDN,Foundations,12,normal,common,3,97191,cd092b14-d72f-4de0-8f19-1338661b9e3b,0.02,false,false,near_mint,en,USD
|
||||||
|
Revenge of the Rats,FDN,Foundations,67,normal,uncommon,2,100232,1f463c55-39a0-4f2f-aae3-0c5540bde5b7,0.12,false,false,near_mint,en,USD
|
||||||
|
Armasaur Guide,FDN,Foundations,3,normal,common,3,101591,c80fc380-0499-4499-8a60-c43844c02c9b,0.03,false,false,near_mint,en,USD
|
||||||
|
Campus Guide,FDN,Foundations,251,normal,common,3,101504,43c59814-3167-4b05-bb85-6c736f3956a4,0.02,false,false,near_mint,en,USD
|
||||||
|
Dreadwing Scavenger,FDN,Foundations,118,normal,uncommon,2,101252,e24d838b-ab48-410a-9a50-dbfea5da089b,0.04,false,false,near_mint,en,USD
|
||||||
|
Gleaming Barrier,FDN,Foundations,252,normal,common,3,101479,1b49b009-e6f2-494a-9235-f5c25c2d70a9,0.06,false,false,near_mint,en,USD
|
||||||
|
Scoured Barrens,FDN,Foundations,266,normal,common,2,100277,2632a4b2-9ca6-4b67-9a99-14f52ad3dc41,0.07,false,false,near_mint,en,USD
|
||||||
|
Erudite Wizard,FDN,Foundations,37,normal,common,3,100835,9273c417-0fcd-4273-b24e-afff76336d0c,0.01,false,false,near_mint,en,USD
|
||||||
|
Gorehorn Raider,FDN,Foundations,89,normal,common,3,101551,78ce6c40-3452-4aa0-a45b-dbfd70f8d220,0.02,false,false,near_mint,en,USD
|
||||||
|
Cackling Prowler,FDN,Foundations,101,normal,common,3,101481,1bd8e971-c075-4203-8d83-c28f22d4f9b9,0.03,false,false,near_mint,en,USD
|
||||||
|
Burglar Rat,FDN,Foundations,170,normal,common,4,101608,de1c8758-ce3d-49cf-8173-c0eb46f5e7bc,0.05,false,false,near_mint,en,USD
|
||||||
|
Mocking Sprite,FDN,Foundations,159,normal,common,3,101624,f6792f63-b651-497d-8aa5-cddf4cedeca8,0.03,false,false,near_mint,en,USD
|
||||||
|
Cathar Commando,FDN,Foundations,139,normal,common,3,100204,19cf024d-edb6-4a79-8676-73f8db0cdf1f,0.06,false,false,near_mint,en,USD
|
||||||
|
Hungry Ghoul,FDN,Foundations,62,normal,common,3,100701,790f9433-7565-4f7f-88e8-8af762ea0296,0.04,false,false,near_mint,en,USD
|
||||||
|
Vampire Soulcaller,FDN,Foundations,75,normal,common,3,101495,2d076293-3b45-4878-8f67-978927cc1f68,0.04,false,false,near_mint,en,USD
|
||||||
|
Exsanguinate,FDN,Foundations,173,normal,uncommon,1,101330,f11d7311-4066-4a5d-ba28-9857fa707a0b,0.4,false,false,near_mint,en,USD
|
||||||
|
Fanatical Firebrand,FDN,Foundations,195,normal,common,3,101598,d1296316-7781-4e98-95e6-7020648be6a5,0.03,false,false,near_mint,en,USD
|
||||||
|
Sanguine Syphoner,FDN,Foundations,68,normal,common,4,101582,b1daf5bb-c8e9-4e79-a532-ca92a9a885cd,0.07,false,false,near_mint,en,USD
|
||||||
|
Boltwave,FDN,Foundations,79,normal,uncommon,2,100810,8d1ec351-5e70-4eb2-b590-6bff94ef8178,4.08,false,false,near_mint,en,USD
|
||||||
|
Nessian Hornbeetle,FDN,Foundations,229,normal,uncommon,2,100395,3d4d93de-85c6-4653-8ddd-d8bf21516d44,0.05,false,false,near_mint,en,USD
|
||||||
|
Goldvein Pick,FDN,Foundations,253,normal,common,3,101572,a241317d-2277-467e-a8f9-aa71c944e244,0.06,false,false,near_mint,en,USD
|
||||||
|
Icewind Elemental,FDN,Foundations,42,normal,common,3,101629,fd0eba76-3829-408b-828f-0b223c884728,0.05,false,false,near_mint,en,USD
|
||||||
|
Fleeting Distraction,FDN,Foundations,155,normal,common,3,101587,c0b86a7b-4912-43a7-ab89-c3432385baa1,0.02,false,false,near_mint,en,USD
|
||||||
|
Faebloom Trick,FDN,Foundations,38,normal,uncommon,2,100148,0c3bee8f-f5be-4404-a696-c902637799c3,0.17,false,false,near_mint,en,USD
|
||||||
|
Brineborn Cutthroat,FDN,Foundations,152,normal,uncommon,2,100986,acf7aafb-931f-49e5-8691-eab8cb34b05e,0.02,false,false,near_mint,en,USD
|
||||||
|
Gutless Plunderer,FDN,Foundations,60,normal,common,3,101567,909d7778-c7f8-4fa4-89f2-8b32e86e96e4,0.05,false,false,near_mint,en,USD
|
||||||
|
Thornwood Falls,FDN,Foundations,269,normal,common,2,100424,42799f51-0f8c-444b-974e-dae281a5c697,0.05,false,false,near_mint,en,USD
|
||||||
|
Tranquil Cove,FDN,Foundations,270,normal,common,2,100719,7c9cabca-5bcc-4b97-b2ac-a345ad3ee43c,0.06,false,false,near_mint,en,USD
|
||||||
|
Fake Your Own Death,FDN,Foundations,174,normal,common,3,101539,693635a6-df50-44c5-9598-0c79b45d4df4,0.05,false,false,near_mint,en,USD
|
||||||
|
Crypt Feaster,FDN,Foundations,59,normal,common,4,100382,3b072811-998a-4a71-b59c-6afecc0dc4b6,0.03,false,false,near_mint,en,USD
|
||||||
|
Incinerating Blast,FDN,Foundations,90,normal,common,3,101603,d58e20ab-c5ca-4295-884d-78efdaa83243,0.03,false,false,near_mint,en,USD
|
||||||
|
Refute,FDN,Foundations,48,normal,common,3,100368,38806934-dd9c-4ad4-a59c-a16dce03a14a,0.06,false,false,near_mint,en,USD
|
||||||
|
Tolarian Terror,FDN,Foundations,167,normal,common,3,100270,2569d4f3-55ed-4f99-9592-34c7df0aab72,0.09,false,false,near_mint,en,USD
|
||||||
|
Joust Through,FDN,Foundations,19,normal,uncommon,2,100767,846adb38-f9bb-4fed-b8ed-36ec7885f989,0.05,false,false,near_mint,en,USD
|
||||||
|
Bake into a Pie,FDN,Foundations,169,normal,common,3,101494,2ab0e660-86a3-4b92-82fa-77dcb5db947d,0.03,false,false,near_mint,en,USD
|
||||||
|
Soul-Shackled Zombie,FDN,Foundations,70,normal,common,4,101609,deea5690-6eb2-4353-b917-cbbf840e4e71,0.04,false,false,near_mint,en,USD
|
||||||
|
Perforating Artist,FDN,Foundations,124,normal,uncommon,2,100674,72980409-53f0-43c1-965e-06f22e7bb608,0.1,false,false,near_mint,en,USD
|
||||||
|
Serra Angel,FDN,Foundations,147,normal,uncommon,2,100391,3cee9303-9d65-45a2-93d4-ef4aba59141b,0.05,false,false,near_mint,en,USD
|
||||||
|
Squad Rallier,FDN,Foundations,24,normal,common,3,101534,65e1ee86-6f08-4aa0-bf63-ae12028ef080,0.04,false,false,near_mint,en,USD
|
||||||
|
Elementalist Adept,FDN,Foundations,36,normal,common,3,101605,d9768cc6-8f53-4922-ae32-376a2f32d719,0.02,false,false,near_mint,en,USD
|
||||||
|
Elvish Regrower,FDN,Foundations,104,normal,uncommon,2,100278,2694e3cd-26ed-4a10-ae55-fb84d7800253,0.09,false,false,near_mint,en,USD
|
||||||
|
Infestation Sage,FDN,Foundations,64,normal,common,3,101601,d40c73de-7a5f-46f2-a70b-449bc8ecfe24,0.07,false,false,near_mint,en,USD
|
||||||
|
Inspiring Paladin,FDN,Foundations,18,normal,common,3,101472,0763be06-25b2-4d6b-ab33-a1af85aeb443,0.02,false,false,near_mint,en,USD
|
||||||
|
Luminous Rebuke,FDN,Foundations,20,normal,common,3,101529,621839e1-2756-4cdc-a25c-5f76ea98dd87,0.07,false,false,near_mint,en,USD
|
||||||
|
Gnarlid Colony,FDN,Foundations,224,normal,common,3,101508,47565d10-96bf-4fb0-820f-f20a44a76b6f,0.02,false,false,near_mint,en,USD
|
||||||
|
Sure Strike,FDN,Foundations,209,normal,common,3,101525,5de6a1e4-5c66-43e6-9f2a-2635bdab03f6,0.03,false,false,near_mint,en,USD
|
||||||
|
Helpful Hunter,FDN,Foundations,16,normal,common,3,97172,1b9a0e91-80b5-428f-8f08-931d0631be14,0.14,false,false,near_mint,en,USD
|
||||||
|
Goblin Boarders,FDN,Foundations,87,normal,common,3,101506,4409a063-bf2a-4a49-803e-3ce6bd474353,0.04,false,false,near_mint,en,USD
|
||||||
|
Macabre Waltz,FDN,Foundations,177,normal,common,3,101509,4d1f3c84-89ba-4426-a80b-d524f172c912,0.03,false,false,near_mint,en,USD
|
||||||
|
Grow from the Ashes,FDN,Foundations,225,normal,common,3,101502,42525f8a-aee7-4811-8f05-471b559c2c4a,0.03,false,false,near_mint,en,USD
|
||||||
|
Stroke of Midnight,FDN,Foundations,148,normal,uncommon,2,100970,ab135925-d924-456d-851a-6ccdaaf27271,0.17,false,false,near_mint,en,USD
|
||||||
|
Eaten Alive,FDN,Foundations,172,normal,common,3,100216,1c4f7b20-b2a8-498c-8c36-dc296863b0b9,0.02,false,false,near_mint,en,USD
|
||||||
|
Aetherize,FDN,Foundations,151,normal,uncommon,2,100225,1e5530fc-0291-4a17-b048-c5d24e6f51d8,0.17,false,false,near_mint,en,USD
|
||||||
|
Giant Growth,FDN,Foundations,223,normal,common,4,101073,bd0bf74e-14c1-4428-88d8-2181a080b5d0,0.03,false,false,near_mint,en,USD
|
||||||
|
Billowing Shriekmass,FDN,Foundations,56,normal,uncommon,2,100711,7b3587a9-0667-4d53-807b-c437bcb1d7b3,0.02,false,false,near_mint,en,USD
|
||||||
|
Think Twice,FDN,Foundations,165,normal,common,4,101202,d88faaa1-eb41-40f7-991c-5c06e1138f3d,0.05,false,false,near_mint,en,USD
|
||||||
|
Beast-Kin Ranger,FDN,Foundations,100,normal,common,3,100082,0102e0be-5783-4825-9489-713b1b1df0b2,0.05,false,false,near_mint,en,USD
|
||||||
|
Spitfire Lagac,FDN,Foundations,208,normal,common,4,101496,30f600cd-b696-4f49-9cbc-5a33aa43d04c,0.02,false,false,near_mint,en,USD
|
||||||
|
Aegis Turtle,FDN,Foundations,150,normal,common,3,101590,c7f2014a-fbc9-447c-a440-e06d01066bb9,0.08,false,false,near_mint,en,USD
|
||||||
|
Firebrand Archer,FDN,Foundations,196,normal,common,3,101630,fe0312f1-4c98-4b7f-8a34-0059ea80edef,0.05,false,false,near_mint,en,USD
|
||||||
|
Shivan Dragon,FDN,Foundations,206,normal,uncommon,2,100236,1fcff1e0-2745-448d-a27b-e31719e222e9,0.05,false,false,near_mint,en,USD
|
||||||
|
Cephalid Inkmage,FDN,Foundations,32,normal,uncommon,2,101040,b7e47680-18c7-4ffb-aac4-c5db6e7095ba,0.05,false,false,near_mint,en,USD
|
||||||
|
Prideful Parent,FDN,Foundations,21,normal,common,3,97188,b742117a-8a72-43b9-b05d-274829d138a2,0.04,false,false,near_mint,en,USD
|
||||||
|
Uncharted Voyage,FDN,Foundations,53,normal,common,4,101611,e0846820-e595-4743-8a28-29c57d728677,0.01,false,false,near_mint,en,USD
|
||||||
|
Eager Trufflesnout,FDN,Foundations,102,normal,uncommon,2,100940,a6e8433d-eb2a-43d1-b59b-7d70ff97c8e7,0.04,false,false,near_mint,en,USD
|
||||||
|
Juggernaut,FDN,Foundations,255,normal,uncommon,2,101351,f4468fff-cd6f-428c-b7a0-ff89f5bbea2e,0.07,false,false,near_mint,en,USD
|
||||||
|
Llanowar Elves,FDN,Foundations,227,normal,common,3,95583,6a0b230b-d391-4998-a3f7-7b158a0ec2cd,0.15,false,false,near_mint,en,USD
|
||||||
|
Overrun,FDN,Foundations,230,normal,uncommon,2,100220,1d8e9cbb-8bf4-4a48-a58e-79deb3abdf7f,0.14,false,false,near_mint,en,USD
|
||||||
|
Crackling Cyclops,FDN,Foundations,83,normal,common,3,101541,6e5b899a-52f7-471b-ad50-4fa6566758fd,0.01,false,false,near_mint,en,USD
|
||||||
|
Mischievous Mystic,FDN,Foundations,47,normal,uncommon,2,100242,20d89cec-528b-4b2a-87db-e11ce0000622,0.14,false,false,near_mint,en,USD
|
||||||
|
Witness Protection,FDN,Foundations,168,normal,common,3,101621,f231e981-0069-43ce-ac1c-c85ced613e93,0.08,false,false,near_mint,en,USD
|
||||||
|
Dwynen's Elite,FDN,Foundations,218,normal,common,3,100800,89d94c28-ea2e-4a3d-935f-6b2d9f2efc7a,0.05,false,false,near_mint,en,USD
|
||||||
|
Bushwhack,FDN,Foundations,215,normal,common,3,101469,03ebdb36-55e0-49dd-a514-785fbeb4ae19,0.1,false,false,near_mint,en,USD
|
||||||
|
Run Away Together,FDN,Foundations,162,normal,common,3,101614,e598eb7b-10dc-49e6-ac60-2fefa987173e,0.05,false,false,near_mint,en,USD
|
||||||
|
Strongbox Raider,FDN,Foundations,96,normal,uncommon,2,101006,b2223eb8-59f9-489b-a3f3-b6496218cb79,0.02,false,false,near_mint,en,USD
|
||||||
|
Vanguard Seraph,FDN,Foundations,28,normal,common,4,101503,4329c861-fc16-4a96-9c03-25af6ac2adc8,0.06,false,false,near_mint,en,USD
|
||||||
|
Self-Reflection,FDN,Foundations,163,normal,uncommon,2,101247,e1e6abc9-25b2-4d51-b519-2525079eab51,0.04,false,false,near_mint,en,USD
|
||||||
|
Strix Lookout,FDN,Foundations,52,normal,common,3,101627,fbd2422e-8e84-4c39-af29-3b4d38baee63,0.03,false,false,near_mint,en,USD
|
||||||
|
Cat Collector,FDN,Foundations,4,normal,uncommon,2,100507,526fe356-bff1-4211-9e88-bf913ac76b1d,0.1,false,false,near_mint,en,USD
|
||||||
|
Courageous Goblin,FDN,Foundations,82,normal,common,3,101566,8db6819c-666a-409d-85a5-b9ac34d8dd2f,0.03,false,false,near_mint,en,USD
|
||||||
|
"Ygra, Eater of All",BLB,Bloomburrow,241,normal,mythic,1,95825,b9ac7673-eae8-4c4b-889e-5025213a6151,11.58,false,false,near_mint,en,USD
|
||||||
|
Lifecreed Duo,BLB,Bloomburrow,20,normal,common,1,95968,ca543405-5e12-48a0-9a77-082ac9bcb2f2,0.06,false,false,near_mint,en,USD
|
||||||
|
Take Out the Trash,BLB,Bloomburrow,156,normal,common,1,95940,7a1c6f00-af4c-4d35-b682-6c0e759df9a5,0.04,false,false,near_mint,en,USD
|
||||||
|
Ravine Raider,BLB,Bloomburrow,106,normal,common,1,96370,874510be-7ecd-4eff-abad-b9594eb4821a,0.02,false,false,near_mint,en,USD
|
||||||
|
Longstalk Brawl,BLB,Bloomburrow,182,normal,common,1,95966,c7ef748c-b5e5-4e7d-bf2e-d3e6c08edb42,0.04,false,false,near_mint,en,USD
|
||||||
|
Valley Floodcaller,BLB,Bloomburrow,79,normal,rare,1,95876,90b12da0-f666-471d-95f5-15d8c9b31c92,2.65,false,false,near_mint,en,USD
|
||||||
|
Bandit's Talent,BLB,Bloomburrow,83,normal,uncommon,1,95917,485dc8d8-9e44-4a0f-9ff6-fa448e232290,0.47,false,false,near_mint,en,USD
|
||||||
|
Brambleguard Veteran,BLB,Bloomburrow,165,normal,uncommon,1,95880,bac9f6f8-6797-4580-9fc4-9a825872e017,0.09,false,false,near_mint,en,USD
|
||||||
|
Mouse Trapper,BLB,Bloomburrow,22,normal,uncommon,1,95948,8ba1bc5a-03e7-44ec-893e-44042cbc02ef,0.04,false,false,near_mint,en,USD
|
||||||
|
Bushy Bodyguard,BLB,Bloomburrow,166,normal,uncommon,1,95997,0de60cf7-fa82-4b6f-9f88-6590fba5c863,0.08,false,false,near_mint,en,USD
|
||||||
|
Valley Mightcaller,BLB,Bloomburrow,202,normal,rare,1,96057,7256451f-0122-452a-88e8-0fb0f6bea3f3,1.01,false,false,near_mint,en,USD
|
||||||
|
Druid of the Spade,BLB,Bloomburrow,170,normal,common,1,96054,6b485cf7-bad0-4824-9ba7-cb112ce4769f,0.02,false,false,near_mint,en,USD
|
||||||
|
Skyskipper Duo,BLB,Bloomburrow,71,normal,common,1,96476,d6844bad-ffbe-4c6e-b438-08562eccea52,0.04,false,false,near_mint,en,USD
|
||||||
|
Osteomancer Adept,BLB,Bloomburrow,103,normal,rare,1,95800,7d8238dd-858f-466c-96de-986bd66861d7,0.36,false,false,near_mint,en,USD
|
||||||
|
Tender Wildguide,BLB,Bloomburrow,196,normal,rare,1,95792,6b8bfa91-adb0-4596-8c16-d8bb64fdb26d,0.49,false,false,near_mint,en,USD
|
||||||
|
Huskburster Swarm,BLB,Bloomburrow,98,normal,uncommon,1,95978,ed2f61d7-4eb0-41c5-8a34-a0793c2abc51,0.13,false,false,near_mint,en,USD
|
||||||
|
Scrapshooter,BLB,Bloomburrow,191,normal,rare,1,96113,c42ab407-e72d-4c48-9a9e-2055b5e71c69,0.38,false,false,near_mint,en,USD
|
||||||
|
Scavenger's Talent,BLB,Bloomburrow,111,normal,rare,1,96084,9a52b7fe-87ae-425b-85fd-b24e6e0395f1,1.54,false,false,near_mint,en,USD
|
||||||
|
Valley Rotcaller,BLB,Bloomburrow,119,normal,rare,1,95781,4da80a9a-b1d5-4fc5-92f7-36946195d0c7,1.45,false,false,near_mint,en,USD
|
||||||
|
Thornplate Intimidator,BLB,Bloomburrow,117,normal,common,1,96019,42f66c4a-feaa-4ba6-aa56-955b43329a9e,0.02,false,false,near_mint,en,USD
|
||||||
|
Bakersbane Duo,BLB,Bloomburrow,163,normal,common,1,96035,5309354f-1ff4-4fa9-9141-01ea2f7588ab,0.1,false,false,near_mint,en,USD
|
||||||
|
Shore Up,BLB,Bloomburrow,69,normal,common,1,96277,4dc3b49e-3674-494c-bdea-4374cefd10f4,0.08,false,false,near_mint,en,USD
|
||||||
|
Emberheart Challenger,BLB,Bloomburrow,133,normal,rare,1,95888,0035082e-bb86-4f95-be48-ffc87fe5286d,4.13,false,false,near_mint,en,USD
|
||||||
|
"Gev, Scaled Scorch",BLB,Bloomburrow,214,normal,rare,1,96001,131ea976-289e-4f32-896d-27bbfd423ba9,0.37,false,false,near_mint,en,USD
|
||||||
|
Starfall Invocation,BLB,Bloomburrow,34,normal,rare,1,95904,2aea38e6-ec58-4091-b27c-2761bdd12b13,0.88,false,false,near_mint,en,USD
|
||||||
|
Tidecaller Mentor,BLB,Bloomburrow,236,normal,uncommon,1,95859,fa10ffac-7cc2-41ef-b8a0-9431923c0542,0.04,false,false,near_mint,en,USD
|
||||||
|
Jackdaw Savior,BLB,Bloomburrow,18,normal,rare,1,96000,121af600-6143-450a-9f87-12ce4833f1ec,0.27,false,false,near_mint,en,USD
|
||||||
|
"Helga, Skittish Seer",BLB,Bloomburrow,217,normal,mythic,1,95914,40339715-22d0-4f99-822b-a00d9824f27a,2.0,false,false,near_mint,en,USD
|
||||||
|
Long River Lurker,BLB,Bloomburrow,57,normal,uncommon,1,95941,7c267719-cd03-4003-b281-e732d5e42a1e,0.1,false,false,near_mint,en,USD
|
||||||
|
Thornvault Forager,BLB,Bloomburrow,197,normal,rare,1,95807,8c2d6b02-a453-40f9-992a-5c5542987cfb,0.65,false,false,near_mint,en,USD
|
||||||
|
Eddymurk Crab,BLB,Bloomburrow,48,normal,uncommon,1,96132,e6d45abe-4962-47d9-a54e-7e623ea8647c,0.18,false,false,near_mint,en,USD
|
||||||
|
Moonstone Harbinger,BLB,Bloomburrow,101,normal,uncommon,1,95922,59e4aa8d-1d06-48db-b205-aa2f1392bbcb,0.03,false,false,near_mint,en,USD
|
||||||
|
Brazen Collector,BLB,Bloomburrow,128,normal,uncommon,1,95873,78b55a58-c669-4dc6-aa63-5d9dff52e613,0.09,false,false,near_mint,en,USD
|
||||||
|
Brightblade Stoat,BLB,Bloomburrow,4,normal,uncommon,1,95882,df7fea2e-7414-4bc8-adb0-9342e174c009,0.07,false,false,near_mint,en,USD
|
||||||
|
Warren Warleader,BLB,Bloomburrow,38,normal,mythic,1,95849,eb5237a0-5ac3-4ded-9f92-5f782a7bbbd7,3.14,false,false,near_mint,en,USD
|
||||||
|
Kitnap,BLB,Bloomburrow,53,normal,rare,1,95739,085be5d1-fd85-46d1-ad39-a8aa75a06a96,0.14,false,false,near_mint,en,USD
|
||||||
|
Fountainport,BLB,Bloomburrow,253,normal,rare,1,96052,658cfcb7-81b7-48c6-9dd2-1663d06108cf,5.77,false,false,near_mint,en,USD
|
||||||
|
Whiskervale Forerunner,BLB,Bloomburrow,40,normal,rare,1,95927,60a78d59-af31-4af9-95aa-2573fe553925,0.17,false,false,near_mint,en,USD
|
||||||
|
Dreamdew Entrancer,BLB,Bloomburrow,211,normal,rare,1,95755,26bd6b0d-8606-4a37-8be3-a852f1a8e99c,0.28,false,false,near_mint,en,USD
|
||||||
|
Playful Shove,BLB,Bloomburrow,145,normal,uncommon,1,95993,07956edf-34c1-4218-9784-ddbca13e380c,0.1,false,false,near_mint,en,USD
|
||||||
|
Feed the Cycle,BLB,Bloomburrow,94,normal,uncommon,1,96067,7e017ff8-2936-4a1b-bece-00004cfbad06,0.12,false,false,near_mint,en,USD
|
||||||
|
Hoarder's Overflow,BLB,Bloomburrow,141,normal,uncommon,1,96112,c2ed5079-07b4-4575-a2c8-5f0cbff888c3,0.04,false,false,near_mint,en,USD
|
||||||
|
Sunspine Lynx,BLB,Bloomburrow,155,normal,rare,1,95875,8995ceaf-b7e0-423c-8f3e-25212d522502,1.8,false,false,near_mint,en,USD
|
||||||
|
Stormcatch Mentor,BLB,Bloomburrow,234,normal,uncommon,1,95813,99754055-6d67-4fde-aff3-41f6af6ea764,0.21,false,false,near_mint,en,USD
|
||||||
|
For the Common Good,BLB,Bloomburrow,172,normal,rare,1,95912,3ec72a27-b622-47d7-bdf3-970ccaef0d2a,0.87,false,false,near_mint,en,USD
|
||||||
|
Dawn's Truce,BLB,Bloomburrow,295,normal,rare,1,95893,0cce7aec-f9b0-461b-8245-5286b741409d,8.43,false,false,near_mint,en,USD
|
||||||
|
"Clement, the Worrywort",BLB,Bloomburrow,329,normal,rare,1,95835,d1a68d51-cd4e-4ee3-abc7-01435085aa26,0.55,false,false,near_mint,en,USD
|
||||||
|
Tender Wildguide,BLB,Bloomburrow,325,normal,rare,1,95760,2dc164c8-62ca-4d59-ae1c-ef273fde9d10,0.63,false,false,near_mint,en,USD
|
||||||
|
Valley Questcaller,BLB,Bloomburrow,299,normal,rare,1,95839,d9f25130-678d-4338-8eb4-b20d2da5bc74,1.0,false,false,near_mint,en,USD
|
||||||
|
Heirloom Epic,BLB,Bloomburrow,246,normal,uncommon,1,96061,7839ce48-0175-494a-ab89-9bdfb7a50cb1,0.06,false,false,near_mint,en,USD
|
||||||
|
Shrike Force,BLB,Bloomburrow,31,normal,uncommon,1,95763,306fec2c-d8b7-4f4b-8f58-10e3b9f3158f,0.14,false,false,near_mint,en,USD
|
||||||
|
Into the Flood Maw,BLB,Bloomburrow,52,normal,uncommon,1,95919,50b9575a-53d9-4df7-b86c-cda021107d3f,1.48,false,false,near_mint,en,USD
|
||||||
|
Salvation Swan,BLB,Bloomburrow,28,normal,rare,1,95635,b2656160-d319-4530-a6e5-c418596c3f12,0.27,false,false,near_mint,en,USD
|
||||||
|
Hired Claw,BLB,Bloomburrow,140,normal,rare,1,95897,1ae41080-0d67-4719-adb2-49bf2a268b6c,2.43,false,false,near_mint,en,USD
|
||||||
|
Starseer Mentor,BLB,Bloomburrow,233,normal,uncommon,1,95791,6b2f6dc5-9fe8-49c1-b24c-1d99ce1da619,0.05,false,false,near_mint,en,USD
|
||||||
|
Mistbreath Elder,BLB,Bloomburrow,184,normal,rare,1,95975,e5246540-5a84-41d8-9e30-8e7a6c0e84e1,0.37,false,false,near_mint,en,USD
|
||||||
|
Hivespine Wolverine,BLB,Bloomburrow,177,normal,uncommon,1,95943,821970a3-a291-4fe9-bb13-dfc54f9c3caf,0.06,false,false,near_mint,en,USD
|
||||||
|
Patchwork Banner,BLB,Bloomburrow,247,normal,uncommon,1,96097,a8a982c8-bc08-44ba-b3ed-9e4b124615d6,4.68,false,false,near_mint,en,USD
|
||||||
|
"Beza, the Bounding Spring",BLB,Bloomburrow,2,normal,mythic,1,95862,fc310a26-b6a0-4e42-98ab-bdfd7b06cb63,9.56,false,false,near_mint,en,USD
|
||||||
|
Essence Channeler,BLB,Bloomburrow,12,normal,rare,1,96042,5aaf7e4c-4d5d-4acc-a834-e6c4a7629408,1.27,false,false,near_mint,en,USD
|
||||||
|
Valley Questcaller,BLB,Bloomburrow,36,normal,rare,1,95826,ba629ca8-a368-4282-8a61-9bf6a5c217f0,1.12,false,false,near_mint,en,USD
|
||||||
|
Conduct Electricity,BLB,Bloomburrow,130,normal,common,1,95906,2f373dd6-2412-453c-85ba-10230dfe473a,0.02,false,false,near_mint,en,USD
|
||||||
|
Glidedive Duo,BLB,Bloomburrow,96,normal,common,1,96026,4831e7ae-54e3-4bd9-b5af-52dc29f81715,0.02,false,false,near_mint,en,USD
|
||||||
|
Mind Spiral,BLB,Bloomburrow,59,normal,common,1,96068,7e24fe6a-607b-49b8-9fca-cecb1e40de7f,0.01,false,false,near_mint,en,USD
|
||||||
|
Starforged Sword,BLB,Bloomburrow,249,normal,uncommon,1,96110,c23d8e96-b972-4c6c-b0c4-b6627621f048,0.03,false,false,near_mint,en,USD
|
||||||
|
Vinereap Mentor,BLB,Bloomburrow,238,normal,uncommon,1,95902,29b615ba-45c4-42a1-8525-1535f0b55300,0.16,false,false,near_mint,en,USD
|
||||||
|
Mindwhisker,BLB,Bloomburrow,60,normal,uncommon,1,96099,aaa10f34-5bfd-4d87-8f07-58de3b0f5663,0.08,false,false,near_mint,en,USD
|
||||||
|
Persistent Marshstalker,BLB,Bloomburrow,104,normal,uncommon,1,95947,8b900c71-713b-4b7e-b4be-ad9f4aa0c139,0.13,false,false,near_mint,en,USD
|
||||||
|
Portent of Calamity,BLB,Bloomburrow,66,normal,rare,1,96073,8599e2dd-9164-4da3-814f-adccef3b9497,0.14,false,false,near_mint,en,USD
|
||||||
|
Fabled Passage,BLB,Bloomburrow,252,normal,rare,1,96075,8809830f-d8e1-4603-9652-0ad8b00234e9,5.13,false,false,near_mint,en,USD
|
||||||
|
Stormsplitter,BLB,Bloomburrow,154,normal,mythic,1,96040,56f214d3-6b93-40db-a693-55e491c8a283,3.12,false,false,near_mint,en,USD
|
||||||
|
Stargaze,BLB,Bloomburrow,114,normal,uncommon,1,95939,777fc599-8de7-44d2-8fdd-9bddf5948a0c,0.14,false,false,near_mint,en,USD
|
||||||
|
Coruscation Mage,BLB,Bloomburrow,131,normal,uncommon,1,95972,dc2c1de0-6233-469a-be72-a050b97d2c8f,0.32,false,false,near_mint,en,USD
|
||||||
|
Dour Port-Mage,BLB,Bloomburrow,47,normal,rare,1,96049,6402133e-eed1-4a46-9667-8b7a310362c1,2.17,false,false,near_mint,en,USD
|
||||||
|
"Muerra, Trash Tactician",BLB,Bloomburrow,227,normal,rare,1,95821,b40e4658-fd68-46d0-9a89-25570a023d19,0.31,false,false,near_mint,en,USD
|
||||||
|
Stormchaser's Talent,BLB,Bloomburrow,75,normal,rare,1,96092,a36e682d-b43d-4e08-bf5b-70d7e924dbe5,13.62,false,false,near_mint,en,USD
|
||||||
|
Sinister Monolith,BLB,Bloomburrow,113,normal,uncommon,1,96012,2a15e06c-2608-4e7a-a16c-d35417669d86,0.08,false,false,near_mint,en,USD
|
||||||
|
Pawpatch Formation,BLB,Bloomburrow,186,normal,uncommon,1,95963,b82c20ad-0f69-4822-ae76-770832cccdf7,1.83,false,false,near_mint,en,USD
|
||||||
|
Plumecreed Mentor,BLB,Bloomburrow,228,normal,uncommon,1,95819,b1aa988f-547e-449a-9f1a-296c01d68d96,0.03,false,false,near_mint,en,USD
|
||||||
|
"Baylen, the Haymaker",BLB,Bloomburrow,205,normal,rare,1,95889,00e93be2-e06b-4774-8ba5-ccf82a6da1d8,1.04,false,false,near_mint,en,USD
|
||||||
|
Long River's Pull,BLB,Bloomburrow,58,normal,uncommon,1,95900,1c81d0fa-81a1-4f9b-a5fd-5a648fd01dea,0.23,false,false,near_mint,en,USD
|
||||||
|
Bonecache Overseer,BLB,Bloomburrow,85,normal,uncommon,1,95944,82defb87-237f-4b77-9673-5bf00607148f,0.08,false,false,near_mint,en,USD
|
||||||
|
Three Tree Scribe,BLB,Bloomburrow,199,normal,uncommon,1,95977,ea2ca1b3-4c1a-4be5-b321-f57db5ff0528,0.15,false,false,near_mint,en,USD
|
||||||
|
Cruelclaw's Heist,BLB,Bloomburrow,88,normal,rare,1,96121,cab4539a-0157-4cbe-b50f-6e2575df74e9,0.48,false,false,near_mint,en,USD
|
||||||
|
Manifold Mouse,BLB,Bloomburrow,143,normal,rare,1,95881,db3832b5-e83f-4569-bd49-fb7b86fa2d47,3.37,false,false,near_mint,en,USD
|
||||||
|
Iridescent Vinelasher,BLB,Bloomburrow,99,normal,rare,1,95877,b2bc854c-4e72-48e0-a098-e3451d6e511d,1.11,false,false,near_mint,en,USD
|
||||||
|
Daggerfang Duo,BLB,Bloomburrow,89,normal,common,1,96468,cea2bb34-e328-44fb-918a-72208c9457e4,0.03,false,false,near_mint,en,USD
|
||||||
|
Stickytongue Sentinel,BLB,Bloomburrow,193,normal,common,1,96105,b5fa9651-b217-4f93-9c46-9bdb11feedcb,0.03,false,false,near_mint,en,USD
|
||||||
|
Brave-Kin Duo,BLB,Bloomburrow,3,normal,common,1,95824,b8dd4693-424d-4d6e-86cf-24401a23d6b1,0.03,false,false,near_mint,en,USD
|
||||||
|
Driftgloom Coyote,BLB,Bloomburrow,11,normal,uncommon,1,95969,d7ab2de3-3aea-461a-a74f-fb742cf8a198,0.03,false,false,near_mint,en,USD
|
||||||
|
Rockface Village,BLB,Bloomburrow,259,normal,uncommon,1,95629,62799d24-39a6-4e66-8ac3-7cafa99e6e6d,0.48,false,false,near_mint,en,USD
|
||||||
|
Flamecache Gecko,BLB,Bloomburrow,135,normal,uncommon,1,96142,fb8e7c97-8393-41b8-bb0b-3983dcc5e7f4,0.08,false,false,near_mint,en,USD
|
||||||
|
Innkeeper's Talent,BLB,Bloomburrow,180,normal,rare,1,95954,941b0afc-0e8f-45f2-ae7f-07595e164611,19.36,false,false,near_mint,en,USD
|
||||||
|
Repel Calamity,BLB,Bloomburrow,27,foil,uncommon,1,95834,d068192a-6270-4981-819d-4945fa4a2b83,0.08,false,false,near_mint,en,USD
|
||||||
|
Galewind Moose,BLB,Bloomburrow,173,foil,uncommon,1,95871,58706bd8-558a-43b9-9f1e-c1ff0044203b,0.14,false,false,near_mint,en,USD
|
||||||
|
Brave-Kin Duo,BLB,Bloomburrow,3,foil,common,1,95824,b8dd4693-424d-4d6e-86cf-24401a23d6b1,0.06,false,false,near_mint,en,USD
|
||||||
|
Agate Assault,BLB,Bloomburrow,122,foil,common,1,96066,7dd9946b-515e-4e0d-9da2-711e126e9fa6,0.03,false,false,near_mint,en,USD
|
||||||
|
Flamecache Gecko,BLB,Bloomburrow,135,foil,uncommon,1,96142,fb8e7c97-8393-41b8-bb0b-3983dcc5e7f4,0.12,false,false,near_mint,en,USD
|
||||||
|
Rabid Gnaw,BLB,Bloomburrow,147,foil,uncommon,1,96014,2f815bae-820a-49f6-8eed-46f658e7b6ff,0.1,false,false,near_mint,en,USD
|
||||||
|
Pond Prophet,BLB,Bloomburrow,229,foil,common,1,95861,fb959e74-61ea-453d-bb9f-ad0183c0e1b1,0.16,false,false,near_mint,en,USD
|
||||||
|
Star Charter,BLB,Bloomburrow,33,foil,uncommon,1,95894,0e209237-00f7-4bf0-8287-ccde02ce8e8d,0.12,false,false,near_mint,en,USD
|
||||||
|
Kindlespark Duo,BLB,Bloomburrow,142,foil,common,1,96096,a839fba3-1b66-4dd1-bf43-9b015b44fc81,0.07,false,false,near_mint,en,USD
|
||||||
|
Crumb and Get It,BLB,Bloomburrow,8,foil,common,1,96259,3c7b3b25-d4b3-4451-9f5c-6eb369541175,0.04,false,false,near_mint,en,USD
|
||||||
|
Peerless Recycling,BLB,Bloomburrow,188,foil,uncommon,1,95925,5f72466c-505b-4371-9366-0fde525a37e6,0.23,false,false,near_mint,en,USD
|
||||||
|
Nocturnal Hunger,BLB,Bloomburrow,102,foil,common,1,96060,742c0409-9abd-4559-b52e-932cc90c531a,0.02,false,false,near_mint,en,USD
|
||||||
|
Seedpod Squire,BLB,Bloomburrow,232,foil,common,1,95852,f3684577-51ce-490e-9b59-b19c733be466,0.03,false,false,near_mint,en,USD
|
||||||
|
Nettle Guard,BLB,Bloomburrow,23,foil,common,1,95949,8c9c3cc3-2aa2-453e-a17c-2baeeaabe0a9,0.05,false,false,near_mint,en,USD
|
||||||
|
Sazacap's Brew,BLB,Bloomburrow,151,foil,common,1,96330,6d963080-b3ec-467d-82f7-39db6ecd6bbc,0.05,false,false,near_mint,en,USD
|
||||||
|
Waterspout Warden,BLB,Bloomburrow,80,foil,common,1,95909,35898b39-98e2-405b-8f18-0e054bd2c29e,0.04,false,false,near_mint,en,USD
|
||||||
|
Mindwhisker,BLB,Bloomburrow,60,foil,uncommon,1,96099,aaa10f34-5bfd-4d87-8f07-58de3b0f5663,0.12,false,false,near_mint,en,USD
|
||||||
|
Splash Portal,BLB,Bloomburrow,74,foil,uncommon,1,95958,adbaa356-28ba-487f-930a-a957d9960ab0,0.28,false,false,near_mint,en,USD
|
||||||
|
Festival of Embers,BLB,Bloomburrow,134,foil,rare,1,96023,4433ee12-2013-4fdc-979f-ae065f63a527,0.2,false,false,near_mint,en,USD
|
||||||
|
Brightblade Stoat,BLB,Bloomburrow,4,foil,uncommon,1,95882,df7fea2e-7414-4bc8-adb0-9342e174c009,0.11,false,false,near_mint,en,USD
|
||||||
|
Mind Spiral,BLB,Bloomburrow,59,foil,common,1,96068,7e24fe6a-607b-49b8-9fca-cecb1e40de7f,0.04,false,false,near_mint,en,USD
|
||||||
|
Rust-Shield Rampager,BLB,Bloomburrow,190,foil,common,1,96117,c96b01f5-83de-4237-a68d-f946c53e31a6,0.04,false,false,near_mint,en,USD
|
||||||
|
Barkform Harvester,BLB,Bloomburrow,243,foil,common,1,95984,f77049a6-0f22-415b-bc89-20bcb32accf6,0.11,false,false,near_mint,en,USD
|
||||||
|
Wax-Wane Witness,BLB,Bloomburrow,39,foil,common,1,95971,d90ea719-5320-46c6-a347-161853a14776,0.05,false,false,near_mint,en,USD
|
||||||
|
Warren Elder,BLB,Bloomburrow,37,foil,common,1,96030,4bf20069-5a20-4f95-976b-6af2b69f3ad0,0.04,false,false,near_mint,en,USD
|
||||||
|
Stickytongue Sentinel,BLB,Bloomburrow,193,foil,common,1,96105,b5fa9651-b217-4f93-9c46-9bdb11feedcb,0.05,false,false,near_mint,en,USD
|
||||||
|
"Vren, the Relentless",BLB,Bloomburrow,239,foil,rare,1,95930,6506277d-f031-4db5-9d16-bf2389094785,0.71,false,false,near_mint,en,USD
|
||||||
|
Three Tree Scribe,BLB,Bloomburrow,199,foil,uncommon,1,95977,ea2ca1b3-4c1a-4be5-b321-f57db5ff0528,0.2,false,false,near_mint,en,USD
|
||||||
|
Glidedive Duo,BLB,Bloomburrow,96,foil,common,1,96026,4831e7ae-54e3-4bd9-b5af-52dc29f81715,0.03,false,false,near_mint,en,USD
|
||||||
|
Bushy Bodyguard,BLB,Bloomburrow,166,foil,uncommon,1,95997,0de60cf7-fa82-4b6f-9f88-6590fba5c863,0.12,false,false,near_mint,en,USD
|
||||||
|
Conduct Electricity,BLB,Bloomburrow,130,foil,common,1,95906,2f373dd6-2412-453c-85ba-10230dfe473a,0.03,false,false,near_mint,en,USD
|
||||||
|
Daggerfang Duo,BLB,Bloomburrow,89,foil,common,1,96468,cea2bb34-e328-44fb-918a-72208c9457e4,0.07,false,false,near_mint,en,USD
|
||||||
|
Shore Up,BLB,Bloomburrow,69,foil,common,1,96277,4dc3b49e-3674-494c-bdea-4374cefd10f4,0.13,false,false,near_mint,en,USD
|
||||||
|
Hidden Grotto,BLB,Bloomburrow,254,foil,common,1,95918,4ba8f2e7-8357-4862-97dc-1942d066023a,0.17,false,false,near_mint,en,USD
|
||||||
|
Cindering Cutthroat,BLB,Bloomburrow,208,foil,common,1,95820,b2ea10dd-21ea-4622-be27-79d03a802b85,0.01,false,false,near_mint,en,USD
|
||||||
|
"Glarb, Calamity's Augur",BLB,Bloomburrow,215,foil,mythic,1,95864,ffc70b2d-5a3a-49ea-97db-175a62248302,4.3,false,false,near_mint,en,USD
|
||||||
|
Kindlespark Duo,BLB,Bloomburrow,142,normal,common,5,96096,a839fba3-1b66-4dd1-bf43-9b015b44fc81,0.04,false,false,near_mint,en,USD
|
||||||
|
Finch Formation,BLB,Bloomburrow,50,normal,common,2,95899,1c671eab-d1ef-4d79-94eb-8b85f0d18699,0.02,false,false,near_mint,en,USD
|
||||||
|
Builder's Talent,BLB,Bloomburrow,5,normal,uncommon,2,96002,15fa581a-724e-4196-a9a3-ff84c54bdb7d,0.08,false,false,near_mint,en,USD
|
||||||
|
Might of the Meek,BLB,Bloomburrow,144,normal,common,9,95627,509bf254-8a2b-4dfa-9ae5-386321b35e8b,0.09,false,false,near_mint,en,USD
|
||||||
|
Nightwhorl Hermit,BLB,Bloomburrow,62,normal,common,3,95994,0928e04f-2568-41e8-b603-7a25cf5f94d0,0.02,false,false,near_mint,en,USD
|
||||||
|
Fell,BLB,Bloomburrow,95,normal,uncommon,2,95830,c96ac326-de44-470b-a592-a4c2a052c091,0.3,false,false,near_mint,en,USD
|
||||||
|
Sunshower Druid,BLB,Bloomburrow,195,normal,common,6,95630,7740abc5-54e1-478d-966e-0fa64e727995,0.04,false,false,near_mint,en,USD
|
||||||
|
Wandertale Mentor,BLB,Bloomburrow,240,normal,uncommon,2,95808,8c399a55-d02e-41ed-b827-8784b738c118,0.09,false,false,near_mint,en,USD
|
||||||
|
Thought-Stalker Warlock,BLB,Bloomburrow,118,normal,uncommon,2,96018,42e80284-d489-493b-ae92-95b742d07cb3,0.12,false,false,near_mint,en,USD
|
||||||
|
Splash Portal,BLB,Bloomburrow,74,normal,uncommon,2,95958,adbaa356-28ba-487f-930a-a957d9960ab0,0.23,false,false,near_mint,en,USD
|
||||||
|
Alania's Pathmaker,BLB,Bloomburrow,123,normal,common,7,96123,d3871fe6-e26e-4ab4-bd81-7e3c7b8135c1,0.02,false,false,near_mint,en,USD
|
||||||
|
Head of the Homestead,BLB,Bloomburrow,216,normal,common,3,95762,2fc20157-edd3-484d-8864-925c071c0551,0.04,false,false,near_mint,en,USD
|
||||||
|
Hidden Grotto,BLB,Bloomburrow,254,normal,common,4,95918,4ba8f2e7-8357-4862-97dc-1942d066023a,0.08,false,false,near_mint,en,USD
|
||||||
|
Star Charter,BLB,Bloomburrow,33,normal,uncommon,3,95894,0e209237-00f7-4bf0-8287-ccde02ce8e8d,0.04,false,false,near_mint,en,USD
|
||||||
|
War Squeak,BLB,Bloomburrow,160,normal,common,4,95999,105964a7-88b7-4340-aa66-e908189a3638,0.02,false,false,near_mint,en,USD
|
||||||
|
Bellowing Crier,BLB,Bloomburrow,42,normal,common,2,96119,ca2215dd-6300-49cf-b9b2-3a840b786c31,0.04,false,false,near_mint,en,USD
|
||||||
|
Cindering Cutthroat,BLB,Bloomburrow,208,normal,common,4,95820,b2ea10dd-21ea-4622-be27-79d03a802b85,0.02,false,false,near_mint,en,USD
|
||||||
|
Intrepid Rabbit,BLB,Bloomburrow,17,normal,common,7,96276,4d70b99d-c8bf-4a56-8957-cf587fe60b81,0.03,false,false,near_mint,en,USD
|
||||||
|
Carrot Cake,BLB,Bloomburrow,7,normal,common,3,95636,eb03bb4f-8b4b-417e-bfc6-294cd2186b2e,0.06,false,false,near_mint,en,USD
|
||||||
|
Thought Shucker,BLB,Bloomburrow,77,normal,common,7,95916,44b0d83b-cc41-4f82-892c-ef6d3293228a,0.02,false,false,near_mint,en,USD
|
||||||
|
Seasoned Warrenguard,BLB,Bloomburrow,30,normal,uncommon,2,96081,90873995-876f-4e89-8bc7-41a74f4d931f,0.09,false,false,near_mint,en,USD
|
||||||
|
Junkblade Bruiser,BLB,Bloomburrow,220,normal,common,3,95810,918fd89b-5ab7-4ae2-920c-faca5e9da7b9,0.04,false,false,near_mint,en,USD
|
||||||
|
Cache Grab,BLB,Bloomburrow,167,normal,common,2,95842,dfd977dc-a7c3-4d0a-aca7-b25bd154e963,0.08,false,false,near_mint,en,USD
|
||||||
|
Lilypad Village,BLB,Bloomburrow,255,normal,uncommon,2,95631,7e95a7cc-ed77-4ca4-80db-61c0fc68bf50,0.14,false,false,near_mint,en,USD
|
||||||
|
Agate-Blade Assassin,BLB,Bloomburrow,82,normal,common,5,96017,39ebb84a-1c52-4b07-9bd0-b360523b3a5b,0.03,false,false,near_mint,en,USD
|
||||||
|
Repel Calamity,BLB,Bloomburrow,27,normal,uncommon,2,95834,d068192a-6270-4981-819d-4945fa4a2b83,0.07,false,false,near_mint,en,USD
|
||||||
|
Hazel's Nocturne,BLB,Bloomburrow,97,normal,uncommon,2,96009,239363df-4de8-4b64-80fc-a1f4b5c36027,0.07,false,false,near_mint,en,USD
|
||||||
|
Treeguard Duo,BLB,Bloomburrow,200,normal,common,4,96077,89c8456e-c971-42b7-abf3-ff5ae1320abe,0.01,false,false,near_mint,en,USD
|
||||||
|
Calamitous Tide,BLB,Bloomburrow,43,normal,uncommon,2,96003,178bc8b2-ffa0-4549-aead-aacb3db3cf19,0.03,false,false,near_mint,en,USD
|
||||||
|
Splash Lasher,BLB,Bloomburrow,73,normal,uncommon,2,95910,362ee125-35a0-46cd-a201-e6797d12d33a,0.04,false,false,near_mint,en,USD
|
||||||
|
Blooming Blast,BLB,Bloomburrow,126,normal,uncommon,2,95996,0cd92a83-cec3-4085-a929-3f204e3e0140,0.06,false,false,near_mint,en,USD
|
||||||
|
Sugar Coat,BLB,Bloomburrow,76,normal,uncommon,2,95887,fcacbe71-efb0-49e1-b2d0-3ee65ec6cf8b,0.05,false,false,near_mint,en,USD
|
||||||
|
Dazzling Denial,BLB,Bloomburrow,45,normal,common,6,96369,8739f1ac-2e57-4b52-a7ff-cc8df5936aad,0.04,false,false,near_mint,en,USD
|
||||||
|
Nettle Guard,BLB,Bloomburrow,23,normal,common,4,95949,8c9c3cc3-2aa2-453e-a17c-2baeeaabe0a9,0.03,false,false,near_mint,en,USD
|
||||||
|
Raccoon Rallier,BLB,Bloomburrow,148,normal,common,5,96104,b5b5180f-5a1c-4df8-9019-195e65a50ce3,0.04,false,false,near_mint,en,USD
|
||||||
|
High Stride,BLB,Bloomburrow,176,normal,common,8,96153,09c8cf4b-8e65-4a1c-b458-28b5ab56b390,0.04,false,false,near_mint,en,USD
|
||||||
|
Otterball Antics,BLB,Bloomburrow,63,normal,uncommon,2,95913,3ff83ff7-e428-4ccc-8341-f223dab76bd1,0.1,false,false,near_mint,en,USD
|
||||||
|
Frilled Sparkshooter,BLB,Bloomburrow,136,normal,common,7,95934,674bbd6d-e329-42cf-963d-88d1ce8fe51e,0.02,false,false,near_mint,en,USD
|
||||||
|
Moonrise Cleric,BLB,Bloomburrow,226,normal,common,3,95767,35f2a71f-31e8-4b51-9dd4-51a5336b3b86,0.04,false,false,near_mint,en,USD
|
||||||
|
Wax-Wane Witness,BLB,Bloomburrow,39,normal,common,3,95971,d90ea719-5320-46c6-a347-161853a14776,0.02,false,false,near_mint,en,USD
|
||||||
|
Pearl of Wisdom,BLB,Bloomburrow,64,normal,common,7,95625,13cb9575-1138-4f99-8e90-0eaf00bdf4a1,0.01,false,false,near_mint,en,USD
|
||||||
|
Run Away Together,BLB,Bloomburrow,67,normal,common,3,95799,7cb7ec70-a5a4-4188-ba1a-e88b81bdbad0,0.04,false,false,near_mint,en,USD
|
||||||
|
Early Winter,BLB,Bloomburrow,93,normal,common,2,95626,5030e6ac-211d-4145-8c87-998a8351a467,0.05,false,false,near_mint,en,USD
|
||||||
|
Three Tree Rootweaver,BLB,Bloomburrow,198,normal,common,2,96469,d1ab6e14-26e0-4174-b5c6-bc0f5c26b177,0.04,false,false,near_mint,en,USD
|
||||||
|
Mudflat Village,BLB,Bloomburrow,257,normal,uncommon,2,95628,53ec4ad3-9cf0-4f1b-a9db-d63feee594ab,0.24,false,false,near_mint,en,USD
|
||||||
|
Starlit Soothsayer,BLB,Bloomburrow,115,normal,common,6,95895,184c1eca-2991-438f-b5d2-cd2529b9c9b4,0.03,false,false,near_mint,en,USD
|
||||||
|
Hop to It,BLB,Bloomburrow,16,normal,uncommon,2,95851,ee7207f8-5daa-42af-aeea-7a489047110b,0.07,false,false,near_mint,en,USD
|
||||||
|
Psychic Whorl,BLB,Bloomburrow,105,normal,common,5,96127,df900308-8432-4a0a-be21-17482026012b,0.04,false,false,near_mint,en,USD
|
||||||
|
Barkform Harvester,BLB,Bloomburrow,243,normal,common,4,95984,f77049a6-0f22-415b-bc89-20bcb32accf6,0.06,false,false,near_mint,en,USD
|
||||||
|
Daring Waverider,BLB,Bloomburrow,44,normal,uncommon,2,95896,19422406-0c1a-497e-bed1-708bc556491a,0.06,false,false,near_mint,en,USD
|
||||||
|
Plumecreed Escort,BLB,Bloomburrow,65,normal,uncommon,2,95983,f71320ed-2f30-49ce-bcb0-19aebba3f0e8,0.05,false,false,near_mint,en,USD
|
||||||
|
Parting Gust,BLB,Bloomburrow,24,normal,uncommon,2,95744,1086e826-94b8-4398-8a38-d8eacca56a43,0.38,false,false,near_mint,en,USD
|
||||||
|
Veteran Guardmouse,BLB,Bloomburrow,237,normal,common,3,95771,3db43c46-b616-4ef8-80ed-0fab345ab3d0,0.01,false,false,near_mint,en,USD
|
||||||
|
Dire Downdraft,BLB,Bloomburrow,46,normal,common,6,96526,f1931f22-974c-43ad-911e-684bf3f9995d,0.02,false,false,near_mint,en,USD
|
||||||
|
Waterspout Warden,BLB,Bloomburrow,80,normal,common,4,95909,35898b39-98e2-405b-8f18-0e054bd2c29e,0.01,false,false,near_mint,en,USD
|
||||||
|
Lupinflower Village,BLB,Bloomburrow,256,normal,uncommon,2,95634,8ab9d56f-9178-4ec9-a5f6-b934f50d8d9d,0.1,false,false,near_mint,en,USD
|
||||||
|
Heartfire Hero,BLB,Bloomburrow,138,normal,uncommon,2,95870,48ace959-66b2-40c8-9bff-fd7ed9c99a82,2.1,false,false,near_mint,en,USD
|
||||||
|
Peerless Recycling,BLB,Bloomburrow,188,normal,uncommon,2,95925,5f72466c-505b-4371-9366-0fde525a37e6,0.1,false,false,near_mint,en,USD
|
||||||
|
Pond Prophet,BLB,Bloomburrow,229,normal,common,4,95861,fb959e74-61ea-453d-bb9f-ad0183c0e1b1,0.09,false,false,near_mint,en,USD
|
||||||
|
Crumb and Get It,BLB,Bloomburrow,8,normal,common,2,96259,3c7b3b25-d4b3-4451-9f5c-6eb369541175,0.03,false,false,near_mint,en,USD
|
||||||
|
Wildfire Howl,BLB,Bloomburrow,162,normal,uncommon,2,96059,7392d397-9836-4df2-944d-c930c9566811,0.05,false,false,near_mint,en,USD
|
||||||
|
Bark-Knuckle Boxer,BLB,Bloomburrow,164,normal,uncommon,2,95921,582637a9-6aa0-4824-bed7-d5fc91bda35e,0.03,false,false,near_mint,en,USD
|
||||||
|
Ruthless Negotiation,BLB,Bloomburrow,108,normal,uncommon,2,95828,c7f4360c-8d68-4058-b9ec-da9948cb060d,0.1,false,false,near_mint,en,USD
|
||||||
|
Three Tree Mascot,FDN,Foundations,682,normal,common,3,100412,40b8bf3a-1cb5-4ce2-ac25-9410f17130de,0.11,false,false,near_mint,en,USD
|
||||||
|
Tempest Angler,BLB,Bloomburrow,235,normal,common,2,95803,850daae4-f0b7-4604-95e7-ad044ec165c3,0.04,false,false,near_mint,en,USD
|
||||||
|
Starscape Cleric,BLB,Bloomburrow,116,normal,uncommon,2,96037,53a938a7-0154-4350-87cb-00da24ec3824,0.62,false,false,near_mint,en,USD
|
||||||
|
Wick's Patrol,BLB,Bloomburrow,121,normal,uncommon,3,95926,5fa0c53d-fe7b-4b8b-ad81-7967ca318ff7,0.07,false,false,near_mint,en,USD
|
||||||
|
Fireglass Mentor,BLB,Bloomburrow,213,normal,uncommon,2,95823,b78fbaa3-c580-4290-9c28-b74169aab2fc,0.08,false,false,near_mint,en,USD
|
||||||
|
Steampath Charger,BLB,Bloomburrow,153,normal,common,2,95890,03bf1296-e347-4070-8c6f-5c362c2f9364,0.03,false,false,near_mint,en,USD
|
||||||
|
Whiskerquill Scribe,BLB,Bloomburrow,161,normal,common,2,96124,da653996-9bd4-40bd-afb4-48c7e070a269,0.01,false,false,near_mint,en,USD
|
||||||
|
Lilysplash Mentor,BLB,Bloomburrow,222,normal,uncommon,3,95789,64de7b1f-a03e-4407-91f1-e108a2f26735,0.12,false,false,near_mint,en,USD
|
||||||
|
Roughshod Duo,BLB,Bloomburrow,150,normal,common,3,96343,78cdcfb9-a247-4c2d-a098-5b57570f8cd5,0.03,false,false,near_mint,en,USD
|
||||||
|
Bonebind Orator,BLB,Bloomburrow,84,normal,common,3,96535,faf226fa-ca09-4468-8804-87b2a7de2c66,0.02,false,false,near_mint,en,USD
|
||||||
|
Agate Assault,BLB,Bloomburrow,122,normal,common,2,96066,7dd9946b-515e-4e0d-9da2-711e126e9fa6,0.02,false,false,near_mint,en,USD
|
||||||
|
Nocturnal Hunger,BLB,Bloomburrow,102,normal,common,3,96060,742c0409-9abd-4559-b52e-932cc90c531a,0.02,false,false,near_mint,en,USD
|
||||||
|
Jolly Gerbils,BLB,Bloomburrow,19,normal,uncommon,2,96167,0eab51d6-ba17-4a8c-8834-25db363f2b6b,0.04,false,false,near_mint,en,USD
|
||||||
|
Downwind Ambusher,BLB,Bloomburrow,92,normal,uncommon,2,95920,55cfd628-933a-4d3d-b2e5-70bc86960d1c,0.02,false,false,near_mint,en,USD
|
||||||
|
Scales of Shale,BLB,Bloomburrow,110,normal,common,2,95955,9ae14276-dbbd-4257-80e9-accd6c19f5b2,0.02,false,false,near_mint,en,USD
|
||||||
|
Treetop Sentries,BLB,Bloomburrow,201,normal,common,4,95974,e16d4d6e-1fe5-4ff6-9877-8c849a24f5e0,0.03,false,false,near_mint,en,USD
|
||||||
|
Seedpod Squire,BLB,Bloomburrow,232,normal,common,4,95852,f3684577-51ce-490e-9b59-b19c733be466,0.01,false,false,near_mint,en,USD
|
||||||
|
Savor,BLB,Bloomburrow,109,normal,common,4,96178,1397f689-dca1-4d35-864b-92c5606afb9a,0.04,false,false,near_mint,en,USD
|
||||||
|
Polliwallop,BLB,Bloomburrow,189,normal,common,2,95935,6bc4963c-d90b-4588-bdb7-85956e42a623,0.03,false,false,near_mint,en,USD
|
||||||
|
Sonar Strike,BLB,Bloomburrow,32,normal,common,2,96093,a50da179-751f-47a8-a547-8c4a291ed381,0.02,false,false,near_mint,en,USD
|
||||||
|
Uncharted Haven,FDN,Foundations,564,normal,common,3,97170,172cd5b7-98fc-4add-b858-a0b3dfb75c19,0.14,false,false,near_mint,en,USD
|
||||||
|
Teapot Slinger,BLB,Bloomburrow,157,normal,uncommon,2,96015,30506844-349f-4b68-8cc1-d028c1611cc7,0.06,false,false,near_mint,en,USD
|
||||||
|
Harvestrite Host,BLB,Bloomburrow,15,normal,uncommon,2,95915,41762689-0c13-4d45-9d81-ba2afad980f8,0.07,false,false,near_mint,en,USD
|
||||||
|
Spellgyre,BLB,Bloomburrow,72,normal,uncommon,2,96139,f6f6620a-1d40-429d-9a0c-aaeb62adaa71,0.08,false,false,near_mint,en,USD
|
||||||
|
Oakhollow Village,BLB,Bloomburrow,258,normal,uncommon,2,95624,0d49b016-b02b-459f-85e9-c04f6bdcb94e,0.35,false,false,near_mint,en,USD
|
||||||
|
Bumbleflower's Sharepot,BLB,Bloomburrow,244,normal,common,2,95924,5f0affd5-5dcd-4dd1-a694-37a9aedf4084,0.02,false,false,near_mint,en,USD
|
||||||
|
Overprotect,BLB,Bloomburrow,185,normal,uncommon,2,95891,079e979f-b618-4625-989c-e0ea5b61ed8a,0.55,false,false,near_mint,en,USD
|
||||||
|
Heaped Harvest,BLB,Bloomburrow,175,normal,common,3,96255,3b5349db-0e0a-4b15-886e-0db403ef49cb,0.1,false,false,near_mint,en,USD
|
||||||
|
Flowerfoot Swordmaster,BLB,Bloomburrow,14,normal,uncommon,2,95812,97ff118f-9c3c-43a2-8085-980c7fe7d227,0.15,false,false,near_mint,en,USD
|
||||||
|
Banishing Light,BLB,Bloomburrow,1,normal,common,6,96011,25a06f82-ebdb-4dd6-bfe8-958018ce557c,0.04,false,false,near_mint,en,USD
|
||||||
|
Sazacap's Brew,BLB,Bloomburrow,151,normal,common,3,96330,6d963080-b3ec-467d-82f7-39db6ecd6bbc,0.05,false,false,near_mint,en,USD
|
||||||
|
Diresight,BLB,Bloomburrow,91,normal,common,3,95985,fada29c0-5293-40a4-b36d-d073ee99e650,0.1,false,false,near_mint,en,USD
|
||||||
|
Gossip's Talent,BLB,Bloomburrow,51,normal,uncommon,2,95961,b299889a-03d6-4659-b0e1-f0830842e40f,0.18,false,false,near_mint,en,USD
|
||||||
|
Fountainport Bell,BLB,Bloomburrow,245,normal,common,3,96094,a5c94bc0-a49d-451b-8e8d-64d46b8b8603,0.04,false,false,near_mint,en,USD
|
||||||
|
Reptilian Recruiter,BLB,Bloomburrow,149,normal,uncommon,2,96072,81dec453-c9d7-42cb-980a-c82f82bede76,0.02,false,false,near_mint,en,USD
|
||||||
|
Thistledown Players,BLB,Bloomburrow,35,normal,common,2,95960,afa8d83f-8586-4127-8b55-9715e9547488,0.01,false,false,near_mint,en,USD
|
||||||
|
Clifftop Lookout,BLB,Bloomburrow,168,normal,uncommon,2,95931,662d3bcc-65f3-4c69-8ea1-446870a1193d,0.16,false,false,near_mint,en,USD
|
||||||
|
Rust-Shield Rampager,BLB,Bloomburrow,190,normal,common,2,96117,c96b01f5-83de-4237-a68d-f946c53e31a6,0.02,false,false,near_mint,en,USD
|
||||||
|
Consumed by Greed,BLB,Bloomburrow,87,normal,uncommon,2,95884,e50acc41-3517-42db-b1d3-1bdfd7294d84,0.09,false,false,near_mint,en,USD
|
||||||
|
Rabbit Response,BLB,Bloomburrow,26,normal,common,2,96114,c4ded450-346d-4917-917a-b62bc0267509,0.02,false,false,near_mint,en,USD
|
||||||
|
Corpseberry Cultivator,BLB,Bloomburrow,210,normal,common,2,95829,c911a759-ed7b-452b-88a3-663478357610,0.02,false,false,near_mint,en,USD
|
||||||
|
Mind Drill Assailant,BLB,Bloomburrow,225,normal,common,2,95783,507ba708-ca9b-453e-b4c2-23b6650eb5a8,0.05,false,false,near_mint,en,USD
|
||||||
|
Hazardroot Herbalist,BLB,Bloomburrow,174,normal,uncommon,2,96130,e2882982-b3a3-4762-a550-6b82db1038e8,0.04,false,false,near_mint,en,USD
|
||||||
|
Dewdrop Cure,BLB,Bloomburrow,10,normal,uncommon,2,95932,666aefc2-44e0-4c27-88d5-7906f245a71f,0.13,false,false,near_mint,en,USD
|
||||||
|
Valley Rally,BLB,Bloomburrow,159,normal,uncommon,2,95878,b6178258-1ad6-4122-a56f-6eb7d0611e84,0.04,false,false,near_mint,en,USD
|
||||||
|
Blacksmith's Talent,BLB,Bloomburrow,125,normal,uncommon,2,96029,4bb318fa-481d-40a7-978e-f01b49101ae0,0.17,false,false,near_mint,en,USD
|
||||||
|
Pileated Provisioner,BLB,Bloomburrow,25,normal,common,2,96102,ae442cd6-c4df-4aad-9b1d-ccd936c5ec96,0.02,false,false,near_mint,en,USD
|
||||||
|
Short Bow,BLB,Bloomburrow,248,normal,uncommon,2,96281,51d8b72b-fa8f-48d3-bddc-d3ce9b8ba2ea,0.15,false,false,near_mint,en,USD
|
||||||
|
Warren Elder,BLB,Bloomburrow,37,normal,common,2,96030,4bf20069-5a20-4f95-976b-6af2b69f3ad0,0.03,false,false,near_mint,en,USD
|
|
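
The rows above are a raw Manabox export with no header row. Matching them against the `ManaboxExportData` model in db/models.py below, the columns appear to be name, set code, set name, collector number, foil, rarity, quantity, Manabox id, Scryfall id, purchase price, misprint, altered, condition, language, and currency. A minimal parsing sketch under that assumption (the column names are inferred from the model, not taken from the file itself):

```python
import csv
from io import StringIO

# Column names inferred from the ManaboxExportData model below; the export
# ships without a header row, so treat this mapping as an assumption.
MANABOX_COLUMNS = [
    "name", "set_code", "set_name", "collector_number", "foil", "rarity",
    "quantity", "manabox_id", "scryfall_id", "purchase_price", "misprint",
    "altered", "condition", "language", "purchase_price_currency",
]

sample = "Wax-Wane Witness,BLB,Bloomburrow,39,foil,common,1,95971,d90ea719-5320-46c6-a347-161853a14776,0.05,false,false,near_mint,en,USD"

reader = csv.DictReader(StringIO(sample), fieldnames=MANABOX_COLUMNS)
row = next(reader)
print(row["name"], row["rarity"], row["purchase_price"])  # Wax-Wane Witness common 0.05
```
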
db/models.py (162 lines removed)
@@ -1,162 +0,0 @@
from sqlalchemy import Column, Integer, String, Float, Boolean, DateTime, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
from datetime import datetime


Base = declarative_base()


class Box(Base):
    __tablename__ = "boxes"

    id = Column(String, primary_key=True, index=True)
    upload_id = Column(String, ForeignKey("upload_history.upload_id"))
    set_name = Column(String)
    set_code = Column(String)
    type = Column(String)
    cost = Column(Float)
    date_purchased = Column(DateTime)
    date_opened = Column(DateTime)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class ManaboxExportData(Base):
    __tablename__ = "manabox_export_data"

    id = Column(String, primary_key=True)
    upload_id = Column(String)
    box_id = Column(String, nullable=True)
    name = Column(String)
    set_code = Column(String)
    set_name = Column(String)
    collector_number = Column(String)
    foil = Column(String)
    rarity = Column(String)
    quantity = Column(Integer)
    manabox_id = Column(String)
    scryfall_id = Column(String)
    purchase_price = Column(Float)
    misprint = Column(String)
    altered = Column(String)
    condition = Column(String)
    language = Column(String)
    purchase_price_currency = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class UploadHistory(Base):
    __tablename__ = "upload_history"

    id = Column(String, primary_key=True)
    upload_id = Column(String)
    filename = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)
    status = Column(String)


class TCGPlayerGroups(Base):
    __tablename__ = 'tcgplayer_groups'

    id = Column(String, primary_key=True)
    group_id = Column(Integer)
    name = Column(String)
    abbreviation = Column(String)
    is_supplemental = Column(String)
    published_on = Column(String)
    modified_on = Column(String)
    category_id = Column(Integer)


class TCGPlayerInventory(Base):
    __tablename__ = 'tcgplayer_inventory'

    # TCGplayer Id,Product Line,Set Name,Product Name,Title,Number,Rarity,Condition,TCG Market Price,TCG Direct Low,TCG Low Price With Shipping,TCG Low Price,Total Quantity,Add to Quantity,TCG Marketplace Price,Photo URL
    id = Column(String, primary_key=True)
    export_id = Column(String)
    tcgplayer_product_id = Column(String, ForeignKey("tcgplayer_product.id"), nullable=True)
    tcgplayer_id = Column(Integer)
    product_line = Column(String)
    set_name = Column(String)
    product_name = Column(String)
    title = Column(String)
    number = Column(String)
    rarity = Column(String)
    condition = Column(String)
    tcg_market_price = Column(Float)
    tcg_direct_low = Column(Float)
    tcg_low_price_with_shipping = Column(Float)
    tcg_low_price = Column(Float)
    total_quantity = Column(Integer)
    add_to_quantity = Column(Integer)
    tcg_marketplace_price = Column(Float)
    photo_url = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class TCGPlayerExportHistory(Base):
    __tablename__ = 'tcgplayer_export_history'

    id = Column(String, primary_key=True)
    type = Column(String)
    pricing_export_id = Column(String)
    inventory_export_id = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class TCGPlayerPricingHistory(Base):
    __tablename__ = 'tcgplayer_pricing_history'

    id = Column(String, primary_key=True)
    tcgplayer_product_id = Column(String, ForeignKey("tcgplayer_product.id"))
    export_id = Column(String)
    group_id = Column(Integer)
    tcgplayer_id = Column(Integer)
    tcg_market_price = Column(Float)
    tcg_direct_low = Column(Float)
    tcg_low_price_with_shipping = Column(Float)
    tcg_low_price = Column(Float)
    tcg_marketplace_price = Column(Float)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class TCGPlayerProduct(Base):
    __tablename__ = 'tcgplayer_product'

    id = Column(String, primary_key=True)
    group_id = Column(Integer)
    tcgplayer_id = Column(Integer)
    product_line = Column(String)
    set_name = Column(String)
    product_name = Column(String)
    title = Column(String)
    number = Column(String)
    rarity = Column(String)
    condition = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class ManaboxTCGPlayerMapping(Base):
    __tablename__ = 'manabox_tcgplayer_mapping'

    id = Column(String, primary_key=True)
    manabox_id = Column(String, ForeignKey("manabox_export_data.id"))
    tcgplayer_id = Column(Integer, ForeignKey("tcgplayer_inventory.tcgplayer_id"))
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class SetCodeGroupIdMapping(Base):
    __tablename__ = 'set_code_group_id_mapping'

    id = Column(String, primary_key=True)
    set_code = Column(String)
    group_id = Column(Integer)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)


class UnmatchedManaboxData(Base):
    __tablename__ = 'unmatched_manabox_data'

    id = Column(String, primary_key=True)
    manabox_id = Column(String, ForeignKey("manabox_export_data.id"))
    reason = Column(String)
    date_created = Column(DateTime, default=datetime.now)
    date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)
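
db/database.py (init_db, get_db, check_db_connection) is not included in this diff, so the snippet below is only a sketch of how this schema would typically be materialized from `Base`. The SQLite URL is a stand-in chosen to keep the example runnable; psycopg2-binary in requirements.txt suggests Postgres in practice.

```python
# Minimal sketch, assuming init_db() ultimately calls Base.metadata.create_all;
# only Base and the models above come from this diff, the engine is assumed.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

from db.models import Base, Box

engine = create_engine("sqlite:///:memory:")  # placeholder, not the real DSN
SessionLocal = sessionmaker(bind=engine)

Base.metadata.create_all(bind=engine)  # presumably what init_db() boils down to

with SessionLocal() as session:
    print(session.query(Box).count())  # 0 rows in a fresh schema
```
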
@@ -1,45 +0,0 @@
from sqlalchemy.orm import Session
from services.data import DataService
from services.upload import UploadService
from services.box import BoxService
from services.tcgplayer import TCGPlayerService
from services.pricing import PricingService
from fastapi import Depends
from db.database import get_db


## Upload

def get_upload_service(db: Session = Depends(get_db)) -> UploadService:
    """Dependency injection for UploadService"""
    return UploadService(db)

## box

def get_box_service(db: Session = Depends(get_db)) -> BoxService:
    """Dependency injection for BoxService"""
    return BoxService(db)

## Pricing

def get_pricing_service(db: Session = Depends(get_db)) -> PricingService:
    """Dependency injection for PricingService"""
    return PricingService(db)

## tcgplayer

def get_tcgplayer_service(
    db: Session = Depends(get_db),
    pricing_service: PricingService = Depends(get_pricing_service)
) -> TCGPlayerService:
    """Dependency injection for TCGPlayerService"""
    return TCGPlayerService(db, pricing_service)

## Data
def get_data_service(
    db: Session = Depends(get_db),
    tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)
) -> DataService:
    """Dependency injection for DataService"""
    return DataService(db, tcgplayer_service)
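
Note how the providers chain: `get_data_service` depends on `get_tcgplayer_service`, which depends on `get_pricing_service`, and every level also takes its own session from `get_db`. A short sketch of how a route consumes the top of that chain; the `/api/example` path is made up, the real consumers are in routes/routes.py.

```python
# Illustrative consumer of the provider chain above (throwaway app for the sketch).
from fastapi import Depends, FastAPI

from db.database import get_db
from dependencies import get_data_service
from services.data import DataService

app = FastAPI()

@app.get("/api/example")
async def example(data_service: DataService = Depends(get_data_service)) -> dict:
    # FastAPI resolves DataService -> TCGPlayerService -> PricingService, each
    # constructed with a Session obtained from get_db for this request.
    return {"ok": True}

# In tests, any link in the chain can be swapped out, e.g.:
# app.dependency_overrides[get_db] = lambda: fake_session
```
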
main.py (75 lines removed)
@@ -1,75 +0,0 @@
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
import uvicorn
from routes.routes import router
from db.database import init_db, check_db_connection, destroy_db, get_db
from db.utils import db_transaction
import logging
import sys
from services.tcgplayer import TCGPlayerService, PricingService
from db.models import TCGPlayerGroups


logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.StreamHandler(sys.stdout),
        logging.FileHandler('app.log')  # Added this line
    ]
)

logger = logging.getLogger(__name__)

# Create FastAPI instance
app = FastAPI(
    title="Card Management API",
    description="API for managing card collections and TCGPlayer integration",
    version="1.0.0",
    debug=True
)

# Configure CORS
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Modify this in production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Include routers
app.include_router(router)

# Optional: Add startup and shutdown events
@app.on_event("startup")
async def startup_event():
    # Check database connection
    if not check_db_connection():
        raise Exception("Database connection failed")
    # destroy db
    #destroy_db()
    # Initialize database
    init_db()
    # get db session
    db = next(get_db())
    # populate tcgplayer groups
    if db.query(TCGPlayerGroups).count() == 0:
        with db_transaction(db):
            tcgplayer_service = TCGPlayerService(db, PricingService(db))
            tcgplayer_service.populate_tcgplayer_groups()


@app.on_event("shutdown")
async def shutdown_event():
    # Clean up any connections or resources
    pass

# Root endpoint
@app.get("/")
async def root():
    return {"message": "Card Management API"}

# Run the application
if __name__ == "__main__":
    uvicorn.run("main:app", host="0.0.0.0", port=8000, reload=True)
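
The startup handler pulls a session straight out of the dependency generator with `next(get_db())`. db/database.py itself is not part of this diff, so the following is only an assumed shape for that generator, not the project's actual implementation:

```python
# Assumed shape of db/database.py's get_db(); the engine is a placeholder.
from typing import Generator
from sqlalchemy import create_engine
from sqlalchemy.orm import Session, sessionmaker

engine = create_engine("sqlite:///:memory:")  # stand-in engine for the sketch
SessionLocal = sessionmaker(bind=engine)

def get_db() -> Generator[Session, None, None]:
    db = SessionLocal()
    try:
        yield db      # FastAPI injects the yielded session into route handlers
    finally:
        db.close()    # teardown runs once the request (or manual iteration) ends

# main.py drives the generator by hand during startup: db = next(get_db())
```
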
requests.md (new file, 23 lines)
@@ -0,0 +1,23 @@
curl -J http://192.168.1.41:8000/api/tcgplayer/inventory/update --remote-name

curl -J -X POST \
  -H "Content-Type: application/json" \
  -d '{"open_box_ids": ["e20cc342-23cb-4593-89cb-56a0cb3ed3f3"]}' \
  http://192.168.1.41:8000/api/tcgplayer/inventory/add --remote-name

curl -X POST http://192.168.1.41:8000/api/boxes \
  -H "Content-Type: application/json" \
  -d '{
    "type": "collector",
    "set_code": "MOM",
    "sku": "ABC123",
    "num_cards_expected": 15
  }'

curl -X POST http://192.168.1.41:8000/api/boxes/box123/open \
  -H "Content-Type: application/json" \
  -d '{
    "product_id": "box123",
    "file_ids": ["file1", "file2"],
    "num_cards_actual": 15,
    "date_opened": "2025-02-07T12:00:00Z"
  }'
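
routes/routes.py also exposes POST /api/upload/manabox for the Manabox CSV itself. It is not covered in requests.md, so the client call below is an illustrative addition: httpx is pulled in by requirements.txt, and the multipart field name matches the route's `file` parameter; the host and port simply mirror the curl examples above.

```python
# Illustrative client call for POST /api/upload/manabox (not part of requests.md).
import httpx

with open("manabox_export.csv", "rb") as f:  # any Manabox CSV export
    resp = httpx.post(
        "http://192.168.1.41:8000/api/upload/manabox",
        files={"file": ("manabox_export.csv", f, "text/csv")},  # field name matches UploadFile param
        timeout=60.0,
    )
print(resp.status_code, resp.json())
```
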
@@ -1,18 +1,32 @@
+alembic==1.14.1
 annotated-types==0.7.0
 anyio==4.8.0
+APScheduler==3.11.0
 browser-cookie3==0.20.1
 certifi==2025.1.31
 charset-normalizer==3.4.1
 click==8.1.8
+coverage==7.6.10
 fastapi==0.115.8
 h11==0.14.0
+httpcore==1.0.7
+httpx==0.28.1
 idna==3.10
+iniconfig==2.0.0
 lz4==4.4.3
+Mako==1.3.9
+MarkupSafe==3.0.2
 numpy==2.2.2
+packaging==24.2
 pandas==2.2.3
+pluggy==1.5.0
+psycopg2-binary==2.9.10
 pycryptodomex==3.21.0
 pydantic==2.10.6
 pydantic_core==2.27.2
+pytest==8.3.4
+pytest-asyncio==0.25.3
+pytest-cov==6.0.0
 python-dateutil==2.9.0.post0
 python-multipart==0.0.20
 pytz==2025.1
@@ -23,5 +37,6 @@ SQLAlchemy==2.0.37
 starlette==0.45.3
 typing_extensions==4.12.2
 tzdata==2025.1
+tzlocal==5.2
 urllib3==2.3.0
 uvicorn==0.34.0
routes/routes.py (167 lines removed)
@@ -1,167 +0,0 @@
from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, Request, BackgroundTasks
|
|
||||||
from fastapi.responses import StreamingResponse
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
from typing import Dict, Any, List
|
|
||||||
from db.database import get_db
|
|
||||||
from services.upload import UploadService
|
|
||||||
from services.box import BoxService
|
|
||||||
from services.tcgplayer import TCGPlayerService
|
|
||||||
from services.data import DataService
|
|
||||||
from dependencies import get_data_service, get_upload_service, get_tcgplayer_service, get_box_service
|
|
||||||
|
|
||||||
|
|
||||||
import logging
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
router = APIRouter(prefix="/api", tags=["cards"])
|
|
||||||
|
|
||||||
## health check
|
|
||||||
@router.get("/health", response_model=dict)
|
|
||||||
async def health_check() -> dict:
|
|
||||||
"""
|
|
||||||
Health check endpoint
|
|
||||||
"""
|
|
||||||
logger.info("Health check")
|
|
||||||
return {"status": "ok"}
|
|
||||||
|
|
||||||
## test endpoint - logs all detail about request
|
|
||||||
@router.post("/test", response_model=dict)
|
|
||||||
async def test_endpoint(request: Request, file:UploadFile = File(...)) -> dict:
|
|
||||||
"""
|
|
||||||
Test endpoint
|
|
||||||
"""
|
|
||||||
content = await file.read()
|
|
||||||
# log filename
|
|
||||||
logger.info(f"file received: {file.filename}")
|
|
||||||
# print first 100 characters of file content
|
|
||||||
logger.info(f"file content: {content[:100]}")
|
|
||||||
|
|
||||||
return {"status": "ok"}
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/upload/manabox", response_model=dict)
|
|
||||||
async def upload_manabox(
|
|
||||||
background_tasks: BackgroundTasks,
|
|
||||||
upload_service: UploadService = Depends(get_upload_service),
|
|
||||||
data_service: DataService = Depends(get_data_service),
|
|
||||||
file: UploadFile = File(...)
|
|
||||||
) -> dict:
|
|
||||||
"""
|
|
||||||
Upload endpoint for Manabox CSV files
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
logger.info(f"file received: {file.filename}")
|
|
||||||
# Read the file content
|
|
||||||
content = await file.read()
|
|
||||||
filename = file.filename
|
|
||||||
if not content:
|
|
||||||
logger.error("Empty file content")
|
|
||||||
raise HTTPException(status_code=400, detail="Empty file content")
|
|
||||||
|
|
||||||
# You might want to validate it's a CSV file
|
|
||||||
if not file.filename.endswith('.csv'):
|
|
||||||
logger.error("File must be a CSV")
|
|
||||||
raise HTTPException(status_code=400, detail="File must be a CSV")
|
|
||||||
|
|
||||||
result = upload_service.process_manabox_upload(content, filename)
|
|
||||||
background_tasks.add_task(data_service.bg_set_manabox_tcg_relationship, upload_id=result[1])
|
|
||||||
return result[0]
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Manabox upload failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/createBox", response_model=dict)
|
|
||||||
async def create_box(
|
|
||||||
upload_id: str,
|
|
||||||
box_service: BoxService = Depends(get_box_service)
|
|
||||||
) -> dict:
|
|
||||||
try:
|
|
||||||
result = box_service.convert_upload_to_boxes(upload_id)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Box creation failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
||||||
return result
|
|
||||||
|
|
||||||
@router.post("/deleteBox", response_model=dict)
|
|
||||||
async def delete_box(
|
|
||||||
box_id: str,
|
|
||||||
box_service: BoxService = Depends(get_box_service)
|
|
||||||
) -> dict:
|
|
||||||
try:
|
|
||||||
result = box_service.delete_box(box_id)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Box deletion failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
@router.post("/tcgplayer/add/box/{box_id}", response_model=dict)
|
|
||||||
async def add_box(box_id: str = None, tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)):
|
|
||||||
try:
|
|
||||||
csv_content = tcgplayer_service.add_to_tcgplayer(box_id)
|
|
||||||
return StreamingResponse(
|
|
||||||
iter([csv_content]),
|
|
||||||
media_type="text/csv",
|
|
||||||
headers={"Content-Disposition": "attachment; filename=add_to_tcgplayer.csv"}
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Box add failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
||||||
|
|
||||||
@router.post("/tcgplayer/update/box/{box_id}", response_model=dict)
|
|
||||||
async def update_box(box_id: int = None):
|
|
||||||
"""asdf"""
|
|
||||||
pass
|
|
||||||
|
|
||||||
@router.post("/tcgplayer/updateInventory", response_model=dict)
|
|
||||||
async def update_inventory(
|
|
||||||
background_tasks: BackgroundTasks,
|
|
||||||
tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service),
|
|
||||||
data_service: DataService = Depends(get_data_service)):
|
|
||||||
try:
|
|
||||||
result = tcgplayer_service.update_inventory('live')
|
|
||||||
export_id = result['export_id']
|
|
||||||
background_tasks.add_task(data_service.bg_set_tcg_inventory_product_relationship, export_id)
|
|
||||||
return result
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Inventory update failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
||||||
|
|
||||||
@router.post("/tcgplayer/updatePricing", response_model=dict)
|
|
||||||
async def update_inventory(
|
|
||||||
tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service),
|
|
||||||
group_ids: Dict = None):
|
|
||||||
try:
|
|
||||||
result = tcgplayer_service.update_pricing(group_ids)
|
|
||||||
return result
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Pricing update failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
||||||
|
|
||||||
@router.post("/tcgplayer/updatePricingAll", response_model=dict)
|
|
||||||
async def update_inventory(tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)):
|
|
||||||
try:
|
|
||||||
result = tcgplayer_service.update_pricing_all()
|
|
||||||
return result
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Pricing update failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
||||||
|
|
||||||
@router.get("/tcgplayer/createLiveInventoryPricingUpdateFile")
|
|
||||||
async def create_inventory_import(
|
|
||||||
tcgplayer_service: TCGPlayerService = Depends(get_tcgplayer_service)
|
|
||||||
):
|
|
||||||
try:
|
|
||||||
csv_content = tcgplayer_service.get_live_inventory_pricing_update_csv()
|
|
||||||
return StreamingResponse(
|
|
||||||
iter([csv_content]),
|
|
||||||
media_type="text/csv",
|
|
||||||
headers={"Content-Disposition": "attachment; filename=inventory_pricing_update.csv"}
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Inventory import creation failed: {str(e)}")
|
|
||||||
raise HTTPException(status_code=400, detail=str(e))
|
|
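
With the router mounted under the /api prefix, the health-check route is the natural smoke test for this file. pytest and httpx are added to requirements.txt in this commit, so a test along these lines would be the expected shape (the test itself is illustrative, not part of the diff):

```python
# Illustrative smoke test for the /api/health route; TestClient drives the app
# in-process, and startup hooks (which touch the database) only run when the
# client is used as a context manager, so this stays database-free.
from fastapi.testclient import TestClient

from main import app

client = TestClient(app)

def test_health_check():
    resp = client.get("/api/health")
    assert resp.status_code == 200
    assert resp.json() == {"status": "ok"}
```
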
services/box.py (100 lines removed)
@@ -1,100 +0,0 @@
from db.models import ManaboxExportData, Box, UploadHistory
|
|
||||||
from db.utils import db_transaction
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
from sqlalchemy.engine.result import Row
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
import logging
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
class BoxObject:
|
|
||||||
def __init__(
|
|
||||||
self, upload_id: str, set_name: str,
|
|
||||||
set_code: str, cost: float = None, date_purchased: datetime = None,
|
|
||||||
date_opened: datetime = None, box_id: str = None):
|
|
||||||
self.upload_id = upload_id
|
|
||||||
self.box_id = box_id if box_id else str(uuid.uuid4())
|
|
||||||
self.set_name = set_name
|
|
||||||
self.set_code = set_code
|
|
||||||
self.cost = cost
|
|
||||||
self.date_purchased = date_purchased
|
|
||||||
self.date_opened = date_opened
|
|
||||||
|
|
||||||
class BoxService:
|
|
||||||
def __init__(self, db: Session):
|
|
||||||
self.db = db
|
|
||||||
|
|
||||||
def _validate_upload_id(self, upload_id: str):
|
|
||||||
# check if upload_history status = 'success'
|
|
||||||
if self.db.query(UploadHistory).filter(UploadHistory.upload_id == upload_id).first() is None:
|
|
||||||
raise Exception(f"Upload ID {upload_id} not found")
|
|
||||||
if self.db.query(UploadHistory).filter(UploadHistory.upload_id == upload_id).first().status != 'success':
|
|
||||||
raise Exception(f"Upload ID {upload_id} not successful")
|
|
||||||
# check if at least 1 row in manabox_export_data with upload_id
|
|
||||||
if self.db.query(ManaboxExportData).filter(ManaboxExportData.upload_id == upload_id).first() is None:
|
|
||||||
raise Exception(f"Upload ID {upload_id} has no data")
|
|
||||||
|
|
||||||
def _get_set_info(self, upload_id: str) -> list[Row[tuple[str, str]]]:
|
|
||||||
# get distinct set_name, set_code from manabox_export_data for upload_id
|
|
||||||
boxes = self.db.query(ManaboxExportData.set_name, ManaboxExportData.set_code).filter(ManaboxExportData.upload_id == upload_id).distinct().all()
|
|
||||||
if not boxes or len(boxes) == 0:
|
|
||||||
raise Exception(f"Upload ID {upload_id} has no data")
|
|
||||||
return boxes
|
|
||||||
|
|
||||||
def _update_manabox_export_data_box_id(self, box: Box):
|
|
||||||
# based on upload_id, set_name, set_code, update box_id in manabox_export_data for all rows where box id is null
|
|
||||||
with db_transaction(self.db):
|
|
||||||
self.db.query(ManaboxExportData).filter(
|
|
||||||
ManaboxExportData.upload_id == box.upload_id).filter(
|
|
||||||
ManaboxExportData.set_name == box.set_name).filter(
|
|
||||||
ManaboxExportData.set_code == box.set_code).filter(
|
|
||||||
ManaboxExportData.box_id == None).update({ManaboxExportData.box_id: box.id})
|
|
||||||
|
|
||||||
def convert_upload_to_boxes(self, upload_id: str):
|
|
||||||
self._validate_upload_id(upload_id)
|
|
||||||
# get distinct set_name, set_code from manabox_export_data for upload_id
|
|
||||||
box_set_info = self._get_set_info(upload_id)
|
|
||||||
created_boxes = []
|
|
||||||
# create boxes
|
|
||||||
for box in box_set_info:
|
|
||||||
box_obj = BoxObject(upload_id, set_name = box.set_name, set_code = box.set_code)
|
|
||||||
new_box = self.create_box(box_obj)
|
|
||||||
logger.info(f"Created box {new_box.id} for upload {upload_id}")
|
|
||||||
self._update_manabox_export_data_box_id(new_box)
|
|
||||||
created_boxes.append(new_box)
|
|
||||||
|
|
||||||
return {"status": "success", "boxes": f"{[box.id for box in created_boxes]}"}
|
|
||||||
|
|
||||||
|
|
||||||
def create_box(self, box: BoxObject):
|
|
||||||
with db_transaction(self.db):
|
|
||||||
box_record = Box(
|
|
||||||
id = box.box_id,
|
|
||||||
upload_id = box.upload_id,
|
|
||||||
set_name = box.set_name,
|
|
||||||
set_code = box.set_code,
|
|
||||||
cost = box.cost,
|
|
||||||
date_purchased = box.date_purchased,
|
|
||||||
date_opened = box.date_opened
|
|
||||||
)
|
|
||||||
self.db.add(box_record)
|
|
||||||
return box_record
|
|
||||||
|
|
||||||
def get_box(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def delete_box(self, box_id: str):
|
|
||||||
# delete box
|
|
||||||
with db_transaction(self.db):
|
|
||||||
self.db.query(Box).filter(Box.id == box_id).delete()
|
|
||||||
# update manabox_export_data box_id to null
|
|
||||||
with db_transaction(self.db):
|
|
||||||
self.db.query(ManaboxExportData).filter(ManaboxExportData.box_id == box_id).update({ManaboxExportData.box_id: None})
|
|
||||||
return {"status": "success", "box_id": box_id}
|
|
||||||
|
|
||||||
def update_box(self):
|
|
||||||
pass
|
|
||||||
|
|
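
The conversion flow in this service is: validate the upload, collect the distinct (set_name, set_code) pairs, create one Box per pair, then back-fill box_id on the matching manabox_export_data rows. Driven directly rather than through /api/createBox, that looks roughly like this (the upload id is a placeholder; in the app it comes back from the Manabox upload endpoint):

```python
# Rough usage sketch for BoxService outside the HTTP layer.
from db.database import get_db
from services.box import BoxService

db = next(get_db())
box_service = BoxService(db)

result = box_service.convert_upload_to_boxes("some-upload-uuid")  # placeholder id
print(result)  # {"status": "success", "boxes": "[...box ids...]"}
```
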
services/data.py (149 lines removed)
@@ -1,149 +0,0 @@
from sqlalchemy.orm import Session
|
|
||||||
import logging
|
|
||||||
from fastapi import BackgroundTasks
|
|
||||||
from db.models import TCGPlayerGroups, SetCodeGroupIdMapping, ManaboxExportData, TCGPlayerProduct, ManaboxTCGPlayerMapping, UnmatchedManaboxData, TCGPlayerInventory
|
|
||||||
from db.utils import db_transaction
|
|
||||||
import uuid
|
|
||||||
from services.tcgplayer import TCGPlayerService
|
|
||||||
from sqlalchemy.sql import exists
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
class DataService:
|
|
||||||
def __init__(self, db: Session, tcgplayer_service: TCGPlayerService):
|
|
||||||
self.db = db
|
|
||||||
self.tcgplayer_service = tcgplayer_service
|
|
||||||
|
|
||||||
def _normalize_rarity(self, rarity: str) -> str:
    if rarity.lower() == "rare":
        return "R"
    elif rarity.lower() == "mythic":
        return "M"
    elif rarity.lower() == "uncommon":
        return "U"
    elif rarity.lower() == "common":
        return "C"
    # accept already-normalized values; the original check compared the
    # lower-cased string against upper-case letters and could never match
    elif rarity.upper() in ["R", "M", "U", "C"]:
        return rarity.upper()
    else:
        raise ValueError(f"Invalid rarity: {rarity}")
|
|
||||||
|
|
||||||
def _normalize_condition(self, condition: str, foil: str) -> str:
|
|
||||||
if condition.lower() == "near_mint":
|
|
||||||
condition1 = "Near Mint"
|
|
||||||
else:
|
|
||||||
raise ValueError(f"Invalid condition: {condition}")
|
|
||||||
if foil.lower() == "foil":
|
|
||||||
condition2 = " Foil"
|
|
||||||
elif foil.lower() == "normal":
|
|
||||||
condition2 = ""
|
|
||||||
else:
|
|
||||||
raise ValueError(f"Invalid foil: {foil}")
|
|
||||||
return condition1 + condition2
|
|
||||||
|
|
||||||
def _normalize_number(self, number: str) -> str:
|
|
||||||
return str(number.split(".")[0])
|
|
||||||
|
|
||||||
def _convert_set_code_to_group_id(self, set_code: str) -> str:
|
|
||||||
group = self.db.query(TCGPlayerGroups).filter(TCGPlayerGroups.abbreviation == set_code).first()
|
|
||||||
return group.group_id
|
|
||||||
|
|
||||||
def _add_set_group_mapping(self, set_code: str, group_id: str) -> None:
|
|
||||||
with db_transaction(self.db):
|
|
||||||
self.db.add(SetCodeGroupIdMapping(id=str(uuid.uuid4()), set_code=set_code, group_id=group_id))
|
|
||||||
|
|
||||||
def _get_set_codes(self, **filters) -> list:
|
|
||||||
query = self.db.query(ManaboxExportData.set_code).distinct()
|
|
||||||
for field, value in filters.items():
|
|
||||||
if value is not None:
|
|
||||||
query = query.filter(getattr(ManaboxExportData, field) == value)
|
|
||||||
return [code[0] for code in query.all()]
|
|
||||||
|
|
||||||
async def bg_set_manabox_tcg_relationship(self, box_id: str = None, upload_id: str = None) -> None:
|
|
||||||
if not bool(box_id) ^ bool(upload_id):
|
|
||||||
raise ValueError("Must provide exactly one of box_id or upload_id")
|
|
||||||
|
|
||||||
filters = {"box_id": box_id} if box_id else {"upload_id": upload_id}
|
|
||||||
set_codes = self._get_set_codes(**filters)
|
|
||||||
|
|
||||||
for set_code in set_codes:
|
|
||||||
try:
|
|
||||||
group_id = self._convert_set_code_to_group_id(set_code)
|
|
||||||
except AttributeError:
|
|
||||||
logger.warning(f"No group found for set code {set_code}")
|
|
||||||
continue
|
|
||||||
self._add_set_group_mapping(set_code, group_id)
|
|
||||||
# update pricing for groups
|
|
||||||
if self.db.query(TCGPlayerProduct).filter(TCGPlayerProduct.group_id == group_id).count() == 0:
|
|
||||||
self.tcgplayer_service.update_pricing(set_name_ids={"set_name_ids":[group_id]})
|
|
||||||
|
|
||||||
# match manabox data to tcgplayer pricing data
|
|
||||||
# match on manabox - set_code (through group_id), collector_number, foil, rarity, condition
|
|
||||||
# match on tcgplayer - group_id, number, rarity, condition (condition + foil)
|
|
||||||
# use normalizing functions
|
|
||||||
matched_records = self.db.query(ManaboxExportData).filter(ManaboxExportData.set_code.in_(set_codes)).all()
|
|
||||||
for record in matched_records:
|
|
||||||
rarity = self._normalize_rarity(record.rarity)
|
|
||||||
condition = self._normalize_condition(record.condition, record.foil)
|
|
||||||
number = self._normalize_number(record.collector_number)
|
|
||||||
group_id = self._convert_set_code_to_group_id(record.set_code)
|
|
||||||
tcg_record = self.db.query(TCGPlayerProduct).filter(
|
|
||||||
TCGPlayerProduct.group_id == group_id,
|
|
||||||
TCGPlayerProduct.number == number,
|
|
||||||
TCGPlayerProduct.rarity == rarity,
|
|
||||||
TCGPlayerProduct.condition == condition
|
|
||||||
).all()
|
|
||||||
if len(tcg_record) == 0:
|
|
||||||
logger.warning(f"No match found for {record.name}")
|
|
||||||
if self.db.query(UnmatchedManaboxData).filter(UnmatchedManaboxData.manabox_id == record.id).count() == 0:
|
|
||||||
with db_transaction(self.db):
|
|
||||||
self.db.add(UnmatchedManaboxData(id=str(uuid.uuid4()), manabox_id=record.id, reason="No match found"))
|
|
||||||
elif len(tcg_record) > 1:
|
|
||||||
logger.warning(f"Multiple matches found for {record.name}")
|
|
||||||
if self.db.query(UnmatchedManaboxData).filter(UnmatchedManaboxData.manabox_id == record.id).count() == 0:
|
|
||||||
with db_transaction(self.db):
|
|
||||||
self.db.add(UnmatchedManaboxData(id=str(uuid.uuid4()), manabox_id=record.id, reason="Multiple matches found"))
|
|
||||||
else:
|
|
||||||
with db_transaction(self.db):
|
|
||||||
self.db.add(ManaboxTCGPlayerMapping(id=str(uuid.uuid4()), manabox_id=record.id, tcgplayer_id=tcg_record[0].id))
|
|
||||||
|
|
||||||
async def bg_set_tcg_inventory_product_relationship(self, export_id: str) -> None:
|
|
||||||
inventory_without_product = (
|
|
||||||
self.db.query(TCGPlayerInventory.tcgplayer_id, TCGPlayerInventory.set_name)
|
|
||||||
.filter(TCGPlayerInventory.total_quantity > 0)
|
|
||||||
.filter(TCGPlayerInventory.product_line == "Magic")
|
|
||||||
.filter(TCGPlayerInventory.export_id == export_id)
|
|
||||||
.filter(TCGPlayerInventory.tcgplayer_product_id.is_(None))
|
|
||||||
.filter(~exists().where(
|
|
||||||
TCGPlayerProduct.id == TCGPlayerInventory.tcgplayer_product_id
|
|
||||||
))
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
|
|
||||||
set_names = list(set(inv.set_name for inv in inventory_without_product
|
|
||||||
if inv.set_name is not None and isinstance(inv.set_name, str)))
|
|
||||||
|
|
||||||
group_ids = self.db.query(TCGPlayerGroups.group_id).filter(
|
|
||||||
TCGPlayerGroups.name.in_(set_names)
|
|
||||||
).all()
|
|
||||||
|
|
||||||
group_ids = [str(group_id[0]) for group_id in group_ids]
|
|
||||||
|
|
||||||
self.tcgplayer_service.update_pricing(set_name_ids={"set_name_ids": group_ids})
|
|
||||||
|
|
||||||
for inventory in inventory_without_product:
|
|
||||||
product = self.db.query(TCGPlayerProduct).filter(
|
|
||||||
TCGPlayerProduct.tcgplayer_id == inventory.tcgplayer_id
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if product:
|
|
||||||
with db_transaction(self.db):
|
|
||||||
inventory_record = self.db.query(TCGPlayerInventory).filter(
|
|
||||||
TCGPlayerInventory.tcgplayer_id == inventory.tcgplayer_id,
|
|
||||||
TCGPlayerInventory.export_id == export_id
|
|
||||||
).first()
|
|
||||||
|
|
||||||
if inventory_record:
|
|
||||||
inventory_record.tcgplayer_product_id = product.id
|
|
||||||
self.db.add(inventory_record)
|
|
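
The matching key on the Manabox side is therefore (group id derived from the set code, collector number with any decimal suffix stripped, single-letter rarity, condition with an optional " Foil" suffix). A standalone restatement of the three normalizers, fed with values from one of the CSV rows above, shows what those keys look like; the real methods live on DataService and also raise on unexpected input.

```python
# Standalone mirror of the DataService normalizers, simplified to the happy path.
def normalize_rarity(rarity: str) -> str:
    return {"rare": "R", "mythic": "M", "uncommon": "U", "common": "C"}[rarity.lower()]

def normalize_condition(condition: str, foil: str) -> str:
    base = "Near Mint" if condition.lower() == "near_mint" else condition
    return base + (" Foil" if foil.lower() == "foil" else "")

def normalize_number(number: str) -> str:
    return str(number.split(".")[0])

# Row: "Glarb, Calamity's Augur",BLB,...,215,foil,mythic,...,near_mint,...
print(normalize_rarity("mythic"))                # M
print(normalize_condition("near_mint", "foil"))  # Near Mint Foil
print(normalize_number("215.0"))                 # 215
```
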
@@ -1,205 +0,0 @@
import logging
|
|
||||||
from typing import Callable
|
|
||||||
from db.models import TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, ManaboxExportData, ManaboxTCGPlayerMapping, TCGPlayerProduct
|
|
||||||
from sqlalchemy.orm import Session
|
|
||||||
import pandas as pd
|
|
||||||
from db.utils import db_transaction
|
|
||||||
from sqlalchemy import func, and_, exists
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
class PricingService:
|
|
||||||
def __init__(self, db: Session):
|
|
||||||
self.db = db
|
|
||||||
|
|
||||||
def get_box_with_most_recent_prices(self, box_id: str) -> pd.DataFrame:
|
|
||||||
latest_prices = (
|
|
||||||
self.db.query(
|
|
||||||
TCGPlayerPricingHistory.tcgplayer_product_id,
|
|
||||||
func.max(TCGPlayerPricingHistory.date_created).label('max_date')
|
|
||||||
)
|
|
||||||
.group_by(TCGPlayerPricingHistory.tcgplayer_product_id)
|
|
||||||
.subquery('latest') # Added name to subquery
|
|
||||||
)
|
|
||||||
|
|
||||||
result = (
|
|
||||||
self.db.query(ManaboxExportData, TCGPlayerPricingHistory, TCGPlayerProduct)
|
|
||||||
.join(ManaboxTCGPlayerMapping, ManaboxExportData.id == ManaboxTCGPlayerMapping.manabox_id)
|
|
||||||
.join(TCGPlayerProduct, ManaboxTCGPlayerMapping.tcgplayer_id == TCGPlayerProduct.id)
|
|
||||||
.join(TCGPlayerPricingHistory, TCGPlayerProduct.id == TCGPlayerPricingHistory.tcgplayer_product_id)
|
|
||||||
.join(
|
|
||||||
latest_prices,
|
|
||||||
and_(
|
|
||||||
TCGPlayerPricingHistory.tcgplayer_product_id == latest_prices.c.tcgplayer_product_id,
|
|
||||||
TCGPlayerPricingHistory.date_created == latest_prices.c.max_date
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.filter(ManaboxExportData.box_id == box_id) # Removed str() conversion
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
|
|
||||||
logger.debug(f"Found {len(result)} rows")
|
|
||||||
|
|
||||||
df = pd.DataFrame([{
|
|
||||||
**{f"manabox_{k}": v for k, v in row[0].__dict__.items() if not k.startswith('_')},
|
|
||||||
**{f"pricing_{k}": v for k, v in row[1].__dict__.items() if not k.startswith('_')},
|
|
||||||
**{f"tcgproduct_{k}": v for k, v in row[2].__dict__.items() if not k.startswith('_')}
|
|
||||||
} for row in result])
|
|
||||||
|
|
||||||
return df
|
|
||||||
|
|
||||||
def get_live_inventory_with_most_recent_prices(self) -> pd.DataFrame:
|
|
||||||
# Get latest export IDs using subqueries
|
|
||||||
latest_inventory_export = (
|
|
||||||
self.db.query(TCGPlayerExportHistory.inventory_export_id)
|
|
||||||
.filter(TCGPlayerExportHistory.type == "live_inventory")
|
|
||||||
.order_by(TCGPlayerExportHistory.date_created.desc())
|
|
||||||
.limit(1)
|
|
||||||
.scalar_subquery()
|
|
||||||
)
|
|
||||||
# this is bad because latest pricing export is not guaranteed to be related to the latest inventory export
|
|
||||||
latest_pricing_export = (
|
|
||||||
self.db.query(TCGPlayerExportHistory.pricing_export_id)
|
|
||||||
.filter(TCGPlayerExportHistory.type == "pricing")
|
|
||||||
.order_by(TCGPlayerExportHistory.date_created.desc())
|
|
||||||
.limit(1)
|
|
||||||
.scalar_subquery()
|
|
||||||
)
|
|
||||||
|
|
||||||
# Join inventory and pricing data in a single query
|
|
||||||
inventory_with_pricing = (
|
|
||||||
self.db.query(TCGPlayerInventory, TCGPlayerPricingHistory)
|
|
||||||
.join(
|
|
||||||
TCGPlayerPricingHistory,
|
|
||||||
TCGPlayerInventory.tcgplayer_product_id == TCGPlayerPricingHistory.tcgplayer_product_id
|
|
||||||
)
|
|
||||||
.filter(
|
|
||||||
TCGPlayerInventory.export_id == latest_inventory_export,
|
|
||||||
TCGPlayerPricingHistory.export_id == latest_pricing_export
|
|
||||||
)
|
|
||||||
.all()
|
|
||||||
)
|
|
||||||
|
|
||||||
# Convert to pandas DataFrame
|
|
||||||
df = pd.DataFrame([{
|
|
||||||
# Inventory columns
|
|
||||||
**{f"inventory_{k}": v
|
|
||||||
for k, v in row[0].__dict__.items()
|
|
||||||
if not k.startswith('_')},
|
|
||||||
# Pricing columns
|
|
||||||
**{f"pricing_{k}": v
|
|
||||||
for k, v in row[1].__dict__.items()
|
|
||||||
if not k.startswith('_')}
|
|
||||||
} for row in inventory_with_pricing])
|
|
||||||
|
|
||||||
return df
|
|
||||||
|
|
||||||
def default_pricing_algo(self, df: pd.DataFrame = None):
|
|
||||||
if df is None:
|
|
||||||
logger.debug("No DataFrame provided, fetching live inventory with most recent prices")
|
|
||||||
df = self.get_live_inventory_with_most_recent_prices()
|
|
||||||
# if tcg low price is < 0.35, set my_price to 0.35
|
|
||||||
# if either tcg low price or tcg low price with shipping is under 5, set my_price to tcg low price * 1.25
|
|
||||||
# if tcg low price with shipping is > 25 set price to tcg low price with shipping * 1.025
|
|
||||||
# otherwise, set price to tcg low price with shipping * 1.10
|
|
||||||
# also round to 2 decimal places
|
|
||||||
df['my_price'] = df.apply(lambda row: round(
|
|
||||||
0.35 if row['pricing_tcg_low_price'] < 0.35 else
|
|
||||||
row['pricing_tcg_low_price'] * 1.25 if row['pricing_tcg_low_price'] < 5 or row['pricing_tcg_low_price_with_shipping'] < 5 else
|
|
||||||
row['pricing_tcg_low_price_with_shipping'] * 1.025 if row['pricing_tcg_low_price_with_shipping'] > 25 else
|
|
||||||
row['pricing_tcg_low_price_with_shipping'] * 1.10, 2), axis=1)
|
|
||||||
# log rows with no price
|
|
||||||
no_price = df[df['my_price'].isnull()]
|
|
||||||
if len(no_price) > 0:
|
|
||||||
logger.warning(f"Found {len(no_price)} rows with no price")
|
|
||||||
logger.warning(no_price)
|
|
||||||
# remove rows with no price
|
|
||||||
df = df.dropna(subset=['my_price'])
|
|
||||||
return df
|
|
||||||
|
|
||||||
def convert_df_to_csv(self, df: pd.DataFrame):
|
|
||||||
# Flip the mapping to be from current names TO desired names
|
|
||||||
column_mapping = {
|
|
||||||
'inventory_tcgplayer_id': 'TCGplayer Id',
|
|
||||||
'inventory_product_line': 'Product Line',
|
|
||||||
'inventory_set_name': 'Set Name',
|
|
||||||
'inventory_product_name': 'Product Name',
|
|
||||||
'inventory_title': 'Title',
|
|
||||||
'inventory_number': 'Number',
|
|
||||||
'inventory_rarity': 'Rarity',
|
|
||||||
'inventory_condition': 'Condition',
|
|
||||||
'pricing_tcg_market_price': 'TCG Market Price',
|
|
||||||
'pricing_tcg_direct_low': 'TCG Direct Low',
|
|
||||||
'pricing_tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
|
|
||||||
'pricing_tcg_low_price': 'TCG Low Price',
|
|
||||||
'inventory_total_quantity': 'Total Quantity',
|
|
||||||
'inventory_add_to_quantity': 'Add to Quantity',
|
|
||||||
'my_price': 'TCG Marketplace Price',
|
|
||||||
'inventory_photo_url': 'Photo URL'
|
|
||||||
}
|
|
||||||
|
|
||||||
df['pricing_tcg_market_price'] = ""
|
|
||||||
df['pricing_tcg_direct_low'] = ""
|
|
||||||
df['pricing_tcg_low_price_with_shipping'] = ""
|
|
||||||
df['pricing_tcg_low_price'] = ""
|
|
||||||
df['inventory_total_quantity'] = ""
|
|
||||||
df['inventory_add_to_quantity'] = 0
|
|
||||||
df['inventory_photo_url'] = ""
|
|
||||||
|
|
||||||
# First select the columns we want (using the keys of our mapping)
|
|
||||||
# Then rename them to the desired names (the values in our mapping)
|
|
||||||
df = df[column_mapping.keys()].rename(columns=column_mapping)
|
|
||||||
|
|
||||||
return df.to_csv(index=False, quoting=1, quotechar='"')
    def convert_add_df_to_csv(self, df: pd.DataFrame):
        column_mapping = {
            'tcgproduct_tcgplayer_id': 'TCGplayer Id',
            'tcgproduct_product_line': 'Product Line',
            'tcgproduct_set_name': 'Set Name',
            'tcgproduct_product_name': 'Product Name',
            'tcgproduct_title': 'Title',
            'tcgproduct_number': 'Number',
            'tcgproduct_rarity': 'Rarity',
            'tcgproduct_condition': 'Condition',
            'pricing_tcg_market_price': 'TCG Market Price',
            'pricing_tcg_direct_low': 'TCG Direct Low',
            'pricing_tcg_low_price_with_shipping': 'TCG Low Price With Shipping',
            'pricing_tcg_low_price': 'TCG Low Price',
            'tcgproduct_group_id': 'Total Quantity',
            'manabox_quantity': 'Add to Quantity',
            'my_price': 'TCG Marketplace Price',
            'tcgproduct_photo_url': 'Photo URL'
        }
        df['tcgproduct_group_id'] = ""
        df['pricing_tcg_market_price'] = ""
        df['pricing_tcg_direct_low'] = ""
        df['pricing_tcg_low_price_with_shipping'] = ""
        df['pricing_tcg_low_price'] = ""
        df['tcgproduct_photo_url'] = ""

        df = df[list(column_mapping.keys())].rename(columns=column_mapping)

        return df.to_csv(index=False, quoting=1, quotechar='"')

    def create_live_inventory_pricing_update_csv(self, algo: Callable = None) -> str:
        actual_algo = algo if algo is not None else self.default_pricing_algo
        df = actual_algo()
        csv = self.convert_df_to_csv(df)
        return csv

    def create_add_to_tcgplayer_csv(self, box_id: str = None, upload_id: str = None, algo: Callable = None) -> str:
        actual_algo = algo if algo is not None else self.default_pricing_algo
        if box_id and upload_id:
            raise ValueError("Cannot specify both box_id and upload_id")
        elif not box_id and not upload_id:
            raise ValueError("Must specify either box_id or upload_id")
        elif box_id:
            logger.debug("creating df")
            df = self.get_box_with_most_recent_prices(box_id)
        elif upload_id:
            raise NotImplementedError("Not yet implemented")
        df = actual_algo(df)
        csv = self.convert_add_df_to_csv(df)
        return csv
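    # Rough usage sketch (constructor arguments are placeholders; the service is
    # wired up elsewhere in the app):
    #
    #   pricing = PricingService(...)
    #   live_csv = pricing.create_live_inventory_pricing_update_csv()
    #   add_csv = pricing.create_add_to_tcgplayer_csv(box_id="<box id>")
    #
    # Both calls return CSV text shaped for TCGPlayer's pricing/inventory uploads.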
@@ -1,452 +0,0 @@
from db.models import ManaboxExportData, Box, TCGPlayerGroups, TCGPlayerInventory, TCGPlayerExportHistory, TCGPlayerPricingHistory, TCGPlayerProduct, ManaboxTCGPlayerMapping
import requests
from sqlalchemy.orm import Session
from db.utils import db_transaction
import uuid
import browser_cookie3
import webbrowser
from typing import Optional, Dict, List
from enum import Enum
import logging
from dataclasses import dataclass
import urllib.parse
import json
from datetime import datetime
import time
import csv
from io import StringIO, BytesIO
from services.pricing import PricingService
from sqlalchemy.sql import exists


logger = logging.getLogger(__name__)


class Browser(Enum):
    """Supported browser types for cookie extraction"""
    BRAVE = "brave"
    CHROME = "chrome"
    FIREFOX = "firefox"


@dataclass
class TCGPlayerConfig:
    """Configuration for TCGPlayer API interactions"""
    tcgplayer_base_url: str = "https://store.tcgplayer.com"
    tcgplayer_login_path: str = "/oauth/login"
    staged_inventory_download_path: str = "/Admin/Pricing/DownloadStagedInventoryExportCSV?type=Pricing"
    live_inventory_download_path: str = "/Admin/Pricing/DownloadMyExportCSV?type=Pricing"
    pricing_export_path: str = "/admin/pricing/downloadexportcsv"
    max_retries: int = 1


class TCGPlayerService:
    def __init__(self, db: Session,
                 pricing_service: PricingService,
                 config: TCGPlayerConfig = TCGPlayerConfig(),
                 browser_type: Browser = Browser.BRAVE):
        self.db = db
        self.config = config
        self.browser_type = browser_type
        self.cookies = None
        self.previous_request_time = None
        self.pricing_service = pricing_service

    def _insert_groups(self, groups):
        for group in groups:
            db_group = TCGPlayerGroups(
                id=str(uuid.uuid4()),
                group_id=group['groupId'],
                name=group['name'],
                abbreviation=group['abbreviation'],
                is_supplemental=group['isSupplemental'],
                published_on=group['publishedOn'],
                modified_on=group['modifiedOn'],
                category_id=group['categoryId']
            )
            self.db.add(db_group)

    def populate_tcgplayer_groups(self):
        group_endpoint = "https://tcgcsv.com/tcgplayer/1/groups"
        response = requests.get(group_endpoint)
        response.raise_for_status()
        groups = response.json()['results']
        # manually add broken groups
        groups.append({
            "groupId": 2422,
            "name": "Modern Horizons 2 Timeshifts",
            "abbreviation": "H2R",
            "isSupplemental": False,
            "publishedOn": "2018-11-08T00:00:00",
            "modifiedOn": "2018-11-08T00:00:00",
            "categoryId": 1
        })
        # Insert groups into db
        with db_transaction(self.db):
            self._insert_groups(groups)

    def _get_browser_cookies(self) -> Optional[Dict]:
        """Retrieve cookies from the specified browser"""
        try:
            cookie_getter = getattr(browser_cookie3, self.browser_type.value, None)
            if not cookie_getter:
                raise ValueError(f"Unsupported browser type: {self.browser_type.value}")
            return cookie_getter()
        except Exception as e:
            logger.error(f"Failed to get browser cookies: {str(e)}")
            return None

    def _send_request(self, url: str, method: str, data=None, except_302=False) -> Optional[requests.Response]:
        """Send a request with the stored browser cookies, rate limited to one request per 10 seconds"""
        # If the previous request was made less than 10 seconds ago, wait out the remainder
        if self.previous_request_time:
            time_diff = (datetime.now() - self.previous_request_time).total_seconds()
            if time_diff < 10:
                logger.info(f"Waiting {10 - time_diff:.1f} seconds before next request...")
                time.sleep(10 - time_diff)
        headers = self._set_headers(method)

        if not self.cookies:
            self.cookies = self._get_browser_cookies()
            if not self.cookies:
                raise ValueError("Failed to retrieve browser cookies")

        try:
            # logger.info(f"debug: request url {url}, method {method}, data {data}")
            response = requests.request(method, url, headers=headers, cookies=self.cookies, data=data)
            response.raise_for_status()

            if response.status_code == 302 and not except_302:
                logger.warning("Redirecting to login page...")
                self._refresh_authentication()
                return self._send_request(url, method, except_302=True)

            elif response.status_code == 302 and except_302:
                raise ValueError("Redirected to login page after authentication refresh")

            self.previous_request_time = datetime.now()

            return response

        except requests.RequestException as e:
            logger.error(f"Request failed: {str(e)}")
            return None

    def _set_headers(self, method: str) -> Dict:
        base_headers = {
            'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8',
            'accept-language': 'en-US,en;q=0.8',
            'priority': 'u=0, i',
            'referer': 'https://store.tcgplayer.com/admin/pricing',
            'sec-ch-ua': '"Not A(Brand";v="8", "Chromium";v="132", "Brave";v="132"',
            'sec-ch-ua-mobile': '?0',
            'sec-ch-ua-platform': '"macOS"',
            'sec-fetch-dest': 'document',
            'sec-fetch-mode': 'navigate',
            'sec-fetch-site': 'same-origin',
            'sec-fetch-user': '?1',
            'sec-gpc': '1',
            'upgrade-insecure-requests': '1',
            'user-agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/132.0.0.0 Safari/537.36'
        }

        if method == 'POST':
            post_headers = {
                'cache-control': 'max-age=0',
                'content-type': 'application/x-www-form-urlencoded',
                'origin': 'https://store.tcgplayer.com'
            }
            base_headers.update(post_headers)

        return base_headers

    def _set_pricing_export_payload(self, set_name_ids: List[str]) -> str:
        data = {
            "PricingType": "Pricing",
            "CategoryId": "1",
            "SetNameIds": set_name_ids,
            "ConditionIds": ["1"],
            "RarityIds": ["0"],
            "LanguageIds": ["1"],
            "PrintingIds": ["0"],
            "CompareAgainstPrice": False,
            "PriceToCompare": 3,
            "ValueToCompare": 1,
            "PriceValueToCompare": None,
            "MyInventory": False,
            "ExcludeListos": False,
            "ExportLowestListingNotMe": False
        }
        payload = "model=" + urllib.parse.quote(json.dumps(data))
        return payload
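    # The payload built above is a single form field of the shape "model=<url-encoded JSON>",
    # e.g. roughly: model=%7B%22PricingType%22%3A%20%22Pricing%22%2C%20%22CategoryId%22%3A%20%221%22%2C%20...%7D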
    def _refresh_authentication(self) -> None:
        """Open browser for user to refresh authentication"""
        login_url = f"{self.config.tcgplayer_base_url}{self.config.tcgplayer_login_path}"
        logger.info("Opening browser for authentication refresh...")
        webbrowser.open(login_url)
        input('Please login and press Enter to continue...')
        # Clear existing cookies to force refresh
        self.cookies = None

    def _get_inventory(self, version: str) -> Optional[List[Dict]]:
        if version == 'staged':
            inventory_download_url = f"{self.config.tcgplayer_base_url}{self.config.staged_inventory_download_path}"
        elif version == 'live':
            inventory_download_url = f"{self.config.tcgplayer_base_url}{self.config.live_inventory_download_path}"
        else:
            raise ValueError("Invalid inventory version")
        response = self._send_request(inventory_download_url, 'GET')
        if response:
            return self._process_content(response.content)
        return None

    def _process_content(self, content: bytes) -> List[Dict]:
        if not content:
            return []

        try:
            text_content = content.decode('utf-8')
        except UnicodeDecodeError:
            for encoding in ['latin-1', 'cp1252', 'iso-8859-1']:
                try:
                    text_content = content.decode(encoding)
                    break
                except UnicodeDecodeError:
                    continue
            else:
                raise

        csv_file = StringIO(text_content)
        try:
            reader = csv.DictReader(csv_file)
            inventory = [
                {k: v.strip() if v else None for k, v in row.items()}
                for row in reader
                if any(v and v.strip() for v in row.values())
            ]
            return inventory
        finally:
            csv_file.close()
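    # _process_content (above) returns the CSV as a list of dicts keyed by the header row,
    # with values stripped of whitespace and fully blank rows dropped; both the inventory
    # download and the pricing export responses are parsed through it.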
    def update_inventory(self, version: str) -> Dict:
        if version not in ['staged', 'live']:
            raise ValueError("Invalid inventory version")
        export_id = str(uuid.uuid4())
        inventory = self._get_inventory(version)
        if not inventory:
            return {"message": "No inventory to update"}

        # add snapshot id
        for item in inventory:
            item['export_id'] = export_id
            # check if product exists for tcgplayer_id
            product_exists = self.db.query(TCGPlayerProduct).filter_by(tcgplayer_id=item['TCGplayer Id']).first()
            if product_exists:
                item['tcgplayer_product_id'] = product_exists.id
            else:
                item['tcgplayer_product_id'] = None

        inventory_fields = {
            'TCGplayer Id': 'tcgplayer_id',
            'tcgplayer_product_id': 'tcgplayer_product_id',
            'export_id': 'export_id',
            'Product Line': 'product_line',
            'Set Name': 'set_name',
            'Product Name': 'product_name',
            'Title': 'title',
            'Number': 'number',
            'Rarity': 'rarity',
            'Condition': 'condition',
            'TCG Market Price': 'tcg_market_price',
            'TCG Direct Low': 'tcg_direct_low',
            'TCG Low Price With Shipping': 'tcg_low_price_with_shipping',
            'TCG Low Price': 'tcg_low_price',
            'Total Quantity': 'total_quantity',
            'Add to Quantity': 'add_to_quantity',
            'TCG Marketplace Price': 'tcg_marketplace_price'
        }

        with db_transaction(self.db):
            export_history = TCGPlayerExportHistory(
                id=str(uuid.uuid4()),
                type=version + '_inventory',
                inventory_export_id=export_id
            )
            self.db.add(export_history)
            for item in inventory:
                db_item = TCGPlayerInventory(
                    id=str(uuid.uuid4()),
                    **{db_field: item.get(csv_field)
                       for csv_field, db_field in inventory_fields.items()}
                )
                self.db.add(db_item)

        return {"message": "Inventory updated successfully", "export_id": export_id}
    def _get_export_csv(self, set_name_ids: List[str]) -> List[Dict]:
        """
        Download the pricing export CSV for the given set name ids
        and return it as a list of row dicts.
        """
        payload = self._set_pricing_export_payload(set_name_ids)
        export_csv_download_url = f"{self.config.tcgplayer_base_url}{self.config.pricing_export_path}"
        response = self._send_request(export_csv_download_url, method='POST', data=payload)
        export_rows = self._process_content(response.content)
        return export_rows

    def _update_tcgplayer_products(self):
        pass

    def update_pricing(self, set_name_ids: Dict[str, List[str]]) -> Dict:
        export_id = str(uuid.uuid4())
        product_fields = {
            'TCGplayer Id': 'tcgplayer_id',
            'group_id': 'group_id',
            'Product Line': 'product_line',
            'Set Name': 'set_name',
            'Product Name': 'product_name',
            'Title': 'title',
            'Number': 'number',
            'Rarity': 'rarity',
            'Condition': 'condition'
        }
        pricing_fields = {
            'TCGplayer Id': 'tcgplayer_id',
            'tcgplayer_product_id': 'tcgplayer_product_id',
            'export_id': 'export_id',
            'group_id': 'group_id',
            'TCG Market Price': 'tcg_market_price',
            'TCG Direct Low': 'tcg_direct_low',
            'TCG Low Price With Shipping': 'tcg_low_price_with_shipping',
            'TCG Low Price': 'tcg_low_price',
            'TCG Marketplace Price': 'tcg_marketplace_price'
        }

        for set_name_id in set_name_ids['set_name_ids']:
            export_csv = self._get_export_csv([set_name_id])
            for item in export_csv:
                item['export_id'] = export_id
                item['group_id'] = set_name_id
                # check if product already exists
                product_exists = self.db.query(TCGPlayerProduct).filter_by(tcgplayer_id=item['TCGplayer Id']).first()
                if product_exists:
                    item['tcgplayer_product_id'] = product_exists.id
                else:
                    with db_transaction(self.db):
                        product = TCGPlayerProduct(
                            id=str(uuid.uuid4()),
                            **{db_field: item.get(csv_field)
                               for csv_field, db_field in product_fields.items()}
                        )
                        self.db.add(product)
                        item['tcgplayer_product_id'] = product.id

                with db_transaction(self.db):
                    ph_item = TCGPlayerPricingHistory(
                        id=str(uuid.uuid4()),
                        **{db_field: item.get(csv_field)
                           for csv_field, db_field in pricing_fields.items()}
                    )
                    self.db.add(ph_item)

        with db_transaction(self.db):
            export_history = TCGPlayerExportHistory(
                id=str(uuid.uuid4()),
                type='pricing',
                pricing_export_id=export_id
            )
            self.db.add(export_history)

        return {"message": "Pricing updated successfully"}
    def update_pricing_all(self) -> Dict:
        set_name_ids = self.db.query(TCGPlayerGroups.group_id).all()
        set_name_ids = [str(group_id) for group_id, in set_name_ids]
        return self.update_pricing({'set_name_ids': set_name_ids})

    def update_pricing_for_existing_product_groups(self) -> Dict:
        set_name_ids = self.db.query(TCGPlayerProduct.group_id).distinct().all()
        set_name_ids = [str(group_id) for group_id, in set_name_ids]
        return self.update_pricing({'set_name_ids': set_name_ids})

    def tcg_set_tcg_inventory_product_relationship(self, export_id: str) -> None:
        inventory_without_product = (
            self.db.query(TCGPlayerInventory.tcgplayer_id, TCGPlayerInventory.set_name)
            .filter(TCGPlayerInventory.total_quantity > 0)
            .filter(TCGPlayerInventory.product_line == "Magic")
            .filter(TCGPlayerInventory.export_id == export_id)
            .filter(TCGPlayerInventory.tcgplayer_product_id.is_(None))
            .filter(~exists().where(
                TCGPlayerProduct.id == TCGPlayerInventory.tcgplayer_product_id
            ))
            .all()
        )

        set_names = list(set(inv.set_name for inv in inventory_without_product
                             if inv.set_name is not None and isinstance(inv.set_name, str)))

        group_ids = self.db.query(TCGPlayerGroups.group_id).filter(
            TCGPlayerGroups.name.in_(set_names)
        ).all()

        group_ids = [str(group_id[0]) for group_id in group_ids]

        self.update_pricing(set_name_ids={"set_name_ids": group_ids})

        for inventory in inventory_without_product:
            product = self.db.query(TCGPlayerProduct).filter(
                TCGPlayerProduct.tcgplayer_id == inventory.tcgplayer_id
            ).first()

            if product:
                with db_transaction(self.db):
                    inventory_record = self.db.query(TCGPlayerInventory).filter(
                        TCGPlayerInventory.tcgplayer_id == inventory.tcgplayer_id,
                        TCGPlayerInventory.export_id == export_id
                    ).first()

                    if inventory_record:
                        inventory_record.tcgplayer_product_id = product.id
                        self.db.add(inventory_record)

    def get_live_inventory_pricing_update_csv(self):
        export_id = self.update_inventory("live")['export_id']
        self.tcg_set_tcg_inventory_product_relationship(export_id)
        self.update_pricing_for_existing_product_groups()
        update_csv = self.pricing_service.create_live_inventory_pricing_update_csv()
        return update_csv
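    # The method above is the end-to-end live pricing update flow: snapshot the live
    # inventory, link unmatched rows to TCGPlayerProduct records, refresh pricing for
    # the groups already tracked, then hand off to PricingService to build the upload CSV.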
    def get_group_ids_for_box(self, box_id: str) -> List[str]:
        # use manabox_export_data.box_id and tcgplayer_product.group_id to filter
        # use manabox_tcgplayer_mapping.manabox_id and manabox_tcgplayer_mapping.tcgplayer_id to join
        group_ids = self.db.query(ManaboxExportData.box_id, TCGPlayerProduct.group_id).join(
            ManaboxTCGPlayerMapping, ManaboxExportData.id == ManaboxTCGPlayerMapping.manabox_id
        ).join(
            TCGPlayerProduct, ManaboxTCGPlayerMapping.tcgplayer_id == TCGPlayerProduct.id
        ).filter(ManaboxExportData.box_id == box_id).all()
        group_ids = list(set(str(group_id) for _, group_id in group_ids))
        return group_ids

    def get_group_ids_for_upload(self, upload_id: str) -> List[str]:
        group_ids = self.db.query(ManaboxExportData.upload_id, TCGPlayerProduct.group_id).join(
            ManaboxTCGPlayerMapping, ManaboxExportData.id == ManaboxTCGPlayerMapping.manabox_id
        ).join(
            TCGPlayerProduct, ManaboxTCGPlayerMapping.tcgplayer_id == TCGPlayerProduct.id
        ).filter(ManaboxExportData.upload_id == upload_id).all()
        group_ids = list(set(str(group_id) for _, group_id in group_ids))
        return group_ids

    def add_to_tcgplayer(self, box_id: str = None, upload_id: str = None):
        if box_id and upload_id:
            raise ValueError("Cannot provide both box_id and upload_id")
        elif box_id:
            group_ids = self.get_group_ids_for_box(box_id)
        elif upload_id:
            group_ids = self.get_group_ids_for_upload(upload_id)
        else:
            raise ValueError("Must provide either box_id or upload_id")
        self.update_pricing({'set_name_ids': group_ids})
        add_csv = self.pricing_service.create_add_to_tcgplayer_csv(box_id=box_id, upload_id=upload_id)
        return add_csv
@@ -1,97 +0,0 @@
from db.models import ManaboxExportData, UploadHistory
import pandas as pd
from io import StringIO
import uuid
import logging
from sqlalchemy.orm import Session
from db.utils import db_transaction
from exceptions import FailedUploadException

logger = logging.getLogger(__name__)


class UploadObject:
    def __init__(self,
                 content: bytes = None,
                 upload_id: str = None,
                 filename: str = None,
                 df: pd.DataFrame = None):
        self.content = content
        self.upload_id = upload_id
        self.filename = filename
        self.df = df


class UploadService:
    def __init__(self, db: Session):
        self.db = db

    def _content_to_df(self, content: bytes) -> pd.DataFrame:
        df = pd.read_csv(StringIO(content.decode('utf-8')))
        df.columns = df.columns.str.lower().str.replace(' ', '_')
        return df
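    # Example of the header normalization above: a Manabox column such as
    # "Purchase price currency" (however Manabox capitalizes it) becomes
    # "purchase_price_currency", matching the keys used by _update_manabox_data below.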
    def _create_upload_id(self) -> str:
        return str(uuid.uuid4())

    def _prepare_manabox_df(self, content: bytes, upload_id: str) -> pd.DataFrame:
        df = self._content_to_df(content)
        df['upload_id'] = upload_id
        df['box_id'] = None

        return df

    def _create_file_upload_record(self, upload_id: str, filename: str) -> UploadHistory:
        file_upload_record = UploadHistory(
            id=str(uuid.uuid4()),
            upload_id=upload_id,
            filename=filename,
            status="pending"
        )
        self.db.add(file_upload_record)
        return file_upload_record

    def _update_manabox_data(self, df: pd.DataFrame) -> bool:
        for index, row in df.iterrows():
            try:
                add_row = ManaboxExportData(
                    id=str(uuid.uuid4()),
                    upload_id=row['upload_id'],
                    box_id=row['box_id'],
                    name=row['name'],
                    set_code=row['set_code'],
                    set_name=row['set_name'],
                    collector_number=row['collector_number'],
                    foil=row['foil'],
                    rarity=row['rarity'],
                    quantity=row['quantity'],
                    manabox_id=row['manabox_id'],
                    scryfall_id=row['scryfall_id'],
                    purchase_price=row['purchase_price'],
                    misprint=row['misprint'],
                    altered=row['altered'],
                    condition=row['condition'],
                    language=row['language'],
                    purchase_price_currency=row['purchase_price_currency']
                )
                self.db.add(add_row)
            except Exception as e:
                logger.error(f"Error adding row {index} to ManaboxExportData: {e}")
                return False
        return True

    def process_manabox_upload(self, content: bytes, filename: str):
        upload = UploadObject(content=content, filename=filename)
        upload.upload_id = self._create_upload_id()
        upload.df = self._prepare_manabox_df(upload.content, upload.upload_id)

        with db_transaction(self.db):
            file_upload_record = self._create_file_upload_record(upload.upload_id, upload.filename)
            if not self._update_manabox_data(upload.df):
                # set upload to failed
                file_upload_record.status = "failed"
                raise FailedUploadException(file_upload_record)
            else:
                # set upload_history status to success
                file_upload_record.status = "success"
        return {"message": f"Manabox upload successful. Upload ID: {upload.upload_id}"}, upload.upload_id