Compare commits
6 commits: 721b26ce97 ... clean

SHA1:
1f5361da88
511b070cbb
964fdd641b
a78c3bcba3
bd9cfca7a9
85510a4671
.gitignore (vendored, 1 change)
@@ -173,4 +173,3 @@ cython_debug/
 temp/
 .DS_Store
 *.db-journal
-cookies/
Dockerfile (15 changes)
@@ -1,15 +0,0 @@
-FROM python:3.13-slim
-
-WORKDIR /app
-
-ENV DATABASE_URL=postgresql://poggers:giga!@192.168.1.41:5432/omegatcgdb
-
-COPY requirements.txt .
-
-RUN pip install --no-cache-dir -r requirements.txt
-
-COPY . .
-
-EXPOSE 8000
-
-CMD ["uvicorn", "app.main:app", "--host", "0.0.0.0", "--port", "8000"]
alembic.ini (119 changes)
@@ -1,119 +0,0 @@
-# A generic, single database configuration.
-
-[alembic]
-# path to migration scripts
-# Use forward slashes (/) also on windows to provide an os agnostic path
-script_location = alembic
-
-# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
-# Uncomment the line below if you want the files to be prepended with date and time
-# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
-# for all available tokens
-# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s
-
-# sys.path path, will be prepended to sys.path if present.
-# defaults to the current working directory.
-prepend_sys_path = .
-
-# timezone to use when rendering the date within the migration file
-# as well as the filename.
-# If specified, requires the python>=3.9 or backports.zoneinfo library and tzdata library.
-# Any required deps can installed by adding `alembic[tz]` to the pip requirements
-# string value is passed to ZoneInfo()
-# leave blank for localtime
-# timezone =
-
-# max length of characters to apply to the "slug" field
-# truncate_slug_length = 40
-
-# set to 'true' to run the environment during
-# the 'revision' command, regardless of autogenerate
-# revision_environment = false
-
-# set to 'true' to allow .pyc and .pyo files without
-# a source .py file to be detected as revisions in the
-# versions/ directory
-# sourceless = false
-
-# version location specification; This defaults
-# to alembic/versions. When using multiple version
-# directories, initial revisions must be specified with --version-path.
-# The path separator used here should be the separator specified by "version_path_separator" below.
-# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions
-
-# version path separator; As mentioned above, this is the character used to split
-# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
-# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
-# Valid values for version_path_separator are:
-#
-# version_path_separator = :
-# version_path_separator = ;
-# version_path_separator = space
-# version_path_separator = newline
-#
-# Use os.pathsep. Default configuration used for new projects.
-version_path_separator = os
-
-# set to 'true' to search source files recursively
-# in each "version_locations" directory
-# new in Alembic version 1.10
-# recursive_version_locations = false
-
-# the output encoding used when revision files
-# are written from script.py.mako
-# output_encoding = utf-8
-
-sqlalchemy.url = sqlite:///omegacard.db
-
-
-[post_write_hooks]
-# post_write_hooks defines scripts or Python functions that are run
-# on newly generated revision scripts. See the documentation for further
-# detail and examples
-
-# format using "black" - use the console_scripts runner, against the "black" entrypoint
-# hooks = black
-# black.type = console_scripts
-# black.entrypoint = black
-# black.options = -l 79 REVISION_SCRIPT_FILENAME
-
-# lint with attempts to fix using "ruff" - use the exec runner, execute a binary
-# hooks = ruff
-# ruff.type = exec
-# ruff.executable = %(here)s/.venv/bin/ruff
-# ruff.options = --fix REVISION_SCRIPT_FILENAME
-
-# Logging configuration
-[loggers]
-keys = root,sqlalchemy,alembic
-
-[handlers]
-keys = console
-
-[formatters]
-keys = generic
-
-[logger_root]
-level = WARNING
-handlers = console
-qualname =
-
-[logger_sqlalchemy]
-level = WARNING
-handlers =
-qualname = sqlalchemy.engine
-
-[logger_alembic]
-level = INFO
-handlers =
-qualname = alembic
-
-[handler_console]
-class = StreamHandler
-args = (sys.stderr,)
-level = NOTSET
-formatter = generic
-
-[formatter_generic]
-format = %(levelname)-5.5s [%(name)s] %(message)s
-datefmt = %H:%M:%S
@@ -1 +0,0 @@
-Generic single-database configuration.
@@ -1,90 +0,0 @@
-from logging.config import fileConfig
-
-from sqlalchemy import engine_from_config
-from sqlalchemy import pool
-
-from alembic import context
-
-import sys
-import os
-
-sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
-
-from app.db.models import Base
-from app.db.database import DATABASE_URL
-
-# this is the Alembic Config object, which provides
-# access to the values within the .ini file in use.
-config = context.config
-
-config.set_main_option('sqlalchemy.url', DATABASE_URL)
-
-
-# Interpret the config file for Python logging.
-# This line sets up loggers basically.
-if config.config_file_name is not None:
-    fileConfig(config.config_file_name)
-
-# add your model's MetaData object here
-# for 'autogenerate' support
-# from myapp import mymodel
-# target_metadata = mymodel.Base.metadata
-target_metadata = Base.metadata
-
-# other values from the config, defined by the needs of env.py,
-# can be acquired:
-# my_important_option = config.get_main_option("my_important_option")
-# ... etc.
-
-
-def run_migrations_offline() -> None:
-    """Run migrations in 'offline' mode.
-
-    This configures the context with just a URL
-    and not an Engine, though an Engine is acceptable
-    here as well. By skipping the Engine creation
-    we don't even need a DBAPI to be available.
-
-    Calls to context.execute() here emit the given string to the
-    script output.
-
-    """
-    url = config.get_main_option("sqlalchemy.url")
-    context.configure(
-        url=url,
-        target_metadata=target_metadata,
-        literal_binds=True,
-        dialect_opts={"paramstyle": "named"},
-    )
-
-    with context.begin_transaction():
-        context.run_migrations()
-
-
-def run_migrations_online() -> None:
-    """Run migrations in 'online' mode.
-
-    In this scenario we need to create an Engine
-    and associate a connection with the context.
-
-    """
-    connectable = engine_from_config(
-        config.get_section(config.config_ini_section, {}),
-        prefix="sqlalchemy.",
-        poolclass=pool.NullPool,
-    )
-
-    with connectable.connect() as connection:
-        context.configure(
-            connection=connection, target_metadata=target_metadata
-        )
-
-        with context.begin_transaction():
-            context.run_migrations()
-
-
-if context.is_offline_mode():
-    run_migrations_offline()
-else:
-    run_migrations_online()
@@ -1,26 +0,0 @@
-"""${message}
-
-Revision ID: ${up_revision}
-Revises: ${down_revision | comma,n}
-Create Date: ${create_date}
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-${imports if imports else ""}
-
-# revision identifiers, used by Alembic.
-revision: str = ${repr(up_revision)}
-down_revision: Union[str, None] = ${repr(down_revision)}
-branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
-depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
-
-
-def upgrade() -> None:
-    ${upgrades if upgrades else "pass"}
-
-
-def downgrade() -> None:
-    ${downgrades if downgrades else "pass"}
@@ -1,30 +0,0 @@
-"""empty message
-
-Revision ID: f629adc7e597
-Revises:
-Create Date: 2025-02-07 20:13:32.559672
-
-"""
-from typing import Sequence, Union
-
-from alembic import op
-import sqlalchemy as sa
-
-
-# revision identifiers, used by Alembic.
-revision: str = 'f629adc7e597'
-down_revision: Union[str, None] = None
-branch_labels: Union[str, Sequence[str], None] = None
-depends_on: Union[str, Sequence[str], None] = None
-
-
-def upgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###
-
-
-def downgrade() -> None:
-    # ### commands auto generated by Alembic - please adjust! ###
-    pass
-    # ### end Alembic commands ###
@@ -1,168 +0,0 @@
-from brother_ql.conversion import convert
-from brother_ql.backends.helpers import send
-from brother_ql.raster import BrotherQLRaster
-from PIL import Image, ImageDraw, ImageFont
-import platform
-import pandas as pd
-from time import sleep
-import pdf2image
-import io
-
-# Printer settings
-printer_model = "QL-1100"
-backend = 'pyusb'  # Changed from network to USB
-printer = 'usb://0x04f9:0x20a7'
-
-def convert_pdf_to_image(pdf_path):
-    """Converts a PDF to PIL Image"""
-    try:
-        # Convert PDF to image
-        images = pdf2image.convert_from_path(pdf_path)
-        if images:
-            return images[0]  # Return first page
-        return None
-    except Exception as e:
-        print(f"Error converting PDF: {str(e)}")
-        return None
-
-def create_address_label(input_data, font_size=30, is_pdf=False):
-    """Creates and returns the label image without printing"""
-    if is_pdf:
-        if isinstance(input_data, str):  # If path is provided
-            return convert_pdf_to_image(input_data)
-        else:  # If PIL Image is provided
-            return input_data
-
-    # Regular text-based label creation
-    label_width = 991
-    label_height = 306
-
-    image = Image.new('L', (label_width, label_height), 'white')
-    draw = ImageDraw.Draw(image)
-
-    # Font selection based on OS
-    if platform.system() == 'Windows':
-        font = ImageFont.truetype("C:\\Windows\\Fonts\\arial.ttf", size=font_size)
-    elif platform.system() == 'Darwin':
-        font = ImageFont.truetype("/Library/Fonts/Arial.ttf", size=font_size)
-
-    margin = 20
-    lines = input_data.split('\n')
-    line_height = font_size + 5
-    total_height = line_height * len(lines)
-    start_y = (label_height - total_height) // 2
-
-    for i, line in enumerate(lines):
-        y = start_y + (i * line_height)
-        draw.text((margin, y), line, font=font, fill='black')
-
-    return image
-
-def preview_label(input_data, font_size=30, is_pdf=False):
-    """Creates and displays the label preview"""
-    image = create_address_label(input_data, font_size, is_pdf)
-    if image:
-        image.show()
-
-def print_address_label(input_data, font_size=30, is_pdf=False, label_size='29x90'):
-    """Prints the label with support for both text and PDF inputs"""
-    try:
-        image = create_address_label(input_data, font_size, is_pdf)
-        if not image:
-            raise Exception("Failed to create label image")
-
-        # For 4x6 shipping labels from Pirate Ship
-        if label_size == '4x6':
-            # Resize image to fit 4x6 format if needed
-            target_width = 1164  # Adjusted for 4x6 format
-            target_height = 1660
-            image = image.resize((target_width, target_height), Image.LANCZOS)
-
-        qlr = BrotherQLRaster(printer_model)
-        qlr.exception_on_warning = True
-
-        print("Converting image to printer instructions...")
-        instructions = convert(
-            qlr=qlr,
-            images=[image],
-            label='29x90' if label_size == '29x90' else '102x152',
-            threshold=70.0,
-            dither=False,
-            compress=False,
-            red=False,
-            dpi_600=False,
-            hq=True,
-            #cut=True
-            cut=False
-        )
-
-        print("Sending to printer...")
-        send(
-            instructions=instructions,
-            printer_identifier=printer,
-            backend_identifier=backend,
-            blocking=True
-        )
-        print("Print job sent successfully")
-
-    except Exception as e:
-        print(f"Error during printing: {str(e)}")
-
-def process_pirate_ship_pdf(pdf_path, preview=False):
-    """Process and print a Pirate Ship PDF shipping label"""
-    if preview:
-        preview_label(pdf_path, is_pdf=True)
-    else:
-        print_address_label(pdf_path, is_pdf=True, label_size='4x6')
-
-def process_tcg_shipping_export(file_path, require_input=False, font_size=60, preview=False):
-    # Load the CSV file, all columns are strings
-    df = pd.read_csv(file_path, dtype=str)
-    print(df.dtypes)
-    for i, row in df.iterrows():
-        line1 = str(row['FirstName']) + ' ' + str(row['LastName'])
-        line2 = str(row['Address1'])
-        if not pd.isna(row['Address2']):
-            line2 += ' ' + str(row['Address2'])
-        line3 = str(row['City']) + ', ' + str(row['State']) + ' ' + str(row['PostalCode'])
-        address = f"{line1}\n{line2}\n{line3}"
-        if preview:
-            preview_label(address, font_size=font_size)
-        else:
-            print_address_label(address, font_size=font_size)
-            if require_input:
-                input("Press Enter to continue...")
-            else:
-                sleep(1)
-
-# Example usage
-if __name__ == "__main__":
-    # Example for regular address label
-    address = """John Doe
-123 Main Street
-Apt 4B
-City, State 12345"""
-
-    # Example for TCG Player export
-    shipping_export_file = "_TCGplayer_ShippingExport_20250201_115949.csv"
-
-    # Example for Pirate Ship PDF
-    pirate_ship_pdf = "C:\\Users\\joshu\\Downloads\\2025-02-10---greg-creek---9400136208070411592215.pdf"
-
-    # Choose which type to process
-    label_type = input("Enter label type (1 for regular, 2 for TCG, 3 for Pirate Ship): ")
-
-    if label_type == "1":
-        preview_label(address, font_size=60)
-        user_input = input("Press 'p' to print the label or any other key to cancel: ")
-        if user_input.lower() == 'p':
-            print_address_label(address, font_size=60)
-
-    elif label_type == "2":
-        process_tcg_shipping_export(shipping_export_file, font_size=60, preview=False)
-
-    elif label_type == "3":
-        process_pirate_ship_pdf(pirate_ship_pdf, preview=True)
-        user_input = input("Press 'p' to print the label or any other key to cancel: ")
-        if user_input.lower() == 'p':
-            process_pirate_ship_pdf(pirate_ship_pdf, preview=False)
@@ -4,10 +4,10 @@ from contextlib import contextmanager
 from typing import Generator
 import os
 from sqlalchemy import inspect
-from app.services.tcgplayer import TCGPlayerService
-from app.services.pricing import PricingService
-from app.services.file import FileService
-from app.db.models import Price
+from services.tcgplayer import TCGPlayerService
+from services.pricing import PricingService
+from services.file import FileService
+from db.models import Price
 from datetime import datetime
@@ -132,7 +132,7 @@ class Card(Base):
     date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

 class CardManabox(Base):
-    __tablename__ = "manabox_cards"
+    __tablename__ = "card_manabox"

     product_id = Column(String, ForeignKey("cards.product_id"), primary_key=True)
     name = Column(String)
@@ -147,10 +147,10 @@ class CardManabox(Base):
     language = Column(String)

 class CardTCGPlayer(Base):
-    __tablename__ = "tcgplayer_cards"
+    __tablename__ = "card_tcgplayer"

     product_id = Column(String, ForeignKey("cards.product_id"), primary_key=True)
-    group_id = Column(Integer)
+    group_id = Column(Integer, ForeignKey("tcgplayer_groups.group_id"))
     tcgplayer_id = Column(Integer)
     product_line = Column(String)
     set_name = Column(String)
@@ -164,7 +164,7 @@ class Warehouse(Base):
     """
     container that is associated with a user and contains inventory and stock
     """
-    __tablename__ = "warehouses"
+    __tablename__ = "warehouse"

     id = Column(String, primary_key=True)
     user_id = Column(String, ForeignKey("users.id"))
@@ -175,10 +175,10 @@ class Stock(Base):
     """
     contains products that are listed for sale
     """
-    __tablename__ = "stocks"
+    __tablename__ = "stock"

     product_id = Column(String, ForeignKey("products.id"), primary_key=True)
-    warehouse_id = Column(String, ForeignKey("warehouses.id"), default="default")
+    warehouse_id = Column(String, ForeignKey("warehouse.id"), default="default")
     marketplace_id = Column(String, ForeignKey("marketplaces.id"))
     quantity = Column(Integer)
     date_created = Column(DateTime, default=datetime.now)
@@ -190,10 +190,10 @@ class Inventory(Base):
     sealed product in breakdown queue, held sealed product, speculatively held singles, etc.
     inventory can contain products across multiple marketplaces
     """
-    __tablename__ = "inventories"
+    __tablename__ = "inventory"

     product_id = Column(String, ForeignKey("products.id"), primary_key=True)
-    warehouse_id = Column(String, ForeignKey("warehouses.id"), default="default")
+    warehouse_id = Column(String, ForeignKey("warehouse.id"), default="default")
     quantity = Column(Integer)
     date_created = Column(DateTime, default=datetime.now)
     date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)
@@ -243,12 +243,12 @@ class File(Base):
     filepath = Column(String)  # backup location
     filesize_kb = Column(Float)
     status = Column(String)
-    box_id = Column(String, nullable=True)
+    box_id = Column(String, ForeignKey("boxes.product_id"), nullable=True)
     date_created = Column(DateTime, default=datetime.now)
     date_modified = Column(DateTime, default=datetime.now, onupdate=datetime.now)

 class Price(Base):
-    __tablename__ = "prices"
+    __tablename__ = "price"

     id = Column(String, primary_key=True)
     product_id = Column(String, ForeignKey("products.id"))
@@ -271,7 +271,7 @@ class StorageBlock(Base):
     return type
     """

     id = Column(String, primary_key=True)
-    warehouse_id = Column(String, ForeignKey("warehouses.id"))
+    warehouse_id = Column(String, ForeignKey("warehouse.id"))
     name = Column(String)
     type = Column(String)  # rare or common
     date_created = Column(DateTime, default=datetime.now)
@@ -282,7 +282,7 @@ class ProductBlock(Base):
     ProductBlock represents the relationship between a product and a storage block
     which products are in a block and at what index
     """
-    __tablename__ = "product_blocks"
+    __tablename__ = "product_block"

     id = Column(String, primary_key=True)
     product_id = Column(String, ForeignKey("products.id"))
@@ -295,7 +295,7 @@ class OpenBoxCard(Base):
     """
    OpenedBoxCard represents the relationship between an opened box and the cards it contains
     """
-    __tablename__ = "open_box_cards"
+    __tablename__ = "open_box_card"

     id = Column(String, primary_key=True)
     open_box_id = Column(String, ForeignKey("open_boxes.id"))
@@ -308,7 +308,7 @@ class ProductSale(Base):
     """
     ProductSale represents the relationship between products and sales
     """
-    __tablename__ = "product_sales"
+    __tablename__ = "product_sale"

     id = Column(String, primary_key=True)
     product_id = Column(String, ForeignKey("products.id"))
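The renames above switch every __tablename__ to its singular form, and each ForeignKey(...) string has to be updated in the same change because SQLAlchemy resolves those strings against table names, not class names. A minimal sketch of the pattern, with illustrative columns rather than the full models from this repository:

from sqlalchemy import Column, ForeignKey, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class Warehouse(Base):
    __tablename__ = "warehouse"              # renamed from "warehouses"
    id = Column(String, primary_key=True)

class Stock(Base):
    __tablename__ = "stock"                  # renamed from "stocks"
    product_id = Column(String, primary_key=True)
    # ForeignKey takes the table name, so it must follow the rename,
    # exactly as the hunks above change "warehouses.id" to "warehouse.id".
    warehouse_id = Column(String, ForeignKey("warehouse.id"), default="default")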
@@ -1,6 +1,6 @@
 from contextlib import contextmanager
 from sqlalchemy.orm import Session
-from app.exceptions import FailedUploadException
+from exceptions import FailedUploadException
 import logging

 logger = logging.getLogger(__name__)
@@ -2,17 +2,17 @@ from typing import Annotated
 from sqlalchemy.orm import Session
 from fastapi import Depends, Form

-from app.services.box import BoxService
-from app.services.tcgplayer import TCGPlayerService
-from app.services.pricing import PricingService
-from app.services.file import FileService
-from app.services.product import ProductService
-from app.services.inventory import InventoryService
-from app.services.task import TaskService
-from app.services.storage import StorageService
-from app.db.database import get_db
-from app.schemas.file import CreateFileRequest
-from app.schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest
+from services.box import BoxService
+from services.tcgplayer import TCGPlayerService
+from services.pricing import PricingService
+from services.file import FileService
+from services.product import ProductService
+from services.inventory import InventoryService
+from services.task import TaskService
+from services.storage import StorageService
+from db.database import get_db
+from schemas.file import CreateFileRequest
+from schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest

 # Common type annotation for database dependency
 DB = Annotated[Session, Depends(get_db)]
dns.txt (6 changes)
@@ -1,6 +0,0 @@
-@ IN MX 1 aspmx.l.google.com.
-@ IN MX 5 alt1.aspmx.l.google.com.
-@ IN MX 5 alt2.aspmx.l.google.com.
-@ IN MX 10 alt3.aspmx.l.google.com.
-@ IN MX 10 alt4.aspmx.l.google.com.
-@ IN TXT "v=spf1 include:_spf.google.com ~all"
@@ -1,13 +1,13 @@
 from fastapi import FastAPI
 from fastapi.middleware.cors import CORSMiddleware
 import uvicorn
-from app.routes.routes import router
-from app.db.database import init_db, check_db_connection, get_db
+from routes.routes import router
+from db.database import init_db, check_db_connection, get_db
 import logging
 import sys

 # Import your dependency functions
-from app.dependencies import (
+from dependencies import (
     get_task_service,
     get_tcgplayer_service,
     get_pricing_service,
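With the app. package prefix dropped from these imports, the ASGI module path changes as well: the deleted Dockerfile started uvicorn with app.main:app, which would no longer resolve once the code is imported as top-level modules. A hedged sketch of the equivalent entry point, run from the directory that contains main.py (host and port taken from the deleted Dockerfile, not confirmed for the new tree):

import uvicorn

if __name__ == "__main__":
    # Module path is now "main:app" rather than "app.main:app".
    uvicorn.run("main:app", host="0.0.0.0", port=8000)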
requests.md (17 changes)
@@ -1,17 +0,0 @@
-curl -J http://192.168.1.41:8000/api/tcgplayer/inventory/update --remote-name
-
-curl -J -X POST http://192.168.1.41:8000/api/tcgplayer/inventory/add \
--H "Content-Type: application/json" \
--d '{"open_box_ids": ["fb629d9d-13d2-405e-9a69-6c44294d55de"]}' \
---remote-name
-
-curl -X POST http://192.168.1.41:8000/api/boxes \
--F "type=draft" \
--F "set_code=CLB" \
--F "sku=195166181127" \
--F "num_cards_expected=480"
-
-curl -X POST "http://192.168.1.41:8000/api/boxes/0d31b9c3-3093-438a-9e8c-b6b70a2d437e/open" \
--F "product_id=0d31b9c3-3093-438a-9e8c-b6b70a2d437e" \
--F "file_ids=bb4a022c-5427-49b5-b57c-0147b5e9c4a9" \
--F "date_opened=2025-02-15"
@@ -1,32 +1,18 @@
-alembic==1.14.1
 annotated-types==0.7.0
 anyio==4.8.0
-APScheduler==3.11.0
 browser-cookie3==0.20.1
 certifi==2025.1.31
 charset-normalizer==3.4.1
 click==8.1.8
-coverage==7.6.10
 fastapi==0.115.8
 h11==0.14.0
-httpcore==1.0.7
-httpx==0.28.1
 idna==3.10
-iniconfig==2.0.0
 lz4==4.4.3
-Mako==1.3.9
-MarkupSafe==3.0.2
 numpy==2.2.2
-packaging==24.2
 pandas==2.2.3
-pluggy==1.5.0
-psycopg2-binary==2.9.10
 pycryptodomex==3.21.0
 pydantic==2.10.6
 pydantic_core==2.27.2
-pytest==8.3.4
-pytest-asyncio==0.25.3
-pytest-cov==6.0.0
 python-dateutil==2.9.0.post0
 python-multipart==0.0.20
 pytz==2025.1
@@ -37,6 +23,5 @@ SQLAlchemy==2.0.37
 starlette==0.45.3
 typing_extensions==4.12.2
 tzdata==2025.1
-tzlocal==5.2
 urllib3==2.3.0
 uvicorn==0.34.0
@@ -1,14 +1,11 @@
-from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, BackgroundTasks, Request
+from fastapi import APIRouter, Depends, HTTPException, UploadFile, File, BackgroundTasks
 from fastapi.responses import StreamingResponse
 from typing import Optional, List
 from io import BytesIO
 import logging
 from datetime import datetime
-import os
-import json
-from pydantic import BaseModel

-from app.schemas.file import (
+from schemas.file import (
     FileSchema,
     CreateFileRequest,
     CreateFileResponse,
@@ -16,7 +13,7 @@ from app.schemas.file import (
     DeleteFileResponse,
     GetFileQueryParams
 )
-from app.schemas.box import (
+from schemas.box import (
     CreateBoxResponse,
     CreateBoxRequest,
     BoxSchema,
@@ -25,11 +22,11 @@ from app.schemas.box import (
     CreateOpenBoxResponse,
     OpenBoxSchema
 )
-from app.services.file import FileService
-from app.services.box import BoxService
-from app.services.task import TaskService
-from app.services.pricing import PricingService
-from app.dependencies import (
+from services.file import FileService
+from services.box import BoxService
+from services.task import TaskService
+from services.pricing import PricingService
+from dependencies import (
     get_file_service,
     get_box_service,
     get_task_service,
@@ -233,18 +230,16 @@ async def delete_open_box(
         raise HTTPException(status_code=400, detail=str(e)
         )

-class InventoryAddRequest(BaseModel):
-    open_box_ids: List[str]
-
-
 @router.post("/tcgplayer/inventory/add", response_class=StreamingResponse)
 async def create_inventory_add_file(
-    body: InventoryAddRequest,
+    request: dict,  # Just use a dict instead
     pricing_service: PricingService = Depends(get_pricing_service),
 ):
     """Create a new inventory add file for download."""
     try:
-        content = pricing_service.generate_tcgplayer_inventory_update_file_with_pricing(body.open_box_ids)
+        # Get IDs directly from the dict
+        open_box_ids = request.get('open_box_ids', [])
+        content = pricing_service.generate_tcgplayer_inventory_update_file_with_pricing(open_box_ids)

         stream = BytesIO(content)
         timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
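The hunk above replaces the InventoryAddRequest Pydantic model with a bare dict body. FastAPI still decodes the JSON payload into that dict, but there is no schema validation any more, so the handler has to guard the open_box_ids key itself. A minimal, hedged sketch of the resulting pattern (standalone app and response body are illustrative, not the repository's route):

from fastapi import FastAPI, HTTPException

app = FastAPI()

@app.post("/tcgplayer/inventory/add")
async def create_inventory_add_file(request: dict):
    # No Pydantic model: pull the key by hand and validate it explicitly.
    open_box_ids = request.get('open_box_ids', [])
    if not isinstance(open_box_ids, list):
        raise HTTPException(status_code=400, detail="open_box_ids must be a list")
    return {"open_box_ids": open_box_ids}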
@@ -281,35 +276,3 @@ async def create_inventory_update_file(
     except Exception as e:
         logger.error(f"Create inventory update file failed: {str(e)}")
         raise HTTPException(status_code=400, detail=str(e))
-
-
-class CookieUpdate(BaseModel):
-    cookies: dict
-
-# cookies
-@router.post("/cookies", response_model=dict)
-async def update_cookies(
-    cookie_data: CookieUpdate
-):
-    try:
-        # see if cookie file exists
-        if not os.path.exists('cookies') or os.path.exists('cookies/tcg_cookies.json'):
-            logger.info("Cannot find cookies")
-            # Create cookies directory if it doesn't exist
-            os.makedirs('cookies', exist_ok=True)
-
-        # Save cookies with timestamp
-        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
-        cookie_path = 'cookies/tcg_cookies.json'
-
-        # Save new cookies
-        with open(cookie_path, 'w') as f:
-            json.dump(cookie_data.cookies, f, indent=2)
-
-        return {"message": "Cookies updated successfully"}
-
-    except Exception as e:
-        raise HTTPException(
-            status_code=500,
-            detail=f"Failed to update cookies: {str(e)}"
-        )
@@ -1,5 +1,5 @@
 from pydantic import BaseModel, Field, ConfigDict
-from app.schemas.base import BaseSchema
+from schemas.base import BaseSchema
 from typing import Optional
 from datetime import datetime

@@ -57,7 +57,7 @@ class CreateOpenBoxRequest(BaseModel):
     product_id: str = Field(..., title="Product ID")
     file_ids: list[str] = Field(None, title="File IDs")
     num_cards_actual: Optional[int] = Field(None, title="Number of cards actual")
-    date_opened: Optional[str] = Field(None, title="Date Opened")
+    date_opened: Optional [str] = Field(None, title="Date Opened")

 # RESPONSE
 class CreateOpenBoxResponse(BaseModel):
@@ -1,38 +0,0 @@
-import browser_cookie3
-import requests
-import json
-
-def send_tcg_cookies(api_url: str, browser_type='brave'):
-    """Get TCGPlayer cookies and send them to the API"""
-    try:
-        # Get cookies from browser
-        cookie_getter = getattr(browser_cookie3, browser_type)
-        cookie_jar = cookie_getter(domain_name='tcgplayer.com')
-
-        # Filter essential cookies
-        cookies = {}
-        for cookie in cookie_jar:
-            if any(key in cookie.name.lower() for key in ['.aspnet', 'tcg', 'session']):
-                cookies[cookie.name] = cookie.value
-
-        # Send to API
-        headers = {
-            'Content-Type': 'application/json'
-        }
-
-        response = requests.post(
-            f"{api_url}",
-            headers=headers,
-            json={'cookies': cookies}
-        )
-
-        response.raise_for_status()
-        print("Cookies updated successfully!")
-
-    except Exception as e:
-        print(f"Error updating cookies: {e}")
-
-if __name__ == "__main__":
-    API_URL = "http://192.168.1.41:8000/api/cookies"  # Update with your API URL
-
-    send_tcg_cookies(API_URL)
@@ -5,7 +5,7 @@ from sqlalchemy import or_
 from sqlalchemy.orm import Session
 import logging

-from app.db.models import (
+from db.models import (
     Box,
     File,
     StagedFileProduct,
@@ -15,9 +15,9 @@ from app.db.models import (
     TCGPlayerGroups,
     Inventory
 )
-from app.db.utils import db_transaction
-from app.schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest
-from app.services.inventory import InventoryService
+from db.utils import db_transaction
+from schemas.box import CreateBoxRequest, UpdateBoxRequest, CreateOpenBoxRequest
+from services.inventory import InventoryService

 logger = logging.getLogger(__name__)

@@ -45,7 +45,7 @@ class BoxService:

     def add_products_to_open_box(self, open_box: OpenBox, product_data: Dict[Product, int]) -> None:
         """Add products to an open box."""
-        for product, quantity in product_data.items(): # TODO BATCH THIS
+        for product, quantity in product_data.items():
             open_box_card = OpenBoxCard(
                 id=str(uuid4()),
                 open_box_id=open_box.id,
@@ -86,8 +86,6 @@ class BoxService:
             type='box',
             product_line='mtg'
         )
-        self.db.add(product)
-        self.db.flush()
         box = Box(
             product_id=product.id,
             type=create_box_data.type,
@@ -95,6 +93,7 @@ class BoxService:
             sku=create_box_data.sku,
             num_cards_expected=create_box_data.num_cards_expected
         )
+        self.db.add(product)
         self.db.add(box)

         return box, True
@@ -6,15 +6,15 @@ import logging
 import os
 from io import StringIO

-from app.db.utils import db_transaction
-from app.db.models import File, StagedFileProduct
-from app.schemas.file import CreateFileRequest
+from db.utils import db_transaction
+from db.models import File, StagedFileProduct
+from schemas.file import CreateFileRequest

 logger = logging.getLogger(__name__)

 class FileConfig:
     """Configuration constants for file processing"""
-    TEMP_DIR = os.path.join(os.getcwd(), 'app/' + 'temp')
+    TEMP_DIR = os.path.join(os.getcwd(), 'temp')

     MANABOX_HEADERS = [
         'Name', 'Set code', 'Set name', 'Collector number', 'Foil',
@@ -3,9 +3,9 @@ from typing import Dict
 from sqlalchemy.orm import Session
 from sqlalchemy.exc import SQLAlchemyError

-from app.db.models import Product, Inventory
-from app.schemas.inventory import UpdateInventoryResponse
-from app.db.utils import db_transaction
+from db.models import Product, Inventory
+from schemas.inventory import UpdateInventoryResponse
+from db.utils import db_transaction


 class InventoryService:
@@ -38,8 +38,7 @@ class InventoryService:
         if inventory is None:
             inventory = Inventory(
                 product_id=product.id,
-                quantity=quantity,
-                warehouse_id="0f0d01b1-97ba-4ab2-9082-22062bca9b06" # TODO FIX
+                quantity=quantity
             )
             self.db.add(inventory)
         else:
@@ -62,7 +61,7 @@ class InventoryService:
         """
         try:
             with db_transaction(self.db):
-                for product, quantity in product_data.items(): # TODO BATCH THIS
+                for product, quantity in product_data.items():
                     self.add_inventory(product, quantity)
             return UpdateInventoryResponse(success=True)
         except SQLAlchemyError:
@@ -1,10 +1,10 @@
 from sqlalchemy.orm import Session
-from app.db.models import File, CardTCGPlayer, Price
-from app.services.util._dataframe import TCGPlayerPricingRow, DataframeUtil
-from app.services.file import FileService
-from app.services.tcgplayer import TCGPlayerService
+from db.models import File, CardTCGPlayer, Price
+from services.util._dataframe import TCGPlayerPricingRow, DataframeUtil
+from services.file import FileService
+from services.tcgplayer import TCGPlayerService
 from uuid import uuid4
-from app.db.utils import db_transaction
+from db.utils import db_transaction
 from typing import List, Dict
 import pandas as pd
 import logging
@@ -117,7 +117,6 @@ class PricingService:
         """Default pricing algorithm with complex pricing rules"""
         tcg_low = row.get('tcg_low_price')
         tcg_low_shipping = row.get('tcg_low_price_with_shipping')
-        tcg_market_price = row.get('tcg_market_price')

         if pd.isna(tcg_low) or pd.isna(tcg_low_shipping):
             logger.warning(f"Missing pricing data for row: {row}")
@@ -125,22 +124,14 @@ class PricingService:
             return row

         # Apply pricing rules
-        if tcg_market_price < 1 and tcg_market_price > 0.25:
-            new_price = tcg_market_price * 1.05
-        elif tcg_market_price < 0.25:
-            new_price = 0.25
+        if tcg_low < 0.35:
+            new_price = 0.35
         elif tcg_low < 5 or tcg_low_shipping < 5:
-            new_price = round(tcg_low+((abs(tcg_market_price-tcg_low))*.75), 2)
-        elif tcg_low_shipping > 20:
-            new_price = round(tcg_low_shipping * 1.0125, 2)
+            new_price = round(tcg_low * 1.25, 2)
+        elif tcg_low_shipping > 25:
+            new_price = round(tcg_low_shipping * 1.025, 2)
         else:
-            # new_price = round(tcg_low_shipping * 1.08, 2)
-            new_price = round(tcg_market_price * 1.03)
-        # if new price is less than half of market price, set to 90% market
-        if new_price < (tcg_market_price / 2):
-            new_price = round(tcg_market_price * 0.85, 2)
-        if new_price < 0.25:
-            new_price = 0.25
+            new_price = round(tcg_low_shipping * 1.10, 2)

         row['new_price'] = new_price
         return row
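Read as a whole, the new default pricing rule no longer consults the market price at all; it keys entirely off TCG low and TCG low with shipping. A hedged summary of the post-change logic as a standalone function (the row is assumed to be a pandas Series with the column names used in the hunk, not the service's actual method):

import pandas as pd

def price_row(row: pd.Series) -> pd.Series:
    tcg_low = row.get('tcg_low_price')
    tcg_low_shipping = row.get('tcg_low_price_with_shipping')
    if pd.isna(tcg_low) or pd.isna(tcg_low_shipping):
        return row                                      # missing data: leave the row untouched
    if tcg_low < 0.35:
        new_price = 0.35                                # hard floor for bulk cards
    elif tcg_low < 5 or tcg_low_shipping < 5:
        new_price = round(tcg_low * 1.25, 2)            # cheap cards: 25% over TCG low
    elif tcg_low_shipping > 25:
        new_price = round(tcg_low_shipping * 1.025, 2)  # expensive cards: thin margin over shipped low
    else:
        new_price = round(tcg_low_shipping * 1.10, 2)   # mid-range cards: 10% over shipped low
    row['new_price'] = new_price
    return row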
@@ -3,12 +3,12 @@ from uuid import uuid4
 from pandas import DataFrame
 from sqlalchemy.orm import Session

-from app.db.utils import db_transaction
-from app.db.models import CardManabox, CardTCGPlayer, StagedFileProduct, TCGPlayerGroups
-from app.services.util._dataframe import ManaboxRow, DataframeUtil
-from app.services.file import FileService
-from app.services.tcgplayer import TCGPlayerService
-from app.services.storage import StorageService
+from db.utils import db_transaction
+from db.models import CardManabox, CardTCGPlayer, StagedFileProduct, TCGPlayerGroups
+from services.util._dataframe import ManaboxRow, DataframeUtil
+from services.file import FileService
+from services.tcgplayer import TCGPlayerService
+from services.storage import StorageService

 logger = getLogger(__name__)

@@ -2,8 +2,8 @@ from uuid import uuid4
 from typing import List, TypedDict
 from sqlalchemy.orm import Session

-from app.db.utils import db_transaction
-from app.db.models import (
+from db.utils import db_transaction
+from db.models import (
     Warehouse,
     User,
     StagedFileProduct,
@@ -2,9 +2,9 @@ from apscheduler.schedulers.background import BackgroundScheduler
 import logging
 from typing import Dict, Callable
 from sqlalchemy.orm import Session
-from app.services.product import ProductService
-from app.db.models import File
-from app.services.pricing import PricingService
+from services.product import ProductService
+from db.models import File
+from services.pricing import PricingService


 class TaskService:
@@ -23,7 +23,7 @@ class TaskService:
     # self.pricing_service.generate_tcgplayer_inventory_update_file_with_pricing(['e20cc342-23cb-4593-89cb-56a0cb3ed3f3'])

     def register_scheduled_tasks(self):
-        self.scheduler.add_job(self.hourly_pricing, 'cron', minute='45')
+        self.scheduler.add_job(self.hourly_pricing, 'cron', minute='0')
        self.logger.info("Scheduled tasks registered.")

     def hourly_pricing(self):
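The cron trigger moves from minute 45 to the top of the hour. A small, hedged APScheduler sketch of the same registration, with a stand-in job body in place of TaskService.hourly_pricing:

from apscheduler.schedulers.background import BackgroundScheduler

def hourly_pricing():
    # stand-in for TaskService.hourly_pricing
    print("running hourly pricing")

scheduler = BackgroundScheduler()
scheduler.add_job(hourly_pricing, 'cron', minute='0')   # fires at the top of every hour
scheduler.start()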
@@ -1,10 +1,10 @@
-from app.db.models import TCGPlayerGroups, CardTCGPlayer, Product, Card, File, Inventory, OpenBox, OpenBoxCard
+from db.models import TCGPlayerGroups, CardTCGPlayer, Product, Card, File, Inventory, OpenBox, OpenBoxCard
 import requests
-from app.services.util._dataframe import TCGPlayerPricingRow, DataframeUtil, ManaboxRow
-from app.services.file import FileService
-from app.services.inventory import InventoryService
+from services.util._dataframe import TCGPlayerPricingRow, DataframeUtil, ManaboxRow
+from services.file import FileService
+from services.inventory import InventoryService
 from sqlalchemy.orm import Session
-from app.db.utils import db_transaction
+from db.utils import db_transaction
 from uuid import uuid4 as uuid
 import browser_cookie3
 import webbrowser
@@ -19,8 +19,7 @@ import time
 from typing import List, Dict, Optional
 import pandas as pd
 from sqlalchemy.exc import SQLAlchemyError
-from app.schemas.file import CreateFileRequest
-import os
+from schemas.file import CreateFileRequest


 logger = logging.getLogger(__name__)
@@ -99,15 +98,6 @@ class TCGPlayerService:
         with db_transaction(self.db):
             self._insert_groups(groups)

-    def get_cookies_from_file(self) -> Dict:
-        # check if cookies file exists
-        if not os.path.exists('cookies/tcg_cookies.json'):
-            raise ValueError("Cookies file not found")
-        with open('cookies/tcg_cookies.json', 'r') as f:
-            logger.debug("Loading cookies from file")
-            cookies = json.load(f)
-        return cookies
-
     def _get_browser_cookies(self) -> Optional[Dict]:
         """Retrieve cookies from the specified browser"""
         try:
@@ -119,69 +109,20 @@ class TCGPlayerService:
             logger.error(f"Failed to get browser cookies: {str(e)}")
             return None

-    def is_in_docker(self) -> bool:
-        """Check if we're running inside a Docker container using multiple methods"""
-        # Method 1: Check cgroup
-        try:
-            with open('/proc/1/cgroup', 'r') as f:
-                content = f.read().lower()
-                if any(container_id in content for container_id in ['docker', 'containerd', 'kubepods']):
-                    logger.debug("Docker detected via cgroup")
-                    return True
-        except Exception as e:
-            logger.debug(f"Could not read cgroup file: {e}")
-
-        # Method 2: Check /.dockerenv file
-        if os.path.exists('/.dockerenv'):
-            logger.debug("Docker detected via /.dockerenv file")
-            return True
-
-        # Method 3: Check environment variables
-        docker_env = any(os.environ.get(var, False) for var in [
-            'DOCKER_CONTAINER',
-            'IN_DOCKER',
-            'KUBERNETES_SERVICE_HOST', # For k8s
-            'DOCKER_HOST'
-        ])
-        if docker_env:
-            logger.debug("Docker detected via environment variables")
-            return True
-
-        # Method 4: Check container runtime
-        try:
-            with open('/proc/self/mountinfo', 'r') as f:
-                content = f.read().lower()
-                if any(rt in content for rt in ['docker', 'containerd', 'kubernetes']):
-                    logger.debug("Docker detected via mountinfo")
-                    return True
-        except Exception as e:
-            logger.debug(f"Could not read mountinfo: {e}")
-
-        logger.debug("No Docker environment detected")
-        return False
-
     def _send_request(self, url: str, method: str, data=None, except_302=False) -> requests.Response:
         """Send a request with the specified cookies"""
-        # Rate limiting logic
+        # if previous request was made less than 10 seconds ago, wait until current time is 10 seconds after previous request
         if self.previous_request_time:
             time_diff = (datetime.now() - self.previous_request_time).total_seconds()
             if time_diff < 10:
                 logger.info(f"Waiting 10 seconds before next request...")
                 time.sleep(10 - time_diff)

         headers = self._set_headers(method)

-        # Move cookie initialization outside and make it more explicit
         if not self.cookies:
-            if self.is_in_docker():
-                logger.debug("Running in Docker - using cookies from file")
-                self.cookies = self.get_cookies_from_file()
-            else:
-                logger.debug("Not in Docker - using browser cookies")
-                self.cookies = self._get_browser_cookies()
+            self.cookies = self._get_browser_cookies()

         if not self.cookies:
-            raise ValueError("Failed to retrieve cookies")
+            raise ValueError("Failed to retrieve browser cookies")

         try:
             #logger.info(f"debug: request url {url}, method {method}, data {data}")
@@ -518,7 +459,7 @@ class TCGPlayerService:
     def get_pricing_export_for_all_products(self) -> File:
         """
         """
-        DEBUG = False
+        DEBUG = True
         if DEBUG:
             logger.debug("DEBUG: Using existing pricing export file")
             file = self.db.query(File).filter(File.type == 'tcgplayer_pricing_export').first()
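With get_cookies_from_file and the Docker detection removed, the service has exactly one cookie source left: a local browser profile read via browser_cookie3, the same call pattern the deleted set_cookies.py script used. A hedged sketch of that remaining path (browser name and the unfiltered cookie dict are assumptions for illustration):

import browser_cookie3

def get_tcgplayer_cookies(browser: str = 'brave') -> dict:
    # Read the TCGplayer session cookies straight from a local browser profile;
    # after this change there is no file-based fallback for containerized runs.
    jar = getattr(browser_cookie3, browser)(domain_name='tcgplayer.com')
    return {c.name: c.value for c in jar}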
@@ -1,6 +1,6 @@
 import pandas as pd
 from io import StringIO
-from app.db.models import File
+from db.models import File


 class ManaboxRow:
@@ -2,7 +2,7 @@ from fastapi.testclient import TestClient
 from fastapi import BackgroundTasks
 import pytest
 import os
-from app.main import app
+from main import app


@@ -4,9 +4,9 @@ import pytest
 from unittest.mock import Mock, patch
 import asyncio
 import os
-from app.main import app
-from app.services.file import FileService
-from app.services.task import TaskService
+from main import app
+from services.file import FileService
+from services.task import TaskService

 client = TestClient(app)