Compare commits
No commits in common. "9a666df52c692bc9403beb0166a7b0cce15c6da4" and "91d55efd20ce4979a0b40cdee0793839bc2916dd" have entirely different histories.
9a666df52c...91d55efd20
3  .gitignore  vendored
@@ -1,5 +1,4 @@
 *.db
 __pycache__
 .venv
 *.sqlite3
-*.log
@@ -1,19 +1,20 @@
 version: '3.8'
 
 services:
-  scraper:
-    build: ./scraper
+  web:
+    build: ./server
+    ports:
+      - "5000:5000"
     volumes:
-      - ./scraper:/app
+      - ./server:/app
     environment:
+      - DEBUG=1
       - POKEMANS_DB_URL
       - PRAW_CLIENT_ID
       - PRAW_CLIENT_SECRET
       - PRAW_USERNAME
       - PRAW_PASSWORD
       - POKEMANS_WEBHOOK_URL
-    depends_on:
-      - db
     command:
       python main.py
 
@@ -33,17 +34,4 @@ services:
     volumes:
       - ./client:/usr/share/nginx/html
     depends_on:
-      - server
+      - web
-
-  server:
-    build: ./server
-    ports:
-      - "8000:8000"
-    volumes:
-      - ./server:/app
-    depends_on:
-      - db
-      - scraper
-    command:
-      python manage.py runserver 0.0.0.0:8000
-
@@ -1,14 +0,0 @@
-FROM python:3.11
-
-# Set environment variables
-ENV PYTHONDONTWRITEBYTECODE 1
-ENV PYTHONUNBUFFERED 1
-
-# Set the working directory in the container
-WORKDIR /app
-
-# Install any needed packages specified in requirements.txt
-COPY requirements.txt .
-RUN pip install --no-cache-dir -r requirements.txt
-
-CMD ["python", "main.py"]
247  scraper/app.py
@@ -1,247 +0,0 @@
from datetime import datetime, timedelta
import requests
from models import Post
import praw
from zoneinfo import ZoneInfo
from exceptions import InvalidMethodError, InvalidDataTypeError, APIRequestError
from app_log import LoggingManager
from threads import Scheduler, ThreadManager


class ApiRequestHandler:
    def __init__(self, api_url: str):
        self.api_url = api_url
        self.log_manager = LoggingManager("scraper.log")

    def send_api_request(
        self, method: str, api_url: str, data=None, params=None
    ) -> dict:
        if method not in ["GET", "POST", "PUT", "DELETE"]:
            raise InvalidMethodError(f"Invalid method: {method}")
        if data is not None and not isinstance(data, dict):
            raise InvalidDataTypeError(f"Invalid data type: {type(data)} expected dict")
        if params is not None and not isinstance(params, dict):
            raise InvalidDataTypeError(
                f"Invalid data type: {type(params)} expected dict"
            )
        response = requests.request(method, api_url, data=data, params=params)
        success_codes = [200, 201, 204]
        if response.status_code not in success_codes:
            self.log_manager.error(
                f"API request failed: {response.status_code} - {response.text}"
            )
            raise APIRequestError(response.status_code, response.text)
        return response.json()


class PostManager:
    def __init__(self, api_request_handler: ApiRequestHandler):
        self.api_request_handler = api_request_handler
        self.log_manager = LoggingManager("scraper.log")

    def get_post_by_reddit_id(self, reddit_id: str) -> dict:
        self.log_manager.log(f"Getting post by reddit id: {reddit_id}")
        response = self.api_request_handler.send_api_request(
            "GET", f"{self.api_request_handler.api_url}posts/?reddit_id={reddit_id}"
        )
        return response

    def post_exists(self, reddit_id: str) -> bool:
        self.log_manager.log(f"Checking if post exists: {reddit_id}")
        response = self.get_post_by_reddit_id(reddit_id)
        if len(response) == 0:
            return False
        return True

    def insert_post(self, post) -> dict:
        self.log_manager.log(f"Inserting post: {post.reddit_id}")
        self.post = post
        data = {
            "reddit_id": self.post.reddit_id,
            "title": self.post.title,
            "name": self.post.name,
            "url": self.post.url,
            "created_utc": self.post.created_utc,
            "selftext": self.post.selftext,
            "permalink": self.post.permalink,
        }
        response = self.api_request_handler.send_api_request(
            "POST", f"{self.api_request_handler.api_url}posts/", data=data
        )
        return response

    def get_posts_from_last_7_days(self) -> dict:
        self.log_manager.log("Getting posts from last 7 days")
        posts_from_last_7_days = self.api_request_handler.send_api_request(
            "GET", f"{self.api_request_handler.api_url}posts/?last_7_days=1"
        )
        return posts_from_last_7_days


class PostAnalyticsManager:
    def __init__(
        self, api_request_handler: ApiRequestHandler, post_manager: PostManager
    ):
        self.api_request_handler = api_request_handler
        self.post_manager = post_manager
        self.log_manager = LoggingManager("scraper.log")

    def check_update_requirements(self, reddit_id: str) -> bool:
        self.log_manager.log(f"Checking update requirements for {reddit_id}")

        # Specify your desired timezone, e.g., UTC
        timezone = ZoneInfo("UTC")

        # Make your datetime objects timezone-aware
        fifteen_minutes_ago = datetime.now(timezone) - timedelta(minutes=15)
        now = datetime.now(timezone)

        # Format datetime objects for the API request
        time_begin_str = fifteen_minutes_ago.isoformat(timespec="seconds")
        time_end_str = now.isoformat(timespec="seconds")

        post_id = self.post_manager.get_post_by_reddit_id(reddit_id)
        post_id = post_id[0]["id"]
        self.log_manager.log(
            f"{self.api_request_handler.api_url}post_analytics/?post={post_id}&time_begin={time_begin_str}&time_end={time_end_str}"
        )

        response = self.api_request_handler.send_api_request(
            "GET",
            f"{self.api_request_handler.api_url}post_analytics/?post={post_id}&time_begin={time_begin_str}&time_end={time_end_str}",
        )

        if len(response) > 0:
            # post should not be updated
            return False

        # post should be updated
        return True

    def update_post_analytics(self, post: Post) -> dict:
        self.log_manager.log(f"Updating post analytics for {post.reddit_id}")
        post_id = self.post_manager.get_post_by_reddit_id(post.reddit_id)
        post_id = post_id[0]["id"]
        data = {
            "post": post_id,
            "score": post.score,
            "num_comments": post.num_comments,
            "upvote_ratio": post.upvote_ratio,
        }
        response = self.api_request_handler.send_api_request(
            "POST", f"{self.api_request_handler.api_url}post_analytics/", data=data
        )
        return response


class RedditMonitor:
    def __init__(
        self, client_id, client_secret, user_agent, username, password, subreddit_name
    ):
        self.reddit = praw.Reddit(
            client_id=client_id,
            client_secret=client_secret,
            user_agent=user_agent,
            username=username,
            password=password,
        )
        self.subreddit = self.reddit.subreddit(subreddit_name)
        self.log_manager = LoggingManager("scraper.log")

    def stream_submissions(self):
        self.log_manager.info("Starting submission stream")
        for submission in self.subreddit.stream.submissions():
            yield submission

    def update_submissions(self, posts_to_update):
        self.log_manager.info("Updating submissions")
        for post in posts_to_update:
            submission = self.reddit.submission(id=post["reddit_id"])
            yield submission


class SubmissionManager:
    def __init__(
        self,
        reddit_monitor: RedditMonitor,
        post_manager: PostManager,
        post_analytics_manager: PostAnalyticsManager,
        WebhookNotifier,
    ):
        self.reddit_monitor = reddit_monitor
        self.post_manager = post_manager
        self.post_analytics_manager = post_analytics_manager
        self.webhook_notifier = WebhookNotifier
        self.log_manager = LoggingManager("scraper.log")

    def convert_submission_to_post(self, submission):
        post = Post(
            reddit_id=submission.id,
            title=submission.title,
            name=submission.name,
            url=submission.url,
            score=submission.score,
            num_comments=submission.num_comments,
            created_utc=submission.created_utc,
            selftext=submission.selftext,
            permalink=submission.permalink,
            upvote_ratio=submission.upvote_ratio,
        )
        return post

    def process_submissions(self, submissions):
        for submission in submissions:
            self.log_manager.log(submission)
            if self.post_manager.post_exists(submission.id):
                self.log_manager.log("Post exists")
                self.log_manager.log(f"post id: {submission.id}")
                if self.post_analytics_manager.check_update_requirements(submission.id):
                    self.log_manager.log("Update requirements met")
                    post = self.convert_submission_to_post(submission)
                    self.post_analytics_manager.update_post_analytics(post)
            else:
                post = self.convert_submission_to_post(submission)
                self.post_manager.insert_post(post)
                self.post_analytics_manager.update_post_analytics(post)
                self.webhook_notifier.send_notification(post)


class Application:
    def __init__(
        self,
        reddit_monitor,
        webhook_notifier,
        api_conn,
        post_manager,
        post_analytics_manager,
        submission_manager,
    ):
        self.reddit_monitor = reddit_monitor
        self.webhook_notifier = webhook_notifier
        self.api_conn = api_conn
        self.post_manager = post_manager
        self.post_analytics_manager = post_analytics_manager
        self.log_manager = LoggingManager("scraper.log")
        self.submission_manager = submission_manager
        self.scheduler = None
        self.thread_manager = None

    def periodic_update(self):
        self.log_manager.info("Running periodic update")
        to_be_updated = self.post_manager.get_posts_from_last_7_days()
        submissions = self.reddit_monitor.update_submissions(to_be_updated)
        self.submission_manager.process_submissions(submissions)

    def run_periodic_update(self, interval):
        self.scheduler = Scheduler(interval, self.periodic_update)
        self.scheduler.run()

    def run(self):
        self.log_manager.info("Application started")
        update_frequency = 60 * 15  # 15 minutes in seconds
        self.thread_manager = ThreadManager(
            target=self.run_periodic_update, args=(update_frequency,)
        )
        self.thread_manager.run()
        submissions = self.reddit_monitor.stream_submissions()
        self.submission_manager.process_submissions(submissions)
@@ -1,46 +0,0 @@
import logging
from logging.handlers import RotatingFileHandler
import sys


class SingletonMeta(type):
    _instances = {}

    def __call__(cls, *args, **kwargs):
        if cls not in cls._instances:
            cls._instances[cls] = super(SingletonMeta, cls).__call__(*args, **kwargs)
        return cls._instances[cls]


class LoggingManager(metaclass=SingletonMeta):
    def __init__(self, log_file):
        if not hasattr(self, "logger"):
            self.log_file = log_file
            self.logger = logging.getLogger("scraper")
            self.logger.setLevel(logging.DEBUG)

            file_handler = RotatingFileHandler(
                self.log_file, maxBytes=1024 * 1024 * 5, backupCount=5
            )
            file_handler.setLevel(logging.DEBUG)

            stream_handler = logging.StreamHandler(sys.stdout)
            stream_handler.setLevel(logging.DEBUG)

            formatter = logging.Formatter(
                "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
            )
            file_handler.setFormatter(formatter)
            stream_handler.setFormatter(formatter)

            self.logger.addHandler(file_handler)
            self.logger.addHandler(stream_handler)

    def log(self, message):
        self.logger.debug(message)

    def error(self, message):
        self.logger.error(message)

    def info(self, message):
        self.logger.info(message)
@@ -1,19 +0,0 @@
class InvalidMethodError(Exception):
    """Exception raised for unsupported HTTP methods."""

    pass


class InvalidDataTypeError(Exception):
    """Exception raised for unsupported data types."""

    pass


class APIRequestError(Exception):
    """Exception raised for API request errors."""

    def __init__(self, status_code, message):
        self.status_code = status_code
        self.message = message
        super().__init__(f"API Request Failed: {status_code} - {message}")
@@ -1,27 +0,0 @@
class Post:
    def __init__(
        self,
        reddit_id,
        title,
        name,
        url,
        score,
        num_comments,
        created_utc,
        selftext,
        permalink,
        upvote_ratio,
    ):
        self.reddit_id = reddit_id
        self.title = title
        self.name = name
        self.url = url
        self.score = score
        self.num_comments = num_comments
        self.created_utc = created_utc
        self.selftext = selftext
        self.permalink = permalink
        self.upvote_ratio = upvote_ratio

    def __str__(self):
        return f"{self.reddit_id} {self.title} {self.name} {self.url} {self.score} {self.num_comments} {self.created_utc} {self.selftext} {self.permalink} {self.upvote_ratio}"
@@ -1,16 +0,0 @@
asgiref==3.7.2
certifi==2024.2.2
charset-normalizer==3.3.2
Django==5.0.2
djangorestframework==3.14.0
greenlet==3.0.3
idna==3.6
praw==7.7.1
prawcore==2.4.0
pytz==2024.1
requests==2.31.0
sqlparse==0.4.4
typing_extensions==4.10.0
update-checker==0.18.0
urllib3==2.2.1
websocket-client==1.7.0
@@ -1,26 +0,0 @@
import threading


class Scheduler:
    def __init__(self, interval, function):
        self.interval = interval
        self.function = function
        self.stop_event = threading.Event()

    def run(self):
        while not self.stop_event.wait(self.interval):
            self.function()

    def stop(self):
        self.stop_event.set()


class ThreadManager:
    def __init__(self, target, args: tuple = ()) -> None:
        self.target = target
        self.args = args

    def run(self):
        thread = threading.Thread(target=self.target, args=self.args)
        thread.daemon = True
        thread.start()
@@ -1,27 +0,0 @@
import requests
from app_log import LoggingManager


class WebhookNotifier:
    def __init__(self, webhook_url, disable_webhook=False):
        self.webhook_url = webhook_url
        self.disable_webhook = disable_webhook
        self.log_manager = LoggingManager("scraper.log")

    def send_notification(self, post):
        title = post.title
        url = post.url
        permalink = post.permalink
        selftext = post.selftext
        content = f"""
**New Deal!**
**Title:** {title}
**URL:** {url}
**Permalink:** https://old.reddit.com{permalink}
**Selftext:** {selftext}"""
        if not self.disable_webhook:
            self.log_manager.log(f"Sending notification to {self.webhook_url}")
            try:
                requests.post(self.webhook_url, data={"content": content})
            except Exception as e:
                self.log_manager.error(f"Failed to send notification: {e}")
@@ -1,4 +1,3 @@
-# Use an official Python runtime as a base image
 FROM python:3.11
 
 # Set environment variables
@@ -16,5 +15,4 @@ RUN pip install --no-cache-dir -r requirements.txt
 EXPOSE 8000
 
 # Run python manage.py runserver 0.0.0.0:8000 when the container launches
-CMD ["python", "manage.py", "runserver", "0.0.0.0:8000"]
+CMD ["python", "main.py"]
-
49  server/app.py  Normal file
@@ -0,0 +1,49 @@
import threading
import time
from datetime import datetime
from models import Submission, session_scope, submission_exists, update_submission, insert_submission


class Application:
    def __init__(self, reddit_monitor, webhook_notifier):
        self.reddit_monitor = reddit_monitor
        self.webhook_notifier = webhook_notifier

    def process_submissions(self, submissions):
        with session_scope() as session:
            for submission in submissions:
                if submission_exists(session, submission.id):
                    update_submission(session, submission)
                else:
                    submission = Submission(
                        id=submission.id,
                        title=submission.title,
                        name=submission.name,
                        url=submission.url,
                        score=submission.score,
                        num_comments=submission.num_comments,
                        created_utc=submission.created_utc,
                        selftext=submission.selftext,
                        permalink=submission.permalink,
                        upvote_ratio=submission.upvote_ratio
                    )
                    insert_submission(session, submission)
                    self.webhook_notifier.send_notification(submission)

    def periodic_update(self):
        submissions = self.reddit_monitor.update_submissions()
        self.process_submissions(submissions)

    def run_periodic_update(self, interval=3600):
        while True:
            self.periodic_update()
            print(f"Existing posts Updated at {datetime.now()}")
            time.sleep(interval)

    def run(self):
        update_frequency = 3600  # 3600
        update_thread = threading.Thread(target=self.run_periodic_update, args=(update_frequency, ))
        update_thread.daemon = True
        update_thread.start()
        submissions = self.reddit_monitor.stream_submissions()
        self.process_submissions(submissions)
@@ -2,13 +2,14 @@ import os
 
 
 class Config:
+    POKEMANS_DB_URL = os.getenv("POKEMANS_DB_URL", "sqlite:///pokemans.db")
     PRAW_CLIENT_ID = os.getenv("PRAW_CLIENT_ID")
     PRAW_CLIENT_SECRET = os.getenv("PRAW_CLIENT_SECRET")
     PRAW_USERNAME = os.getenv("PRAW_USERNAME")
     PRAW_PASSWORD = os.getenv("PRAW_PASSWORD")
     POKEMANS_WEBHOOK_URL = os.getenv("POKEMANS_WEBHOOK_URL")
-    PKMN_ENV = "dev" # os.getenv("PKMN_ENV")
+    PKMN_ENV = 'dev' # os.getenv("PKMN_ENV")
     SUBREDDIT_NAME = "pkmntcgdeals"
     USER_AGENT = "praw:zman.video_repost_bot:v0.1.0 (by u/jzman21)"
     DISABLE_WEBHOOK = False
-    API_URL = "http://server:8000/api/"
+    DESTROY_DB = False
@@ -1,18 +1,11 @@
+from models import create_db, reset_db
+from reddit_monitor import RedditMonitor
 from webhook import WebhookNotifier
-from app import (
-    Application,
-    RedditMonitor,
-    ApiRequestHandler,
-    PostManager,
-    PostAnalyticsManager,
-    SubmissionManager,
-)
+from app import Application
 from config import Config
-from app_log import LoggingManager
 
 
 if __name__ == "__main__":
-    log_manager = LoggingManager("scraper.log")
     client_id = Config.PRAW_CLIENT_ID
     client_secret = Config.PRAW_CLIENT_SECRET
     user_agent = Config.USER_AGENT
@@ -21,36 +14,25 @@ if __name__ == "__main__":
     subreddit_name = Config.SUBREDDIT_NAME
     discord_webhook_url = Config.POKEMANS_WEBHOOK_URL
     disable_webhook = Config.DISABLE_WEBHOOK
+    destroy_db = Config.DESTROY_DB
     pkmn_env = Config.PKMN_ENV
-    api_url = Config.API_URL
 
-    reddit_monitor = RedditMonitor(
-        client_id, client_secret, user_agent, username, password, subreddit_name
-    )
+    if destroy_db and pkmn_env == 'dev':
+        reset_db()
+    else:
+        create_db()
+
+    reddit_monitor = RedditMonitor(client_id, client_secret, user_agent, username, password, subreddit_name)
     webhook_notifier = WebhookNotifier(discord_webhook_url, disable_webhook)
-    api_conn = ApiRequestHandler(api_url)
-    post_manager = PostManager(api_conn)
-    post_analytics_manager = PostAnalyticsManager(api_conn, post_manager)
-    submission_manager = SubmissionManager(
-        reddit_monitor, post_manager, post_analytics_manager, webhook_notifier
-    )
-    app = Application(
-        reddit_monitor,
-        webhook_notifier,
-        api_conn,
-        post_manager,
-        post_analytics_manager,
-        submission_manager,
-    )
+    app = Application(reddit_monitor, webhook_notifier)
+    print("Starting app")
     app.run()
 
 """
 TODO:
-- pull upvote ration into analytics?
-- sqlite vs postgres figure out
+- implement django
 - basic front end (react)
 - tests
-- logging
 - Filter out canadian/uk deals
 - track score and number of comments over time in db
 - try to identify product, number of cards, price per card, etc
@@ -58,4 +40,4 @@ TODO:
 - try to identify platform ie. costco for gift card, tiktok for coupons, etc.
 - support for craigslist, ebay, etc.
 - front end - vizualization, classification, lookup, etc.
 """
@@ -1,22 +0,0 @@
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys


def main():
    """Run administrative tasks."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pokemans_django.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
79  server/models.py  Normal file
@@ -0,0 +1,79 @@
from sqlalchemy import create_engine, Column, Integer, String, Float
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
import os
from contextlib import contextmanager


Base = declarative_base()
Session = sessionmaker()


@contextmanager
def session_scope():
    session = get_session()
    try:
        yield session
        session.commit()
    except:
        session.rollback()
        raise
    finally:
        session.close()


class Submission(Base):
    __tablename__ = 'submissions'
    id = Column(String, primary_key=True)
    title = Column(String)
    name = Column(String)
    url = Column(String)
    score = Column(Integer)
    num_comments = Column(Integer)
    created_utc = Column(Float)
    selftext = Column(String)
    permalink = Column(String)
    upvote_ratio = Column(Float)


def get_engine(database_url=os.getenv("POKEMANS_DB_URL", "sqlite:///pokemans.db")):
    engine = create_engine(database_url)
    Session.configure(bind=engine)
    return engine


def create_db():
    engine = get_engine()
    Base.metadata.create_all(engine)


def reset_db():
    engine = get_engine()
    Base.metadata.drop_all(engine)
    Base.metadata.create_all(engine)


def get_session():
    return Session()


def insert_submission(session, submission):
    session.add(submission)
    session.commit()


def submission_exists(session, submission_id):
    return session.query(Submission).filter(Submission.id == submission_id).first() is not None


def get_all_submissions(session):
    return session.query(Submission).all()


def delete_submission(session, submission_id):
    session.query(Submission).filter(Submission.id == submission_id).delete()
    session.commit()


def update_submission(session, submission):
    session.query(Submission).filter(Submission.id == submission.id).update({
        'title': submission.title,
        'name': submission.name,
        'url': submission.url,
        'score': submission.score,
        'num_comments': submission.num_comments,
        'created_utc': submission.created_utc,
        'selftext': submission.selftext,
        'permalink': submission.permalink,
        'upvote_ratio': submission.upvote_ratio
    })
    session.commit()
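For orientation only (not part of the diff): a minimal sketch of how the session helpers in the new server/models.py are meant to compose, assuming the default sqlite URL from POKEMANS_DB_URL; the reddit id "abc123" is made up for illustration.

```python
# Illustrative sketch; mirrors the pattern server/app.py uses with these helpers.
from models import Submission, create_db, session_scope, submission_exists, insert_submission

create_db()  # builds the submissions table via get_engine() and Base.metadata.create_all

with session_scope() as session:  # commits on success, rolls back and re-raises on error
    if not submission_exists(session, "abc123"):  # "abc123" is a hypothetical reddit id
        insert_submission(session, Submission(id="abc123", title="Example deal", score=1))
```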
@@ -1,3 +0,0 @@
from django.contrib import admin

# Register your models here.
@@ -1,6 +0,0 @@
from django.apps import AppConfig


class PokemansConfig(AppConfig):
    default_auto_field = "django.db.models.BigAutoField"
    name = "pokemans_app"
@@ -1,40 +0,0 @@
# Generated by Django 5.0.2 on 2024-03-04 05:15

import django.db.models.deletion
from django.db import migrations, models


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('reddit_id', models.CharField(max_length=255, unique=True)),
                ('title', models.CharField(max_length=255)),
                ('name', models.CharField(max_length=255)),
                ('url', models.CharField(max_length=555)),
                ('created_utc', models.FloatField()),
                ('selftext', models.CharField(blank=True, max_length=2555, null=True)),
                ('permalink', models.CharField(max_length=255)),
                ('updated_at', models.DateTimeField(auto_now=True)),
            ],
        ),
        migrations.CreateModel(
            name='PostAnalytics',
            fields=[
                ('id', models.AutoField(primary_key=True, serialize=False)),
                ('num_comments', models.IntegerField()),
                ('score', models.IntegerField()),
                ('upvote_ratio', models.FloatField()),
                ('created_at', models.DateTimeField(auto_now=True)),
                ('post', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='pokemans_app.post')),
            ],
        ),
    ]
@@ -1,22 +0,0 @@
from django.db import models


class Post(models.Model):
    id = models.AutoField(primary_key=True)
    reddit_id = models.CharField(max_length=255, unique=True)
    title = models.CharField(max_length=255)
    name = models.CharField(max_length=255)
    url = models.CharField(max_length=555)
    created_utc = models.FloatField()
    selftext = models.CharField(max_length=2555, blank=True, null=True)
    permalink = models.CharField(max_length=255)
    updated_at = models.DateTimeField(auto_now=True)


class PostAnalytics(models.Model):
    id = models.AutoField(primary_key=True)
    post = models.ForeignKey(Post, on_delete=models.CASCADE)
    num_comments = models.IntegerField()
    score = models.IntegerField()
    upvote_ratio = models.FloatField()
    created_at = models.DateTimeField(auto_now=True)
@@ -1,13 +0,0 @@
from rest_framework import serializers
from .models import Post, PostAnalytics


class PostSerializer(serializers.ModelSerializer):
    class Meta:
        model = Post
        fields = '__all__'


class PostAnalyticsSerializer(serializers.ModelSerializer):
    class Meta:
        model = PostAnalytics
        fields = '__all__'
@@ -1,3 +0,0 @@
from django.test import TestCase

# Create your tests here.
@@ -1,57 +0,0 @@
from django.shortcuts import render
from rest_framework import viewsets
from .models import Post, PostAnalytics
from .serializers import PostSerializer, PostAnalyticsSerializer
from datetime import timedelta
from django.utils import timezone
from django.utils.dateparse import parse_datetime


class PostViewSet(viewsets.ModelViewSet):
    queryset = Post.objects.all()
    serializer_class = PostSerializer

    def get_queryset(self):
        queryset = Post.objects.all()
        reddit_id = self.request.query_params.get('reddit_id', None)
        last_7_days = self.request.query_params.get('last_7_days', None)

        if reddit_id is not None:
            queryset = queryset.filter(reddit_id=reddit_id)

        if last_7_days is not None:
            # Get the current time and subtract 7 days, convert to Unix timestamp
            date_threshold = timezone.now() - timedelta(days=7)
            date_threshold_unix = date_threshold.timestamp()
            # Filter using the Unix timestamp
            queryset = queryset.filter(created_utc__gte=date_threshold_unix)

        return queryset


class PostAnalyticsViewSet(viewsets.ModelViewSet):
    queryset = PostAnalytics.objects.all()
    serializer_class = PostAnalyticsSerializer

    def get_queryset(self):
        queryset = PostAnalytics.objects.all()
        post_id = self.request.query_params.get('post', None)
        time_begin = self.request.query_params.get('time_begin', None)
        time_end = self.request.query_params.get('time_end', None)

        if post_id is not None:
            queryset = queryset.filter(post=post_id)

        if time_begin is not None and time_end is not None:
            # Parse the datetime strings to timezone-aware datetime objects
            time_begin_parsed = parse_datetime(time_begin)
            time_end_parsed = parse_datetime(time_end)

            # Ensure datetime objects are timezone-aware
            if time_begin_parsed is not None and time_end_parsed is not None:
                queryset = queryset.filter(created_at__gte=time_begin_parsed, created_at__lte=time_end_parsed)
            else:
                # Handle invalid datetime format
                # This is where you could log an error or handle the case where datetime strings are invalid
                pass

        return queryset
@@ -1,16 +0,0 @@
"""
ASGI config for pokemans_server project.

It exposes the ASGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/4.2/howto/deployment/asgi/
"""

import os

from django.core.asgi import get_asgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pokemans_server.settings")

application = get_asgi_application()
@@ -1,125 +0,0 @@
"""
Django settings for pokemans_server project.

Generated by 'django-admin startproject' using Django 4.2.4.

For more information on this file, see
https://docs.djangoproject.com/en/4.2/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/4.2/ref/settings/
"""

from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/4.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = "django-insecure-$zr_vau$5sj8cpz1srj#hm37#h-48l571mwy!@x!p4jv)@5xwn"

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['server', 'localhost',]


# Application definition

INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "rest_framework",
    "pokemans_app",
]

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]

ROOT_URLCONF = "pokemans_django.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]

WSGI_APPLICATION = "pokemans_django.wsgi.application"


# Database
# https://docs.djangoproject.com/en/4.2/ref/settings/#databases

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": BASE_DIR / "db.sqlite3",
    }
}


# Password validation
# https://docs.djangoproject.com/en/4.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]


# Internationalization
# https://docs.djangoproject.com/en/4.2/topics/i18n/

LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.2/howto/static-files/

STATIC_URL = "static/"

# Default primary key field type
# https://docs.djangoproject.com/en/4.2/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"
@@ -1,30 +0,0 @@
"""
URL configuration for pokemans_server project.

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/4.2/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  path('', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from pokemans_app.views import PostViewSet, PostAnalyticsViewSet


router = DefaultRouter()
router.register(r"posts", PostViewSet)
router.register(r"post_analytics", PostAnalyticsViewSet)

urlpatterns = [
    path("admin/", admin.site.urls),
    path("api/", include(router.urls)),
]
@@ -1,16 +0,0 @@
"""
WSGI config for pokemans_server project.

It exposes the WSGI callable as a module-level variable named ``application``.

For more information on this file, see
https://docs.djangoproject.com/en/4.2/howto/deployment/wsgi/
"""

import os

from django.core.wsgi import get_wsgi_application

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "pokemans_django.settings")

application = get_wsgi_application()
27  server/reddit_monitor.py  Normal file
@@ -0,0 +1,27 @@
import praw
from models import Submission, session_scope
from datetime import datetime, timedelta


class RedditMonitor:
    def __init__(self, client_id, client_secret, user_agent, username, password, subreddit_name):
        self.reddit = praw.Reddit(
            client_id=client_id,
            client_secret=client_secret,
            user_agent=user_agent,
            username=username,
            password=password
        )
        self.subreddit = self.reddit.subreddit(subreddit_name)

    def stream_submissions(self):
        for submission in self.subreddit.stream.submissions():
            yield submission

    def update_submissions(self):
        with session_scope() as session:
            one_week_ago = datetime.utcnow() - timedelta(weeks=1)
            submissions_to_update = session.query(Submission).filter(Submission.created_utc >= one_week_ago.timestamp()).all()
            for db_submission in submissions_to_update:
                praw_submission = self.reddit.submission(id=db_submission.id)
                yield praw_submission
Binary file not shown.
21  server/webhook.py  Normal file
@@ -0,0 +1,21 @@
import requests


class WebhookNotifier:
    def __init__(self, webhook_url, disable_webhook=False):
        self.webhook_url = webhook_url
        self.disable_webhook = disable_webhook

    def send_notification(self, submission):
        title = submission.title
        url = submission.url
        permalink = submission.permalink
        selftext = submission.selftext
        content = f"""
**New Deal!**
**Title:** {title}
**URL:** {url}
**Permalink:** https://old.reddit.com{permalink}
**Selftext:** {selftext}"""
        if not self.disable_webhook:
            requests.post(self.webhook_url, data={"content": content})
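For orientation only (not part of the diff): a minimal sketch of exercising the new server/webhook.py without hitting a real webhook; the URL and the SimpleNamespace stand-in are hypothetical, chosen only to carry the attributes send_notification reads.

```python
# Illustrative sketch; disable_webhook=True keeps send_notification from issuing the POST.
from types import SimpleNamespace
from webhook import WebhookNotifier

notifier = WebhookNotifier("https://discord.com/api/webhooks/...", disable_webhook=True)  # placeholder URL
fake_submission = SimpleNamespace(
    title="Example deal",            # hypothetical values matching the attributes used above
    url="https://example.com/deal",
    permalink="/r/pkmntcgdeals/comments/abc123/example_deal/",
    selftext="",
)
notifier.send_notification(fake_submission)  # builds the Discord-style message, skips the request
```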