first commit
commit d9a360d031
.example.env (new file, 5 lines)
@@ -0,0 +1,5 @@
REDDIT_CLIENT_ID=
REDDIT_CLIENT_SECRET=
REDDIT_USER_AGENT=
IFTTT_WEBHOOK_URL=
IFTTT_WEBHOOK_URL_NIGHT=
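These are blank placeholders: copy this file to .env (which .gitignore keeps out of version control) and fill in the credentials. Below is a minimal sketch of how main.py picks the values up via python-dotenv; the file name check_env.py is illustrative and not part of this commit.

# check_env.py - illustrative only; confirms the local .env is readable
import os
from dotenv import load_dotenv

load_dotenv()  # reads .env from the current working directory

for key in ("REDDIT_CLIENT_ID", "REDDIT_CLIENT_SECRET", "REDDIT_USER_AGENT",
            "IFTTT_WEBHOOK_URL", "IFTTT_WEBHOOK_URL_NIGHT"):
    print(f"{key}: {'set' if os.getenv(key) else 'MISSING'}")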
.gitignore (new file, vendored, 37 lines)
@@ -0,0 +1,37 @@
# Environment variables
.env

# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual Environment
venv/
ENV/
env/
.env/

# IDE specific files
.idea/
.vscode/
*.swp
*.swo
.DS_Store
Dockerfile (new file, 21 lines)
@@ -0,0 +1,21 @@
# Use Python 3.11 slim image as base
FROM python:3.11-slim

# Set working directory
WORKDIR /app

# Copy requirements first to leverage Docker cache
COPY requirements.txt .

# Install dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy application code and .env
COPY main.py .
COPY .env .

# Expose health check port
EXPOSE 5000

# Run the bot
CMD ["python", "main.py"]
main.py (new file, 242 lines)
@@ -0,0 +1,242 @@
# Standard library imports
import os
import time
from datetime import datetime
from zoneinfo import ZoneInfo

# Third-party imports
import praw
import pyfiglet
import requests
from dotenv import load_dotenv
from flask import Flask, jsonify
from threading import Thread

# Load environment variables
load_dotenv()

# Constants
REDDIT_CLIENT_ID = os.getenv('REDDIT_CLIENT_ID')
REDDIT_CLIENT_SECRET = os.getenv('REDDIT_CLIENT_SECRET')
REDDIT_USER_AGENT = os.getenv('REDDIT_USER_AGENT')

IFTTT_WEBHOOK_URL = os.getenv('IFTTT_WEBHOOK_URL')
IFTTT_WEBHOOK_URL_NIGHT = os.getenv('IFTTT_WEBHOOK_URL_NIGHT')

INTERESTING_SCORE_THRESHOLD = 5
VIRAL_MULTIPLIER = 2
SCORE_INCREMENT = 5
SUBMISSION_LIMIT = 5
CHECK_INTERVAL = 300  # 5 minutes in seconds

NIGHT_START_HOUR = 22
NIGHT_END_HOUR = 7
TIMEZONE = "Pacific/Auckland"

MONITORED_SUBREDDITS = [
    ("NintendoSwitch2", "hour"),
    ("nintendoswitch", "day"),
]

# List to track posts we've already notified about
notified_posts = []

# Initialize Reddit API client
reddit = praw.Reddit(
    client_id=REDDIT_CLIENT_ID,
    client_secret=REDDIT_CLIENT_SECRET,
    user_agent=REDDIT_USER_AGENT,
)

# Keywords that indicate potentially interesting posts
interesting_words = [
    "youtube",
    "Furukawa",
    "President",
    "Trailer",
    "Announcement",
    "Nintendo Direct",
    "Pre-order Trailer"
]

# Official Nintendo X (Twitter) accounts to monitor
nintendo_x_accounts = [
    "x.com/Nintendo",
    "x.com/NintendoAmerica",
    "x.com/NintendoUK",
    "x.com/NintendoEU",
    "x.com/NintendoFrance",
    "x.com/NintendoGermany",
    "x.com/NintendoJapan",
    "x.com/NintendoCoLtd",
]

app = Flask(__name__)

@app.route('/health')
def health_check():
    """Health check endpoint that returns status and notified posts"""
    return jsonify({
        'status': 'healthy',
        'notified_posts': notified_posts,
        'monitored_subreddits': MONITORED_SUBREDDITS
    })

def run_flask():
    """Run Flask server on port 5000"""
    app.run(host='0.0.0.0', port=5000, debug=False, threaded=True)

class CommonDataObject:
    """
    A class to process and analyze Reddit submissions for interesting content.
    Handles checking for keywords, X accounts, and viral metrics.
    """
    def __init__(self, submission):
        """Initialize with a Reddit submission and extract key data"""
        self.submission = submission
        self.body = submission.selftext.lower()
        self.title = submission.title.lower()
        self.updoots = submission.ups
        self.url = submission.url.lower()

    def contains_nintendo_x_account(self):
        """Check if submission contains reference to official Nintendo X accounts"""
        for x_acct in nintendo_x_accounts:
            if x_acct.lower() in self.body or x_acct.lower() in self.url:
                return True
        return False

    def contains_interesting_word(self):
        """Check if submission contains any monitored keywords"""
        for word in interesting_words:
            if word.lower() in self.body or word.lower() in self.title:
                return True
        return False

    def is_viral(self, epoch_avg):
        """Determine if post has gone viral based on upvote comparison"""
        if epoch_avg > 0 and self.updoots > epoch_avg * VIRAL_MULTIPLIER:
            return True
        return False

    def likelihood_of_being_announcement(self, epoch_avg):
        """
        Calculate a score indicating likelihood this is an important announcement
        Returns tuple of (score, reason string)
        """
        score = 0
        reason = ""

        if self.contains_nintendo_x_account():
            score += SCORE_INCREMENT
            reason += "Contains Nintendo X account|"

        if self.contains_interesting_word():
            score += SCORE_INCREMENT
            reason += "Contains interesting word|"

        if self.is_viral(epoch_avg):
            score += SCORE_INCREMENT
            reason += "Is viral|"

        if "youtube" in self.url or "youtube" in self.body:
            score += SCORE_INCREMENT
            reason += "Contains youtube|"

        return score, reason

def print_splash(subreddit):
    """Print ASCII art title and monitoring info"""
    ascii_banner = pyfiglet.figlet_format("SwitchBot")
    print(ascii_banner)
    print(f"Monitoring subreddit: {subreddit}")

def submission_is_interesting(submission, epoch_avg=0):
    """
    Analyze a submission to determine if it's interesting
    Returns tuple of (is_interesting, score, reason)
    """
    cdo = CommonDataObject(submission)
    score, reason = cdo.likelihood_of_being_announcement(epoch_avg)

    if score >= INTERESTING_SCORE_THRESHOLD:
        return True, score, reason

    return False, score, reason

def check_reddit(subreddit_name, time_filter):
    """
    Check a subreddit for interesting posts and process them
    Handles notification and IFTTT webhook calls for interesting content
    """
    global notified_posts

    subreddit = reddit.subreddit(subreddit_name)

    print(".", end="", flush=True)
    submissions = list(subreddit.top(limit=SUBMISSION_LIMIT, time_filter=time_filter))

    if len(submissions) == 0:
        return

    # Calculate average upvotes for this time period
    epoch_avg = sum(submission.ups for submission in submissions) / len(submissions)

    # Check each submission for interesting content
    for submission in submissions:
        interesting, score, reason = submission_is_interesting(submission, epoch_avg)

        if interesting:
            if submission.id in notified_posts:
                continue

            notified_posts.append(submission.id)

            print("\n" + "=" * 50)
            print("🎯 INTERESTING POST DETECTED! 🎯")
            print("=" * 50)
            print(f"📊 Score: {score}")
            print(f"🔍 Reason: {reason}")
            print("-" * 40)
            print(f"📝 Title: {submission.title}")
            print(f"🔗 URL: {submission.url}")
            print(f"📄 Content:\n{submission.selftext}")
            print(f"⬆️ Upvotes: {submission.ups}")
            print("=" * 50 + "\n")

            # Send to IFTTT webhook
            send_to_ifttt(submission, score, reason)

def send_to_ifttt(submission, score, reason):
    """Send interesting post data to IFTTT webhook for notifications"""
    # Check if it's night time in NZDT (between 10pm and 7am)
    current_time = datetime.now(ZoneInfo(TIMEZONE))
    is_night = current_time.hour >= NIGHT_START_HOUR or current_time.hour < NIGHT_END_HOUR

    webhook_url = IFTTT_WEBHOOK_URL_NIGHT if is_night else IFTTT_WEBHOOK_URL

    payload = {
        "value1": f"Score: {score} - {submission.title}",
        "value2": f"Reason: {reason}\nURL: {submission.url}",
        "value3": submission.selftext[:1000] if submission.selftext else "No content"
    }

    try:
        requests.post(webhook_url, json=payload)
    except Exception as e:
        print(f"Failed to send to IFTTT: {e}")

def boot():
    """Main entry point - starts monitoring loop and health check server"""
    print_splash(str(MONITORED_SUBREDDITS))

    # Start Flask server in a separate thread
    Thread(target=run_flask, daemon=True).start()

    while True:
        for subreddit in MONITORED_SUBREDDITS:
            check_reddit(subreddit[0], subreddit[1])
        time.sleep(CHECK_INTERVAL)

if __name__ == "__main__":
    boot()
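A quick way to exercise the scoring logic without calling Reddit is to pass submission_is_interesting a stub object exposing the same attributes CommonDataObject reads (title, selftext, ups, url). This sketch is illustrative and not part of this commit; it assumes main.py is importable from the working directory with a populated .env, since the PRAW client is created at module import time.

# scoring_demo.py - illustrative stub, not part of this commit
from types import SimpleNamespace
from main import submission_is_interesting

fake = SimpleNamespace(
    title="Nintendo Direct announcement",
    selftext="New trailer on youtube, via x.com/NintendoAmerica",
    url="https://youtube.com/watch?v=example",
    ups=500,
)

# An epoch_avg of 100 makes the stub "viral" under VIRAL_MULTIPLIER = 2
interesting, score, reason = submission_is_interesting(fake, epoch_avg=100)
print(interesting, score, reason)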
requirements.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
praw
pyfiglet
python-dotenv
requests
flask
tzdata
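With these dependencies installed and the bot running (python main.py locally, or the container with port 5000 published), the Flask health endpoint can be probed. A minimal sketch, assuming the default host and port from main.py:

# health_probe.py - illustrative only; assumes the bot is reachable on localhost:5000
import requests

resp = requests.get("http://localhost:5000/health", timeout=5)
resp.raise_for_status()
data = resp.json()
print(data["status"], "-", len(data["notified_posts"]), "posts notified so far")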