Added better dashboarding.
@@ -1,12 +1,14 @@
 # rstat_tool/dashboard.py

 from flask import Flask, render_template
+# --- FIX #1: Import the new function we need ---
+from datetime import datetime, timedelta
 from .database import (
     get_overall_summary,
     get_subreddit_summary,
     get_all_scanned_subreddits,
-    get_deep_dive_details
+    get_deep_dive_details,
+    get_image_view_summary,
+    get_weekly_summary_for_subreddit
 )

 app = Flask(__name__, template_folder='../templates')
@@ -50,6 +52,35 @@ def deep_dive(symbol):
     posts = get_deep_dive_details(symbol)
     return render_template("deep_dive.html", posts=posts, symbol=symbol)

+@app.route("/image/<name>")
+def image_view(name):
+    """The handler for the image-style dashboard."""
+    tickers = get_image_view_summary(name)
+    current_date = datetime.utcnow().strftime("%Y-%m-%d")
+    return render_template(
+        "image_view.html",
+        tickers=tickers,
+        subreddit_name=name,
+        current_date=current_date
+    )
+
+@app.route("/image/weekly/<name>")
+def weekly_image_view(name):
+    """The handler for the WEEKLY image-style dashboard."""
+    tickers = get_weekly_summary_for_subreddit(name)
+
+    # Create the date range string for the title
+    end_date = datetime.utcnow()
+    start_date = end_date - timedelta(days=7)
+    date_range_str = f"{start_date.strftime('%b %d')} - {end_date.strftime('%b %d, %Y')}"
+
+    return render_template(
+        "weekly_image_view.html",
+        tickers=tickers,
+        subreddit_name=name,
+        date_range=date_range_str
+    )
+
 def start_dashboard():
     """The main function called by the 'rstat-dashboard' command."""
     print("Starting Flask server...")
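
A quick check of the date-range formatting used in weekly_image_view above (standalone sketch; the dates are made up):

    from datetime import datetime, timedelta

    # Hypothetical "today" of 2024-03-15, to show the rendered title string.
    end_date = datetime(2024, 3, 15)
    start_date = end_date - timedelta(days=7)
    print(f"{start_date.strftime('%b %d')} - {end_date.strftime('%b %d, %Y')}")
    # -> Mar 08 - Mar 15, 2024
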
@@ -3,6 +3,7 @@
 import sqlite3
 import time
 from .ticker_extractor import COMMON_WORDS_BLACKLIST
+from datetime import datetime, timedelta

 DB_FILE = "reddit_stocks.db"

@@ -25,6 +26,7 @@ def initialize_db():
             id INTEGER PRIMARY KEY AUTOINCREMENT,
             symbol TEXT NOT NULL UNIQUE,
             market_cap INTEGER,
+            closing_price REAL,
             last_updated INTEGER
         )
     """)
@@ -44,11 +46,12 @@ def initialize_db():
             ticker_id INTEGER,
             subreddit_id INTEGER,
             post_id TEXT NOT NULL,
+            mention_type TEXT NOT NULL, -- Can be 'post' or 'comment'
             mention_timestamp INTEGER NOT NULL,
             sentiment_score REAL,
             FOREIGN KEY (ticker_id) REFERENCES tickers (id),
             FOREIGN KEY (subreddit_id) REFERENCES subreddits (id),
-            UNIQUE(ticker_id, post_id, sentiment_score)
+            UNIQUE(ticker_id, post_id, mention_type, sentiment_score)
         )
     """)
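
The widened UNIQUE constraint is the point of this hunk: under the old UNIQUE(ticker_id, post_id, sentiment_score), a post mention and a comment mention of the same ticker in the same thread with identical scores collided, and the second row was silently dropped by the IntegrityError handler in add_mention below. A minimal demonstration (in-memory table, hypothetical values):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("""CREATE TABLE mentions (
        ticker_id, post_id, mention_type, sentiment_score,
        UNIQUE(ticker_id, post_id, mention_type, sentiment_score))""")
    # Same ticker, post, and score, but different mention types: both rows survive.
    conn.execute("INSERT INTO mentions VALUES (1, 'abc123', 'post', 0.0)")
    conn.execute("INSERT INTO mentions VALUES (1, 'abc123', 'comment', 0.0)")
    # With the old three-column UNIQUE, the second insert raises sqlite3.IntegrityError.
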

@@ -103,13 +106,12 @@ def clean_stale_tickers():
     print(f"Cleanup complete. Removed {deleted_count} records.")


-def add_mention(conn, ticker_id, subreddit_id, post_id, timestamp, sentiment):
-    """Adds a new mention with its sentiment score to the database."""
+def add_mention(conn, ticker_id, subreddit_id, post_id, mention_type, timestamp, sentiment):
     cursor = conn.cursor()
     try:
         cursor.execute(
-            "INSERT INTO mentions (ticker_id, subreddit_id, post_id, mention_timestamp, sentiment_score) VALUES (?, ?, ?, ?, ?)",
-            (ticker_id, subreddit_id, post_id, timestamp, sentiment)
+            "INSERT INTO mentions (ticker_id, subreddit_id, post_id, mention_type, mention_timestamp, sentiment_score) VALUES (?, ?, ?, ?, ?, ?)",
+            (ticker_id, subreddit_id, post_id, mention_type, timestamp, sentiment)
         )
         conn.commit()
     except sqlite3.IntegrityError:
@@ -127,11 +129,14 @@ def get_or_create_entity(conn, table_name, column_name, value):
     conn.commit()
     return cursor.lastrowid

-def update_ticker_market_cap(conn, ticker_id, market_cap):
-    """Updates the market cap and timestamp for a specific ticker."""
+def update_ticker_financials(conn, ticker_id, market_cap, closing_price):
+    """Updates the financials and timestamp for a specific ticker."""
     cursor = conn.cursor()
     current_timestamp = int(time.time())
-    cursor.execute("UPDATE tickers SET market_cap = ?, last_updated = ? WHERE id = ?", (market_cap, current_timestamp, ticker_id))
+    cursor.execute(
+        "UPDATE tickers SET market_cap = ?, closing_price = ?, last_updated = ? WHERE id = ?",
+        (market_cap, closing_price, current_timestamp, ticker_id)
+    )
     conn.commit()

 def get_ticker_info(conn, ticker_id):
@@ -169,27 +174,28 @@ def generate_summary_report(limit=20):
     conn.close()

 def get_overall_summary(limit=50):
     """Gets the top tickers across all subreddits for the dashboard."""
     conn = get_db_connection()
     query = """
         SELECT
-            t.symbol, t.market_cap, COUNT(m.id) as mention_count,
+            t.symbol, t.market_cap, t.closing_price, -- Added closing_price
+            COUNT(m.id) as mention_count,
             SUM(CASE WHEN m.sentiment_score > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
             SUM(CASE WHEN m.sentiment_score < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
             SUM(CASE WHEN m.sentiment_score BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
         FROM mentions m JOIN tickers t ON m.ticker_id = t.id
-        GROUP BY t.symbol, t.market_cap ORDER BY mention_count DESC LIMIT ?;
+        GROUP BY t.symbol, t.market_cap, t.closing_price -- Added closing_price
+        ORDER BY mention_count DESC LIMIT ?;
     """
     results = conn.execute(query, (limit,)).fetchall()
     conn.close()
     return results

 def get_subreddit_summary(subreddit_name, limit=50):
     """Gets the top tickers for a specific subreddit for the dashboard."""
     conn = get_db_connection()
     query = """
         SELECT
-            t.symbol, t.market_cap, COUNT(m.id) as mention_count,
+            t.symbol, t.market_cap, t.closing_price, -- Added closing_price
+            COUNT(m.id) as mention_count,
             SUM(CASE WHEN m.sentiment_score > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
             SUM(CASE WHEN m.sentiment_score < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
             SUM(CASE WHEN m.sentiment_score BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
@@ -197,7 +203,8 @@ def get_subreddit_summary(subreddit_name, limit=50):
         JOIN tickers t ON m.ticker_id = t.id
         JOIN subreddits s ON m.subreddit_id = s.id
         WHERE s.name = ?
-        GROUP BY t.symbol, t.market_cap ORDER BY mention_count DESC LIMIT ?;
+        GROUP BY t.symbol, t.market_cap, t.closing_price -- Added closing_price
+        ORDER BY mention_count DESC LIMIT ?;
     """
     results = conn.execute(query, (subreddit_name, limit)).fetchall()
     conn.close()
@@ -205,7 +212,6 @@ def get_subreddit_summary(subreddit_name, limit=50):

 def get_all_scanned_subreddits():
     """Gets a unique list of all subreddits we have data for."""
-    # --- THIS IS THE CORRECTED LINE ---
     conn = get_db_connection()
     results = conn.execute("SELECT DISTINCT name FROM subreddits ORDER BY name ASC;").fetchall()
     conn.close()
@@ -245,4 +251,60 @@ def get_deep_dive_details(ticker_symbol):
     """
     results = conn.execute(query, (ticker_symbol,)).fetchall()
     conn.close()
     return results
+
+def get_image_view_summary(subreddit_name):
+    """
+    Gets a summary of tickers for the image view, including post, comment,
+    and sentiment counts.
+    """
+    conn = get_db_connection()
+    # This query now also counts sentiment types
+    query = """
+        SELECT
+            t.symbol,
+            COUNT(CASE WHEN m.mention_type = 'post' THEN 1 END) as post_mentions,
+            COUNT(CASE WHEN m.mention_type = 'comment' THEN 1 END) as comment_mentions,
+            COUNT(CASE WHEN m.sentiment_score > 0.1 THEN 1 END) as bullish_mentions,
+            COUNT(CASE WHEN m.sentiment_score < -0.1 THEN 1 END) as bearish_mentions
+        FROM mentions m
+        JOIN tickers t ON m.ticker_id = t.id
+        JOIN subreddits s ON m.subreddit_id = s.id
+        WHERE s.name = ?
+        GROUP BY t.symbol
+        ORDER BY (post_mentions + comment_mentions) DESC
+        LIMIT 10;
+    """
+    results = conn.execute(query, (subreddit_name,)).fetchall()
+    conn.close()
+    return results
+
+def get_weekly_summary_for_subreddit(subreddit_name):
+    """
+    Gets a weekly summary for a specific subreddit for the image view.
+    """
+    conn = get_db_connection()
+
+    # Calculate the timestamp for 7 days ago
+    seven_days_ago = datetime.utcnow() - timedelta(days=7)
+    seven_days_ago_timestamp = int(seven_days_ago.timestamp())
+
+    # The query is the same as before, but with an added WHERE clause for the timestamp
+    query = """
+        SELECT
+            t.symbol,
+            COUNT(CASE WHEN m.mention_type = 'post' THEN 1 END) as post_mentions,
+            COUNT(CASE WHEN m.mention_type = 'comment' THEN 1 END) as comment_mentions,
+            COUNT(CASE WHEN m.sentiment_score > 0.1 THEN 1 END) as bullish_mentions,
+            COUNT(CASE WHEN m.sentiment_score < -0.1 THEN 1 END) as bearish_mentions
+        FROM mentions m
+        JOIN tickers t ON m.ticker_id = t.id
+        JOIN subreddits s ON m.subreddit_id = s.id
+        WHERE s.name = ? AND m.mention_timestamp >= ?
+        GROUP BY t.symbol
+        ORDER BY (post_mentions + comment_mentions) DESC
+        LIMIT 10;
+    """
+    results = conn.execute(query, (subreddit_name, seven_days_ago_timestamp)).fetchall()
+    conn.close()
+    return results
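
A note on the aggregation idiom in these two queries: COUNT skips NULLs, so COUNT(CASE WHEN cond THEN 1 END) counts only rows matching cond, equivalent to the SUM(CASE ... ELSE 0 END) form used in the summary queries earlier in this file. A self-contained check with toy data:

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE mentions (mention_type TEXT, sentiment_score REAL)")
    conn.executemany("INSERT INTO mentions VALUES (?, ?)",
                     [("post", 0.5), ("comment", -0.4), ("comment", 0.0)])
    print(conn.execute("""
        SELECT
            COUNT(CASE WHEN mention_type = 'post' THEN 1 END),
            COUNT(CASE WHEN mention_type = 'comment' THEN 1 END),
            COUNT(CASE WHEN sentiment_score > 0.1 THEN 1 END)
        FROM mentions
    """).fetchone())  # (1, 2, 1)
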
@@ -1,46 +1,52 @@
 # The initial unsorted set of words.
 # Note: In Python, a 'set' is inherently unordered, but we define it here for clarity.
 COMMON_WORDS_BLACKLIST = {
-    "401K", "403B", "457B", "ABOUT", "ABOVE", "ADAM", "AEDT", "AEST", "AH", "AI",
-    "ALL", "ALPHA", "ALSO", "AM", "AMA", "AMEX", "AND", "ANY", "AR", "ARE",
-    "AROUND", "ASAP", "ASS", "ASSET", "AT", "ATH", "ATL", "ATM", "AUD", "BABY",
-    "BAG", "BAGS", "BE", "BEAR", "BELOW", "BETA", "BIG", "BIS", "BLEND", "BOE",
-    "BOJ", "BOMB", "BOND", "BOTH", "BOTS", "BRB", "BRL", "BS", "BST", "BTC",
-    "BTW", "BULL", "BUST", "BUT", "BUY", "BUZZ", "CAD", "CALL", "CAN", "CAP",
-    "CEO", "CEST", "CET", "CFO", "CHF", "CHIPS", "CIA", "CLOSE", "CNY", "COME",
-    "COST", "COULD", "CPAP", "CPI", "CST", "CTB", "CTO", "CYCLE", "CZK", "DAO",
-    "DATE", "DAX", "DAY", "DCA", "DD", "DEBT", "DIA", "DIV", "DJIA", "DKK",
-    "DM", "DO", "DOE", "DOGE", "DONT", "DR", "EACH", "EARLY", "EARN", "ECB",
-    "EDGAR", "EDIT", "EDT", "END", "EOD", "EOW", "EOY", "EPS", "ER", "ESG",
-    "EST", "ETF", "ETH", "EU", "EUR", "EV", "EVEN", "EVERY", "FAQ", "FAR",
-    "FAST", "FBI", "FDA", "FIHTX", "FINRA", "FINT", "FINTX", "FINTY", "FOMC", "FOMO",
-    "FOR", "FRAUD", "FRG", "FROM", "FSPSX", "FTSE", "FUCK", "FUD", "FULL", "FUND",
-    "FXAIX", "FXIAX", "FY", "FYI", "FZROX", "GAIN", "GBP", "GDP", "GET", "GL",
-    "GLHF", "GMT", "GO", "GOAL", "GOAT", "GOING", "GPT", "GPU", "GRAB", "GTG",
-    "HALF", "HAS", "HATE", "HAVE", "HEAR", "HEDGE", "HIGH", "HINT", "HKD", "HODL",
-    "HOLD", "HOUR", "HSA", "HUF", "IF", "II", "IMHO", "IMO", "IN", "INR",
-    "IP", "IPO", "IRA", "IRS", "IS", "ISM", "IST", "IT", "ITM", "IV",
-    "IVV", "IWM", "JPOW", "JPY", "JST", "JUST", "KARMA", "KEEP", "KNOW", "KO",
-    "KRW", "LARGE", "LAST", "LATE", "LATER", "LBO", "LEAP", "LEAPS", "LETS", "LFG",
-    "LIKE", "LIMIT", "LMAO", "LOL", "LONG", "LOOK", "LOSS", "LOVE", "LOW", "M&A",
-    "MA", "MAKE", "MAX", "MC", "ME", "MID", "MIGHT", "MIN", "ML", "MOASS",
-    "MONTH", "MORE", "MSK", "MUST", "MXN", "MY", "NATO", "NEAR", "NEED", "NEVER",
-    "NEW", "NEXT", "NFA", "NFT", "NGMI", "NIGHT", "NO", "NOK", "NONE", "NOT",
-    "NOW", "NSA", "NULL", "NYSE", "NZD", "OEM", "OF", "OK", "OLD", "ON",
-    "OP", "OR", "OS", "OTC", "OTM", "OUGHT", "OUT", "OVER", "OWN", "PC",
-    "PDT", "PE", "PEAK", "PEG", "PEW", "PLAN", "PLN", "PM", "PMI", "POS",
-    "PPI", "PR", "PRICE", "PROFIT", "PSA", "PST", "PT", "PUT", "Q1", "Q2",
-    "Q3", "Q4", "QQQ", "RBA", "RBNZ", "RE", "REAL", "REIT", "REKT", "RH",
-    "RIP", "RISK", "ROE", "ROFL", "ROI", "ROTH", "RSD", "RUB", "RULE", "SAVE",
+    "401K", "403B", "457B", "ABOUT", "ABOVE", "ADAM", "ADX", "AEDT", "AEST", "AH",
+    "AI", "ALL", "ALPHA", "ALSO", "AM", "AMA", "AMEX", "AND", "ANY", "AR",
+    "ARE", "ARK", "AROUND", "ASAP", "ASS", "ASSET", "AT", "ATH", "ATL", "ATM",
+    "AUD", "AWS", "BABY", "BAG", "BAGS", "BE", "BEAR", "BELOW", "BETA", "BIG",
+    "BIS", "BLEND", "BOE", "BOJ", "BOLL", "BOMB", "BOND", "BOTH", "BOTS", "BRB",
+    "BRL", "BS", "BST", "BSU", "BTC", "BTW", "BULL", "BUST", "BUT", "BUY",
+    "BUZZ", "CAD", "CALL", "CAN", "CAP", "CBS", "CCI", "CEO", "CEST", "CET",
+    "CEX", "CFD", "CFO", "CHF", "CHIPS", "CIA", "CLOSE", "CNBC", "CNY", "COKE",
+    "COME", "COST", "COULD", "CPAP", "CPI", "CSE", "CST", "CTB", "CTO", "CYCLE",
+    "CZK", "DAO", "DATE", "DAX", "DAY", "DCA", "DD", "DEBT", "DEX", "DIA",
+    "DIV", "DJIA", "DKK", "DM", "DO", "DOE", "DOGE", "DOJ", "DONT", "DR",
+    "EACH", "EARLY", "EARN", "ECB", "EDGAR", "EDIT", "EDT", "EMA", "END", "EOD",
+    "EOW", "EOY", "EPA", "EPS", "ER", "ESG", "EST", "ETF", "ETFS", "ETH",
+    "EU", "EUR", "EV", "EVEN", "EVERY", "FAQ", "FAR", "FAST", "FBI", "FD",
+    "FDA", "FIHTX", "FINRA", "FINT", "FINTX", "FINTY", "FIRST", "FOMC", "FOMO", "FOR",
+    "FOREX", "FRAUD", "FRG", "FROM", "FSPSX", "FTSE", "FUCK", "FUD", "FULL", "FUND",
+    "FXAIX", "FXIAX", "FY", "FYI", "FZROX", "GAAP", "GAIN", "GBP", "GDP", "GET",
+    "GL", "GLHF", "GMT", "GO", "GOAL", "GOAT", "GOING", "GPT", "GPU", "GRAB",
+    "GTG", "HALF", "HAS", "HATE", "HAVE", "HEAR", "HEDGE", "HELP", "HIGH", "HINT",
+    "HKD", "HODL", "HOLD", "HOUR", "HSA", "HUF", "IF", "II", "IKZ", "IMHO",
+    "IMO", "IN", "INR", "IP", "IPO", "IRA", "IRS", "IS", "ISA", "ISM",
+    "IST", "IT", "ITM", "IV", "IVV", "IWM", "JD", "JPOW", "JPY", "JST",
+    "JUST", "KARMA", "KEEP", "KNOW", "KO", "KRW", "LANGT", "LARGE", "LAST", "LATE",
+    "LATER", "LBO", "LEAP", "LEAPS", "LETS", "LFG", "LIKE", "LIMIT", "LLC", "LLM",
+    "LMAO", "LOKO", "LOL", "LONG", "LOOK", "LOSS", "LOVE", "LOW", "M&A", "MA",
+    "MACD", "MAKE", "MAX", "MC", "ME", "MEME", "MERK", "MEXC", "MID", "MIGHT",
+    "MIN", "MIND", "ML", "MOASS", "MONTH", "MORE", "MSK", "MUSIC", "MUST", "MXN",
+    "MY", "NATO", "NEAR", "NEED", "NEVER", "NEW", "NEXT", "NFA", "NFC", "NFT",
+    "NGMI", "NIGHT", "NO", "NOK", "NONE", "NOT", "NOW", "NSA", "NULL", "NUT",
+    "NYSE", "NZD", "OBV", "OEM", "OF", "OG", "OK", "OLD", "ON", "ONE",
+    "ONLY", "OP", "OPEX", "OR", "OS", "OSCE", "OTC", "OTM", "OUGHT", "OUT",
+    "OVER", "OWN", "PANIC", "PC", "PDT", "PE", "PEAK", "PEG", "PEW", "PLAN",
+    "PLN", "PM", "PMI", "POC", "POS", "PPI", "PR", "PRICE", "PROFIT", "PSA",
+    "PST", "PT", "PUT", "Q1", "Q2", "Q3", "Q4", "QQQ", "QR", "RBA",
+    "RBNZ", "RE", "REAL", "REIT", "REKT", "RH", "RIGHT", "RIP", "RISK", "ROCK",
+    "ROE", "ROFL", "ROI", "ROTH", "RSD", "RSI", "RUB", "RULE", "SAME", "SAVE",
     "SCALP", "SCAM", "SCHB", "SEC", "SEE", "SEK", "SELL", "SEP", "SGD", "SHALL",
-    "SHARE", "SHORT", "SL", "SMALL", "SO", "SOLIS", "SOME", "SOON", "SP", "SPAC",
-    "SPEND", "SPLG", "SPX", "SPY", "START", "STILL", "STOCK", "STOP", "SWING", "TAKE",
-    "TERM", "THAT", "THE", "THINK", "THIS", "TIME", "TITS", "TL", "TL;DR", "TLDR",
-    "TO", "TODAY", "TOTAL", "TRADE", "TREND", "TRUE", "TRY", "TTYL", "TWO", "UI",
-    "UK", "UNDER", "UP", "US", "USA", "USD", "UTC", "VALUE", "VOO", "VR",
-    "VTI", "WAGMI", "WANT", "WATCH", "WAY", "WE", "WEB3", "WEEK", "WHO", "WHY",
-    "WILL", "WORTH", "WOULD", "WSB", "WTF", "YES", "YET", "YIELD", "YOLO", "YOU",
-    "YOUR", "YTD", "ZAR"
+    "SHARE", "SHORT", "SL", "SMA", "SMALL", "SO", "SOLIS", "SOME", "SOON", "SP",
+    "SPAC", "SPEND", "SPLG", "SPX", "SPY", "START", "STILL", "STOCK", "STOP", "STOR",
+    "SWING", "TA", "TAG", "TAKE", "TERM", "THANK", "THAT", "THE", "THINK", "THIS",
+    "TIME", "TITS", "TL", "TL;DR", "TLDR", "TO", "TODAY", "TOTAL", "TRADE", "TREND",
+    "TRUE", "TRY", "TTYL", "TWO", "UI", "UK", "UNDER", "UP", "US", "USA",
+    "USD", "UTC", "VALUE", "VOO", "VP", "VR", "VTI", "WAGMI", "WANT", "WATCH",
+    "WAY", "WE", "WEB3", "WEEK", "WHALE", "WHO", "WHY", "WIDE", "WILL", "WORDS",
+    "WORTH", "WOULD", "WSB", "WTF", "XRP", "YES", "YET", "YIELD", "YOLO", "YOU",
+    "YOUR", "YOY", "YT", "YTD", "ZAR", "ZEN", "ZERO"
 }

 def format_and_print_list(word_set, words_per_line=10):
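
The hunk ends at the helper's signature, so its body isn't part of this commit. Given the output format seen in both blacklists (ten quoted words per line), the helper plausibly looks like this hypothetical sketch, not necessarily the repo's actual code:

    def format_and_print_list(word_set, words_per_line=10):
        # Sort the set, quote each word, and print rows of `words_per_line` entries.
        words = sorted(word_set)
        for i in range(0, len(words), words_per_line):
            row = ", ".join(f'"{w}"' for w in words[i:i + words_per_line])
            print(f"    {row},")
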
@@ -15,21 +15,23 @@ from .sentiment_analyzer import get_sentiment_score

 load_dotenv()
 MARKET_CAP_REFRESH_INTERVAL = 86400
 POST_AGE_LIMIT = 86400

 def load_subreddits(filepath):
     try:
         with open(filepath, 'r') as f:
             return json.load(f).get("subreddits", [])
     except (FileNotFoundError, json.JSONDecodeError) as e:
-        print(f"Error loading config: {e}")
+        print(f"Error loading config file '{filepath}': {e}")
         return None

-def get_market_cap(ticker_symbol):
+def get_financial_data(ticker_symbol):
     try:
         ticker = yf.Ticker(ticker_symbol)
-        return ticker.fast_info.get('marketCap')
+        data = { "market_cap": ticker.fast_info.get('marketCap'), "closing_price": ticker.fast_info.get('previousClose') }
+        return data
     except Exception:
-        return None
+        return {"market_cap": None, "closing_price": None}

 def get_reddit_instance():
     client_id = os.getenv("REDDIT_CLIENT_ID")
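
Returning a dict from get_financial_data (instead of a bare number) lets the scan loop fall back per field when Yahoo has no data, as in financials['market_cap'] or ticker_info['market_cap'] later in this diff. The pattern in isolation (hypothetical values):

    stale = {"market_cap": 1_000_000, "closing_price": 12.34}  # last known DB row
    fresh = {"market_cap": None, "closing_price": 12.90}       # partial yfinance answer
    merged = {k: fresh[k] or stale[k] for k in stale}
    print(merged)  # {'market_cap': 1000000, 'closing_price': 12.9}
    # Caveat: `or` also discards a legitimate 0/0.0; acceptable for caps and prices.
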
@@ -38,59 +40,55 @@ def get_reddit_instance():
     if not all([client_id, client_secret, user_agent]):
         print("Error: Reddit API credentials not found in .env file.")
         return None

-    # --- THIS IS THE CORRECTED LINE ---
-    # The argument is 'client_secret', not 'secret_client'.
     return praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent=user_agent)

-def scan_subreddits(reddit, subreddits_list, post_limit=25, comment_limit=100):
-    """Scans subreddits, analyzes posts and comments, and stores results in the database."""
+def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100, days_to_scan=1):
     conn = database.get_db_connection()
+    post_age_limit = days_to_scan * 86400
+    current_time = time.time()

-    print(f"\nScanning {len(subreddits_list)} subreddits (Top {post_limit} posts, {comment_limit} comments/post)...")
+    print(f"\nScanning {len(subreddits_list)} subreddit(s) for NEW posts in the last {days_to_scan} day(s)...")
     for subreddit_name in subreddits_list:
         try:
             subreddit_id = database.get_or_create_entity(conn, 'subreddits', 'name', subreddit_name)
             subreddit = reddit.subreddit(subreddit_name)
             print(f"Scanning r/{subreddit_name}...")

-            for submission in subreddit.hot(limit=post_limit):
-
-                # --- LOGIC PART 1: PROCESS INDIVIDUAL MENTIONS ---
-                # 1a. Process the Post Title and Body for mentions
+            for submission in subreddit.new(limit=post_limit):
+                if (current_time - submission.created_utc) > post_age_limit:
+                    print(f" -> Reached posts older than the {days_to_scan}-day limit. Moving to next subreddit.")
+                    break
+
                 post_text = submission.title + " " + submission.selftext
                 tickers_in_post = extract_tickers(post_text)
-                post_sentiment = get_sentiment_score(submission.title)
-
-                for ticker_symbol in set(tickers_in_post):
-                    ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
-                    database.add_mention(conn, ticker_id, subreddit_id, submission.id, int(submission.created_utc), post_sentiment)
-
-                    ticker_info = database.get_ticker_info(conn, ticker_id)
-                    current_time = int(time.time())
-                    if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > MARKET_CAP_REFRESH_INTERVAL):
-                        print(f" -> Fetching market cap for {ticker_symbol}...")
-                        market_cap = get_market_cap(ticker_symbol)
-                        database.update_ticker_market_cap(conn, ticker_id, market_cap or ticker_info['market_cap'])
-
-                # 1b. Process Comments for mentions
-                submission.comments.replace_more(limit=0)
-                for comment in submission.comments.list()[:comment_limit]:
-                    tickers_in_comment = extract_tickers(comment.body)
-                    if not tickers_in_comment:
-                        continue
-                    comment_sentiment = get_sentiment_score(comment.body)
-                    for ticker_symbol in set(tickers_in_comment):
-                        ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
-                        database.add_mention(conn, ticker_id, subreddit_id, submission.id, int(comment.created_utc), comment_sentiment)
-
-                # --- LOGIC PART 2: DEEP DIVE ANALYSIS ---
+                if tickers_in_post:
+                    post_sentiment = get_sentiment_score(submission.title)
+                    for ticker_symbol in set(tickers_in_post):
+                        ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
+                        database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'post', int(submission.created_utc), post_sentiment)
+
+                        ticker_info = database.get_ticker_info(conn, ticker_id)
+                        if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > MARKET_CAP_REFRESH_INTERVAL):
+                            print(f" -> Fetching financial data for {ticker_symbol}...")
+                            financials = get_financial_data(ticker_symbol)
+                            database.update_ticker_financials(
+                                conn, ticker_id,
+                                financials['market_cap'] or ticker_info['market_cap'],
+                                financials['closing_price'] or ticker_info['closing_price']
+                            )
+
+                submission.comments.replace_more(limit=0)
+                all_comment_sentiments = []
+                for comment in submission.comments.list()[:comment_limit]:
+                    all_comment_sentiments.append(get_sentiment_score(comment.body))
+
+                    tickers_in_comment = extract_tickers(comment.body)
+                    if tickers_in_comment:
+                        comment_sentiment = get_sentiment_score(comment.body)
+                        for ticker_symbol in set(tickers_in_comment):
+                            ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
+                            database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
+
+                avg_sentiment = sum(all_comment_sentiments) / len(all_comment_sentiments) if all_comment_sentiments else 0
+
                 post_analysis_data = {
                     "post_id": submission.id, "title": submission.title,
                     "post_url": f"https://reddit.com{submission.permalink}",
@@ -98,7 +96,7 @@ def scan_subreddits(reddit, subreddits_list, post_limit=25, comment_limit=100):
                     "comment_count": len(all_comment_sentiments), "avg_comment_sentiment": avg_sentiment
                 }
                 database.add_or_update_post_analysis(conn, post_analysis_data)

-
         except Exception as e:
             print(f"Could not scan r/{subreddit_name}. Error: {e}")
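
The switch from subreddit.hot() to subreddit.new() is what makes the age cutoff cheap: new listings arrive newest-first, so the first post past the cutoff ends that subreddit's scan instead of requiring a filter over every post. The control flow in isolation (a sketch; posts stands in for the PRAW listing):

    import time

    def recent_only(posts, days_to_scan=1):
        cutoff = time.time() - days_to_scan * 86400
        for post in posts:                 # assumed sorted newest -> oldest
            if post.created_utc < cutoff:  # first stale post stops the scan
                break
            yield post
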

@@ -106,23 +104,47 @@ def scan_subreddits(reddit, subreddits_list, post_limit=25, comment_limit=100):
     print("\n--- Scan Complete ---")

 def main():
     """Main function to run the Reddit stock analysis tool."""
     parser = argparse.ArgumentParser(description="Analyze stock ticker mentions on Reddit.", formatter_class=argparse.RawTextHelpFormatter)
-    parser.add_argument("config_file", help="Path to the JSON file containing subreddits.")
-    parser.add_argument("-p", "--posts", type=int, default=25, help="Number of posts to scan per subreddit.\n(Default: 25)")
-
+    parser.add_argument("--config", default="subreddits.json", help="Path to the JSON file containing subreddits.\n(Default: subreddits.json)")
+    parser.add_argument("--subreddit", help="Scan a single subreddit, ignoring the config file.")
+    parser.add_argument("--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)")
+
+    parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)")
     parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)")
-    parser.add_argument("-l", "--limit", type=int, default=20, help="Number of tickers to show in the final report.\n(Default: 20)")
+    parser.add_argument("-l", "--limit", type=int, default=20, help="Number of tickers to show in the CLI report.\n(Default: 20)")
     args = parser.parse_args()

+    # --- THIS IS THE CORRECTED LOGIC BLOCK ---
+    if args.subreddit:
+        # If --subreddit is used, create a list with just that one.
+        subreddits_to_scan = [args.subreddit]
+        print(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
+    else:
+        # Otherwise, load from the config file.
+        print(f"Config Scan Mode: Loading subreddits from {args.config}")
+        # Use the correct argument name: args.config
+        subreddits_to_scan = load_subreddits(args.config)
+
+    if not subreddits_to_scan:
+        print("Error: No subreddits to scan. Please check your config file or --subreddit argument.")
+        return
+
+    # --- Initialize and Run ---
     database.initialize_db()
+    database.clean_stale_tickers()

-    subreddits = load_subreddits(args.config_file)
-    if not subreddits: return

     reddit = get_reddit_instance()
     if not reddit: return

-    scan_subreddits(reddit, subreddits, post_limit=args.posts, comment_limit=args.comments)
+    scan_subreddits(
+        reddit,
+        subreddits_to_scan,
+        post_limit=args.posts,
+        comment_limit=args.comments,
+        days_to_scan=args.days
+    )
     database.generate_summary_report(limit=args.limit)

 if __name__ == "__main__":
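
With the new flags, typical invocations would look roughly like the following (assuming the scanner's console entry point is named rstat; only rstat-dashboard is named in this diff):

    rstat --subreddit wallstreetbets --days 7
    rstat --config subreddits.json --posts 200 --comments 100
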

@@ -5,48 +5,54 @@ import re
 # A set of common English words and acronyms that look like stock tickers.
 # This helps reduce false positives.
 COMMON_WORDS_BLACKLIST = {
-    "401K", "403B", "457B", "ABOUT", "ABOVE", "ADAM", "AEDT", "AEST", "AH", "AI",
-    "ALL", "ALPHA", "ALSO", "AM", "AMA", "AMEX", "AND", "ANY", "AR", "ARE",
-    "AROUND", "ASAP", "ASS", "ASSET", "AT", "ATH", "ATL", "ATM", "AUD", "BABY",
-    "BAG", "BAGS", "BE", "BEAR", "BELOW", "BETA", "BIG", "BIS", "BLEND", "BOE",
-    "BOJ", "BOMB", "BOND", "BOTH", "BOTS", "BRB", "BRL", "BS", "BST", "BTC",
-    "BTW", "BULL", "BUST", "BUT", "BUY", "BUZZ", "CAD", "CALL", "CAN", "CEO",
-    "CEST", "CET", "CFO", "CHF", "CHIPS", "CIA", "CLOSE", "CNY", "COME", "COST",
-    "COULD", "CPI", "CST", "CTB", "CTO", "CYCLE", "CZK", "DAO", "DATE", "DAX",
-    "DAY", "DCA", "DD", "DEBT", "DIA", "DIV", "DJIA", "DKK", "DM", "DO",
-    "DOE", "DOGE", "DONT", "DR", "EACH", "EARLY", "EARN", "ECB", "EDGAR", "EDIT",
-    "EDT", "END", "EOD", "EOW", "EOY", "EPS", "ER", "ESG", "EST", "ETF",
-    "ETH", "EU", "EUR", "EV", "EVEN", "EVERY", "FAQ", "FAR", "FAST", "FBI",
-    "FDA", "FIHTX", "FINRA", "FINT", "FINTX", "FINTY", "FOMC", "FOMO", "FOR", "FRAUD",
-    "FRG", "FROM", "FSPSX", "FTSE", "FUCK", "FUD", "FULL", "FUND", "FXAIX", "FXIAX",
-    "FY", "FYI", "FZROX", "GAIN", "GBP", "GDP", "GET", "GL", "GLHF", "GMT",
-    "GO", "GOAL", "GOAT", "GOING", "GPU", "GRAB", "GTG", "HALF", "HAS", "HATE",
-    "HAVE", "HEAR", "HEDGE", "HIGH", "HINT", "HKD", "HODL", "HOLD", "HOUR", "HSA",
-    "HUF", "IF", "II", "IMHO", "IMO", "IN", "INR", "IP", "IPO", "IRA",
-    "IRS", "IS", "ISM", "IST", "IT", "ITM", "IV", "IVV", "IWM", "JPOW",
-    "JPY", "JST", "JUST", "KARMA", "KEEP", "KNOW", "KO", "KRW", "LARGE", "LAST",
-    "LATE", "LATER", "LBO", "LEAP", "LEAPS", "LETS", "LFG", "LIKE", "LIMIT", "LMAO",
-    "LOL", "LONG", "LOOK", "LOSS", "LOVE", "LOW", "M&A", "MA", "MAKE", "MAX",
-    "MC", "ME", "MID", "MIGHT", "MIN", "ML", "MOASS", "MONTH", "MORE", "MSK",
-    "MUST", "MXN", "MY", "NATO", "NEAR", "NEED", "NEVER", "NEW", "NEXT", "NFA",
-    "NFT", "NGMI", "NIGHT", "NO", "NOK", "NONE", "NOT", "NOW", "NSA", "NULL",
-    "NYSE", "NZD", "OEM", "OF", "OK", "OLD", "ON", "OP", "OR", "OS",
-    "OTC", "OTM", "OUGHT", "OUT", "OVER", "OWN", "PC", "PDT", "PE", "PEAK",
-    "PEG", "PEW", "PLAN", "PLN", "PM", "PMI", "POS", "PPI", "PR", "PRICE",
-    "PROFIT", "PSA", "PST", "PUT", "Q1", "Q2", "Q3", "Q4", "QQQ", "RBA",
-    "RBNZ", "RE", "REAL", "REIT", "REKT", "RH", "RIP", "RISK", "ROE", "ROFL",
-    "ROI", "ROTH", "RSD", "RUB", "RULE", "SAVE", "SCALP", "SCAM", "SCHB", "SEC",
-    "SEE", "SEK", "SELL", "SEP", "SGD", "SHALL", "SHARE", "SHORT", "SL", "SO",
-    "SOLIS", "SOME", "SOON", "SP", "SPAC", "SPEND", "SPLG", "SPX", "SPY", "START",
-    "STILL", "STOCK", "STOP", "SWING", "TAKE", "TERM", "THAT", "THE", "THINK", "THIS",
+    "401K", "403B", "457B", "ABOUT", "ABOVE", "ADAM", "ADX", "AEDT", "AEST", "AH",
+    "AI", "ALL", "ALPHA", "ALSO", "AM", "AMA", "AMEX", "AND", "ANY", "AR",
+    "ARE", "ARK", "AROUND", "ASAP", "ASS", "ASSET", "AT", "ATH", "ATL", "ATM",
+    "AUD", "AWS", "BABY", "BAG", "BAGS", "BE", "BEAR", "BELOW", "BETA", "BIG",
+    "BIS", "BLEND", "BOE", "BOJ", "BOLL", "BOMB", "BOND", "BOTH", "BOTS", "BRB",
+    "BRL", "BS", "BST", "BSU", "BTC", "BTW", "BULL", "BUST", "BUT", "BUY",
+    "BUZZ", "CAD", "CALL", "CAN", "CAP", "CBS", "CCI", "CEO", "CEST", "CET",
+    "CEX", "CFD", "CFO", "CHF", "CHIPS", "CIA", "CLOSE", "CNBC", "CNY", "COKE",
+    "COME", "COST", "COULD", "CPAP", "CPI", "CSE", "CST", "CTB", "CTO", "CYCLE",
+    "CZK", "DAO", "DATE", "DAX", "DAY", "DCA", "DD", "DEBT", "DEX", "DIA",
+    "DIV", "DJIA", "DKK", "DM", "DO", "DOE", "DOGE", "DOJ", "DONT", "DR",
+    "EACH", "EARLY", "EARN", "ECB", "EDGAR", "EDIT", "EDT", "EMA", "END", "EOD",
+    "EOW", "EOY", "EPA", "EPS", "ER", "ESG", "EST", "ETF", "ETFS", "ETH",
+    "EU", "EUR", "EV", "EVEN", "EVERY", "FAQ", "FAR", "FAST", "FBI", "FD",
+    "FDA", "FIHTX", "FINRA", "FINT", "FINTX", "FINTY", "FIRST", "FOMC", "FOMO", "FOR",
+    "FOREX", "FRAUD", "FRG", "FROM", "FSPSX", "FTSE", "FUCK", "FUD", "FULL", "FUND",
+    "FXAIX", "FXIAX", "FY", "FYI", "FZROX", "GAAP", "GAIN", "GBP", "GDP", "GET",
+    "GL", "GLHF", "GMT", "GO", "GOAL", "GOAT", "GOING", "GPT", "GPU", "GRAB",
+    "GTG", "HALF", "HAS", "HATE", "HAVE", "HEAR", "HEDGE", "HELP", "HIGH", "HINT",
+    "HKD", "HODL", "HOLD", "HOUR", "HSA", "HUF", "IF", "II", "IKZ", "IMHO",
+    "IMO", "IN", "INR", "IP", "IPO", "IRA", "IRS", "IS", "ISA", "ISM",
+    "IST", "IT", "ITM", "IV", "IVV", "IWM", "JD", "JPOW", "JPY", "JST",
+    "JUST", "KARMA", "KEEP", "KNOW", "KO", "KRW", "LANGT", "LARGE", "LAST", "LATE",
+    "LATER", "LBO", "LEAP", "LEAPS", "LETS", "LFG", "LIKE", "LIMIT", "LLC", "LLM",
+    "LMAO", "LOKO", "LOL", "LONG", "LOOK", "LOSS", "LOVE", "LOW", "M&A", "MA",
+    "MACD", "MAKE", "MAX", "MC", "ME", "MEME", "MERK", "MEXC", "MID", "MIGHT",
+    "MIN", "MIND", "ML", "MOASS", "MONTH", "MORE", "MSK", "MUSIC", "MUST", "MXN",
+    "MY", "NATO", "NEAR", "NEED", "NEVER", "NEW", "NEXT", "NFA", "NFC", "NFT",
+    "NGMI", "NIGHT", "NO", "NOK", "NONE", "NOT", "NOW", "NSA", "NULL", "NUT",
+    "NYSE", "NZD", "OBV", "OEM", "OF", "OG", "OK", "OLD", "ON", "ONE",
+    "ONLY", "OP", "OPEX", "OR", "OS", "OSCE", "OTC", "OTM", "OUGHT", "OUT",
+    "OVER", "OWN", "PANIC", "PC", "PDT", "PE", "PEAK", "PEG", "PEW", "PLAN",
+    "PLN", "PM", "PMI", "POC", "POS", "PPI", "PR", "PRICE", "PROFIT", "PSA",
+    "PST", "PT", "PUT", "Q1", "Q2", "Q3", "Q4", "QQQ", "QR", "RBA",
+    "RBNZ", "RE", "REAL", "REIT", "REKT", "RH", "RIGHT", "RIP", "RISK", "ROCK",
+    "ROE", "ROFL", "ROI", "ROTH", "RSD", "RSI", "RUB", "RULE", "SAME", "SAVE",
+    "SCALP", "SCAM", "SCHB", "SEC", "SEE", "SEK", "SELL", "SEP", "SGD", "SHALL",
+    "SHARE", "SHORT", "SL", "SMA", "SMALL", "SO", "SOLIS", "SOME", "SOON", "SP",
+    "SPAC", "SPEND", "SPLG", "SPX", "SPY", "START", "STILL", "STOCK", "STOP", "STOR",
+    "SWING", "TA", "TAG", "TAKE", "TERM", "THANK", "THAT", "THE", "THINK", "THIS",
     "TIME", "TITS", "TL", "TL;DR", "TLDR", "TO", "TODAY", "TOTAL", "TRADE", "TREND",
     "TRUE", "TRY", "TTYL", "TWO", "UI", "UK", "UNDER", "UP", "US", "USA",
-    "USD", "UTC", "VALUE", "VOO", "VR", "VTI", "WAGMI", "WANT", "WATCH", "WAY",
-    "WE", "WEB3", "WEEK", "WHO", "WHY", "WILL", "WORTH", "WOULD", "WSB", "WTF",
-    "YES", "YET", "YIELD", "YOLO", "YOU", "YOUR", "YTD", "ZAR"
+    "USD", "UTC", "VALUE", "VOO", "VP", "VR", "VTI", "WAGMI", "WANT", "WATCH",
+    "WAY", "WE", "WEB3", "WEEK", "WHALE", "WHO", "WHY", "WIDE", "WILL", "WORDS",
+    "WORTH", "WOULD", "WSB", "WTF", "XRP", "YES", "YET", "YIELD", "YOLO", "YOU",
+    "YOUR", "YOY", "YT", "YTD", "ZAR", "ZEN", "ZERO"
 }


 def extract_tickers(text):
     """
     Extracts potential stock tickers from a given piece of text.
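
The docstring is cut off by the hunk boundary and the extractor's body isn't shown. Given the import re at the top of the file and the blacklist above, the usual shape of such a function is something like this hypothetical sketch (not the repo's code):

    import re

    def extract_tickers_sketch(text):
        # Match $TSLA-style or bare 2-5 letter uppercase words, then drop blacklisted ones.
        candidates = re.findall(r"\$?\b[A-Z]{2,5}\b", text)
        return [c.lstrip("$") for c in candidates
                if c.lstrip("$") not in COMMON_WORDS_BLACKLIST]
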

@@ -4,6 +4,15 @@
     "Shortsqueeze",
     "smallstreetbets",
     "wallstreetbets",
-    "Wallstreetbetsnew"
+    "Wallstreetbetsnew",
+    "wallstreetbets2",
+    "stocks",
+    "RobinHoodPennyStocks",
+    "StocksAndTrading",
+    "investing",
+    "WallStreetBetsELITE",
+    "ValueInvesting",
+    "Daytrading",
+    "Tollbugatabets"
   ]
 }
templates/image_view.html (new file, 116 lines)
@@ -0,0 +1,116 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>r/{{ subreddit_name }} Mentions</title>
+    <link rel="preconnect" href="https://fonts.googleapis.com">
+    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
+    <link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700;800&display=swap" rel="stylesheet">
+    <style>
+        body {
+            margin: 0;
+            padding: 2rem;
+            font-family: 'Inter', sans-serif;
+            background: #1a1a1a;
+            display: flex;
+            justify-content: center;
+            align-items: center;
+            min-height: 100vh;
+        }
+        .image-container {
+            width: 650px; /* Increased width to accommodate new column */
+            background: linear-gradient(145deg, #4d302d, #1f2128);
+            color: #ffffff;
+            border-radius: 16px;
+            padding: 2.5rem;
+            box-shadow: 0 10px 30px rgba(0,0,0,0.5);
+            text-align: center;
+        }
+        header {
+            display: flex;
+            justify-content: space-between;
+            align-items: flex-start;
+            margin-bottom: 2rem;
+        }
+        .title-block { text-align: left; }
+        .title-block h1 { font-size: 2.5rem; font-weight: 800; margin: 0; line-height: 1; }
+        .title-block h2 { font-size: 1.25rem; font-weight: 600; margin: 0.5rem 0 0; color: #b0b0b0; }
+        .date { font-size: 1.1rem; font-weight: 600; color: #c0c0c0; letter-spacing: 0.02em; }
+        table { width: 100%; border-collapse: collapse; text-align: left; }
+        th, td { padding: 1rem 0.5rem; border-bottom: 1px solid rgba(255, 255, 255, 0.1); }
+        th { font-weight: 700; text-transform: uppercase; font-size: 0.8rem; color: #a0a0a0; }
+        td { font-size: 1.1rem; font-weight: 600; }
+        tr:last-child td { border-bottom: none; }
+        td.rank { font-weight: 700; color: #d0d0d0; width: 8%; }
+        td.ticker { width: 30%; }
+        td.mentions { text-align: center; width: 18%; }
+        td.sentiment { text-align: center; width: 26%; } /* New width */
+
+        /* Sentiment Colors */
+        .sentiment-bullish { color: #28a745; font-weight: 700; }
+        .sentiment-bearish { color: #dc3545; font-weight: 700; }
+        .sentiment-neutral { color: #9e9e9e; font-weight: 600; }
+
+        /* Row colors */
+        tr:nth-child(1) td.ticker { color: #d8b4fe; } tr:nth-child(6) td.ticker { color: #fca5a5; }
+        tr:nth-child(2) td.ticker { color: #a3e635; } tr:nth-child(7) td.ticker { color: #fdba74; }
+        tr:nth-child(3) td.ticker { color: #67e8f9; } tr:nth-child(8) td.ticker { color: #6ee7b7; }
+        tr:nth-child(4) td.ticker { color: #fde047; } tr:nth-child(9) td.ticker { color: #93c5fd; }
+        tr:nth-child(5) td.ticker { color: #fcd34d; } tr:nth-child(10) td.ticker { color: #d1d5db; }
+
+        footer { margin-top: 2.5rem; }
+        .brand-name { font-size: 1.75rem; font-weight: 800; letter-spacing: -1px; }
+        .brand-subtitle { font-size: 1rem; color: #b0b0b0; }
+    </style>
+</head>
+<body>
+    <div class="image-container">
+        <header>
+            <div class="title-block">
+                <h1>Reddit Mentions</h1>
+                <h2>r/{{ subreddit_name }}</h2>
+            </div>
+            <div class="date">{{ current_date }}</div>
+        </header>
+
+        <table>
+            <thead>
+                <tr>
+                    <th class="rank">Rank</th>
+                    <th class="ticker">Ticker</th>
+                    <th class="mentions">Posts</th>
+                    <th class="mentions">Comments</th>
+                    <!-- UPDATED: Added Sentiment column header -->
+                    <th class="sentiment">Sentiment</th>
+                </tr>
+            </thead>
+            <tbody>
+                {% for ticker in tickers %}
+                <tr>
+                    <td class="rank">{{ loop.index }}</td>
+                    <td class="ticker">{{ ticker.symbol }}</td>
+                    <td class="mentions">{{ ticker.post_mentions }}</td>
+                    <td class="mentions">{{ ticker.comment_mentions }}</td>
+                    <!-- UPDATED: Added Sentiment data cell -->
+                    <td class="sentiment">
+                        {% if ticker.bullish_mentions > ticker.bearish_mentions %}
+                            <span class="sentiment-bullish">Bullish</span>
+                        {% elif ticker.bearish_mentions > ticker.bullish_mentions %}
+                            <span class="sentiment-bearish">Bearish</span>
+                        {% else %}
+                            <span class="sentiment-neutral">Neutral</span>
+                        {% endif %}
+                    </td>
+                </tr>
+                {% endfor %}
+            </tbody>
+        </table>
+
+        <footer>
+            <div class="brand-name">RSTAT</div>
+            <div class="brand-subtitle">Reddit Stock Analysis Tool</div>
+        </footer>
+    </div>
+</body>
+</html>
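
The Bullish/Bearish/Neutral cell is simple majority logic over the two counts, and it can be exercised outside Flask with Jinja2 directly (standalone sketch, made-up counts):

    from jinja2 import Template

    t = Template("{% if b > r %}Bullish{% elif r > b %}Bearish{% else %}Neutral{% endif %}")
    print(t.render(b=5, r=2))  # Bullish
    print(t.render(b=1, r=1))  # Neutral
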

@@ -10,6 +10,7 @@
             <th>Ticker</th>
             <th>Mentions</th>
             <th>Market Cap</th>
+            <th>Closing Price</th>
             <th>Sentiment</th>
         </tr>
     </thead>
@@ -19,6 +20,14 @@
             <td><strong><a href="/deep-dive/{{ ticker.symbol }}">{{ ticker.symbol }}</a></strong></td>
             <td>{{ ticker.mention_count }}</td>
             <td>{{ ticker.market_cap | format_mc }}</td>
+            <!-- NEW COLUMN FOR CLOSING PRICE -->
+            <td>
+                {% if ticker.closing_price %}
+                    ${{ "%.2f"|format(ticker.closing_price) }}
+                {% else %}
+                    N/A
+                {% endif %}
+            </td>
             <td>
                 {% if ticker.bullish_mentions > ticker.bearish_mentions %}
                     <span class="sentiment-bullish">Bullish</span>

@@ -3,13 +3,19 @@
 {% block title %}r/{{ subreddit_name }} Dashboard{% endblock %}

 {% block content %}
-<h1>Top 10 Tickers in r/{{ subreddit_name }}</h1>
+<h1>
+    Top 10 Tickers in r/{{ subreddit_name }}
+    <a href="/image/{{ subreddit_name }}" target="_blank" style="font-size: 0.8rem; margin-left: 1rem; font-weight: normal;">(View Daily Image)</a>
+    <!-- ADD THIS NEW LINK -->
+    <a href="/image/weekly/{{ subreddit_name }}" target="_blank" style="font-size: 0.8rem; margin-left: 1rem; font-weight: normal;">(View Weekly Image)</a>
+</h1>
 <table>
     <thead>
         <tr>
             <th>Ticker</th>
             <th>Mentions</th>
             <th>Market Cap</th>
+            <th>Closing Price</th>
             <th>Sentiment</th>
         </tr>
     </thead>
@@ -19,6 +25,14 @@
             <td><strong><a href="/deep-dive/{{ ticker.symbol }}">{{ ticker.symbol }}</a></strong></td>
             <td>{{ ticker.mention_count }}</td>
             <td>{{ ticker.market_cap | format_mc }}</td>
+            <!-- NEW COLUMN FOR CLOSING PRICE -->
+            <td>
+                {% if ticker.closing_price %}
+                    ${{ "%.2f"|format(ticker.closing_price) }}
+                {% else %}
+                    N/A
+                {% endif %}
+            </td>
             <td>
                 {% if ticker.bullish_mentions > ticker.bearish_mentions %}
                     <span class="sentiment-bullish">Bullish</span>
templates/weekly_image_view.html (new file, 93 lines)
@@ -0,0 +1,93 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>Weekly Sentiment: r/{{ subreddit_name }}</title>
+    <!-- All the <style> and <link> tags from image_view.html go here -->
+    <!-- You can just copy the entire <head> section from image_view.html -->
+    <link rel="preconnect" href="https://fonts.googleapis.com">
+    <link rel="preconnect" href="https://fonts.gstatic.com" crossorigin>
+    <link href="https://fonts.googleapis.com/css2?family=Inter:wght@400;600;700;800&display=swap" rel="stylesheet">
+    <style>
+        /* This entire style block is IDENTICAL to image_view.html */
+        body { margin: 0; padding: 2rem; font-family: 'Inter', sans-serif; background: #1a1a1a; display: flex; justify-content: center; align-items: center; min-height: 100vh; }
+        .image-container { width: 650px; background: linear-gradient(145deg, #4d302d, #1f2128); color: #ffffff; border-radius: 16px; padding: 2.5rem; box-shadow: 0 10px 30px rgba(0,0,0,0.5); text-align: center; }
+        header { display: flex; justify-content: space-between; align-items: flex-start; margin-bottom: 2rem; }
+        .title-block { text-align: left; }
+        .title-block h1 { font-size: 2.5rem; font-weight: 800; margin: 0; line-height: 1; }
+        .title-block h2 { font-size: 1.25rem; font-weight: 600; margin: 0.5rem 0 0; color: #b0b0b0; }
+        .date { font-size: 1rem; font-weight: 600; color: #c0c0c0; letter-spacing: 0.02em; }
+        table { width: 100%; border-collapse: collapse; text-align: left; }
+        th, td { padding: 1rem 0.5rem; border-bottom: 1px solid rgba(255, 255, 255, 0.1); }
+        th { font-weight: 700; text-transform: uppercase; font-size: 0.8rem; color: #a0a0a0; }
+        td { font-size: 1.1rem; font-weight: 600; }
+        tr:last-child td { border-bottom: none; }
+        td.rank { font-weight: 700; color: #d0d0d0; width: 8%; }
+        td.ticker { width: 30%; }
+        td.mentions { text-align: center; width: 18%; }
+        td.sentiment { text-align: center; width: 26%; }
+        .sentiment-bullish { color: #28a745; font-weight: 700; }
+        .sentiment-bearish { color: #dc3545; font-weight: 700; }
+        .sentiment-neutral { color: #9e9e9e; font-weight: 600; }
+        tr:nth-child(1) td.ticker { color: #d8b4fe; } tr:nth-child(6) td.ticker { color: #fca5a5; }
+        tr:nth-child(2) td.ticker { color: #a3e635; } tr:nth-child(7) td.ticker { color: #fdba74; }
+        tr:nth-child(3) td.ticker { color: #67e8f9; } tr:nth-child(8) td.ticker { color: #6ee7b7; }
+        tr:nth-child(4) td.ticker { color: #fde047; } tr:nth-child(9) td.ticker { color: #93c5fd; }
+        tr:nth-child(5) td.ticker { color: #fcd34d; } tr:nth-child(10) td.ticker { color: #d1d5db; }
+        footer { margin-top: 2.5rem; }
+        .brand-name { font-size: 1.75rem; font-weight: 800; letter-spacing: -1px; }
+        .brand-subtitle { font-size: 1rem; color: #b0b0b0; }
+    </style>
+</head>
+<body>
+    <div class="image-container">
+        <header>
+            <div class="title-block">
+                <!-- UPDATED: Title shows it's a weekly report -->
+                <h1>Weekly Sentiment</h1>
+                <h2>r/{{ subreddit_name }} - Top 10</h2>
+            </div>
+            <!-- UPDATED: Date now shows the range -->
+            <div class="date">{{ date_range }}</div>
+        </header>
+
+        <!-- The entire table structure is IDENTICAL to image_view.html -->
+        <table>
+            <thead>
+                <tr>
+                    <th class="rank">Rank</th>
+                    <th class="ticker">Ticker</th>
+                    <th class="mentions">Posts</th>
+                    <th class="mentions">Comments</th>
+                    <th class="sentiment">Sentiment</th>
+                </tr>
+            </thead>
+            <tbody>
+                {% for ticker in tickers %}
+                <tr>
+                    <td class="rank">{{ loop.index }}</td>
+                    <td class="ticker">{{ ticker.symbol }}</td>
+                    <td class="mentions">{{ ticker.post_mentions }}</td>
+                    <td class="mentions">{{ ticker.comment_mentions }}</td>
+                    <td class="sentiment">
+                        {% if ticker.bullish_mentions > ticker.bearish_mentions %}
+                            <span class="sentiment-bullish">Bullish</span>
+                        {% elif ticker.bearish_mentions > ticker.bullish_mentions %}
+                            <span class="sentiment-bearish">Bearish</span>
+                        {% else %}
+                            <span class="sentiment-neutral">Neutral</span>
+                        {% endif %}
+                    </td>
+                </tr>
+                {% endfor %}
+            </tbody>
+        </table>
+
+        <footer>
+            <div class="brand-name">RSTAT</div>
+            <div class="brand-subtitle">Reddit Stock Analysis Tool</div>
+        </footer>
+    </div>
+</body>
+</html>