Add dashboard support for closing prices, post/comment mention types, and weekly summaries.
This commit is contained in:
@@ -3,6 +3,7 @@
|
||||
import sqlite3
|
||||
import time
|
||||
from .ticker_extractor import COMMON_WORDS_BLACKLIST
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
DB_FILE = "reddit_stocks.db"
|
||||
|
||||
@@ -25,6 +26,7 @@ def initialize_db():
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
symbol TEXT NOT NULL UNIQUE,
|
||||
market_cap INTEGER,
|
||||
closing_price REAL,
|
||||
last_updated INTEGER
|
||||
)
|
||||
""")
|
||||
@@ -44,11 +46,12 @@ def initialize_db():
|
||||
ticker_id INTEGER,
|
||||
subreddit_id INTEGER,
|
||||
post_id TEXT NOT NULL,
|
||||
mention_type TEXT NOT NULL, -- Can be 'post' or 'comment'
|
||||
mention_timestamp INTEGER NOT NULL,
|
||||
sentiment_score REAL,
|
||||
FOREIGN KEY (ticker_id) REFERENCES tickers (id),
|
||||
FOREIGN KEY (subreddit_id) REFERENCES subreddits (id),
|
||||
UNIQUE(ticker_id, post_id, sentiment_score)
|
||||
UNIQUE(ticker_id, post_id, mention_type, sentiment_score)
|
||||
)
|
||||
""")
|
||||
|
||||
@@ -103,13 +106,12 @@ def clean_stale_tickers():
|
||||
print(f"Cleanup complete. Removed {deleted_count} records.")
|
||||
|
||||
|
||||
def add_mention(conn, ticker_id, subreddit_id, post_id, timestamp, sentiment):
|
||||
"""Adds a new mention with its sentiment score to the database."""
|
||||
def add_mention(conn, ticker_id, subreddit_id, post_id, mention_type, timestamp, sentiment):
|
||||
cursor = conn.cursor()
|
||||
try:
|
||||
cursor.execute(
|
||||
"INSERT INTO mentions (ticker_id, subreddit_id, post_id, mention_timestamp, sentiment_score) VALUES (?, ?, ?, ?, ?)",
|
||||
(ticker_id, subreddit_id, post_id, timestamp, sentiment)
|
||||
"INSERT INTO mentions (ticker_id, subreddit_id, post_id, mention_type, mention_timestamp, sentiment_score) VALUES (?, ?, ?, ?, ?, ?)",
|
||||
(ticker_id, subreddit_id, post_id, mention_type, timestamp, sentiment)
|
||||
)
|
||||
conn.commit()
|
||||
except sqlite3.IntegrityError:
|
||||
@@ -127,11 +129,14 @@ def get_or_create_entity(conn, table_name, column_name, value):
|
||||
conn.commit()
|
||||
return cursor.lastrowid
|
||||
|
||||
def update_ticker_financials(conn, ticker_id, market_cap, closing_price):
    """Updates the financials and timestamp for a specific ticker.

    Args:
        conn: An open sqlite3 connection to the reddit_stocks database.
        ticker_id: Primary key of the row in the ``tickers`` table.
        market_cap: Latest market capitalization to store.
        closing_price: Latest closing price to store.
    """
    cursor = conn.cursor()
    # Stamp the refresh time (Unix epoch seconds) so stale rows can be
    # detected later (e.g. by clean_stale_tickers).
    current_timestamp = int(time.time())
    cursor.execute(
        "UPDATE tickers SET market_cap = ?, closing_price = ?, last_updated = ? WHERE id = ?",
        (market_cap, closing_price, current_timestamp, ticker_id)
    )
    conn.commit()
|
||||
|
||||
def get_ticker_info(conn, ticker_id):
|
||||
@@ -169,27 +174,28 @@ def generate_summary_report(limit=20):
|
||||
conn.close()
|
||||
|
||||
def get_overall_summary(limit=50):
    """Gets the top tickers across all subreddits for the dashboard.

    Args:
        limit: Maximum number of tickers to return (default 50).

    Returns:
        Rows of (symbol, market_cap, closing_price, mention_count,
        bullish_mentions, bearish_mentions, neutral_mentions), ordered by
        total mention count, descending.
    """
    conn = get_db_connection()
    # Sentiment buckets: > 0.1 bullish, < -0.1 bearish, in between neutral.
    # closing_price must appear in GROUP BY because it is selected alongside
    # the aggregates.
    query = """
        SELECT
            t.symbol, t.market_cap, t.closing_price,
            COUNT(m.id) as mention_count,
            SUM(CASE WHEN m.sentiment_score > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
            SUM(CASE WHEN m.sentiment_score < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
            SUM(CASE WHEN m.sentiment_score BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
        FROM mentions m JOIN tickers t ON m.ticker_id = t.id
        GROUP BY t.symbol, t.market_cap, t.closing_price
        ORDER BY mention_count DESC LIMIT ?;
    """
    results = conn.execute(query, (limit,)).fetchall()
    conn.close()
    return results
|
||||
|
||||
def get_subreddit_summary(subreddit_name, limit=50):
|
||||
"""Gets the top tickers for a specific subreddit for the dashboard."""
|
||||
conn = get_db_connection()
|
||||
query = """
|
||||
SELECT
|
||||
t.symbol, t.market_cap, COUNT(m.id) as mention_count,
|
||||
t.symbol, t.market_cap, t.closing_price, -- Added closing_price
|
||||
COUNT(m.id) as mention_count,
|
||||
SUM(CASE WHEN m.sentiment_score > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
|
||||
SUM(CASE WHEN m.sentiment_score < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
|
||||
SUM(CASE WHEN m.sentiment_score BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
|
||||
@@ -197,7 +203,8 @@ def get_subreddit_summary(subreddit_name, limit=50):
|
||||
JOIN tickers t ON m.ticker_id = t.id
|
||||
JOIN subreddits s ON m.subreddit_id = s.id
|
||||
WHERE s.name = ?
|
||||
GROUP BY t.symbol, t.market_cap ORDER BY mention_count DESC LIMIT ?;
|
||||
GROUP BY t.symbol, t.market_cap, t.closing_price -- Added closing_price
|
||||
ORDER BY mention_count DESC LIMIT ?;
|
||||
"""
|
||||
results = conn.execute(query, (subreddit_name, limit)).fetchall()
|
||||
conn.close()
|
||||
@@ -205,7 +212,6 @@ def get_subreddit_summary(subreddit_name, limit=50):
|
||||
|
||||
def get_all_scanned_subreddits():
|
||||
"""Gets a unique list of all subreddits we have data for."""
|
||||
# --- THIS IS THE CORRECTED LINE ---
|
||||
conn = get_db_connection()
|
||||
results = conn.execute("SELECT DISTINCT name FROM subreddits ORDER BY name ASC;").fetchall()
|
||||
conn.close()
|
||||
@@ -245,4 +251,60 @@ def get_deep_dive_details(ticker_symbol):
|
||||
"""
|
||||
results = conn.execute(query, (ticker_symbol,)).fetchall()
|
||||
conn.close()
|
||||
return results
|
||||
|
||||
def get_image_view_summary(subreddit_name):
    """
    Gets a summary of tickers for the image view, including post, comment,
    and sentiment counts.

    Args:
        subreddit_name: Name of the subreddit to summarize (matched against
            subreddits.name).

    Returns:
        Up to 10 rows of (symbol, post_mentions, comment_mentions,
        bullish_mentions, bearish_mentions), ordered by total mentions.
    """
    conn = get_db_connection()
    # COUNT(CASE WHEN ... THEN 1 END) counts only matching rows: non-matching
    # rows yield NULL, which COUNT skips. Sentiment thresholds mirror the
    # dashboard queries (> 0.1 bullish, < -0.1 bearish).
    query = """
        SELECT
            t.symbol,
            COUNT(CASE WHEN m.mention_type = 'post' THEN 1 END) as post_mentions,
            COUNT(CASE WHEN m.mention_type = 'comment' THEN 1 END) as comment_mentions,
            COUNT(CASE WHEN m.sentiment_score > 0.1 THEN 1 END) as bullish_mentions,
            COUNT(CASE WHEN m.sentiment_score < -0.1 THEN 1 END) as bearish_mentions
        FROM mentions m
        JOIN tickers t ON m.ticker_id = t.id
        JOIN subreddits s ON m.subreddit_id = s.id
        WHERE s.name = ?
        GROUP BY t.symbol
        ORDER BY (post_mentions + comment_mentions) DESC
        LIMIT 10;
    """
    results = conn.execute(query, (subreddit_name,)).fetchall()
    conn.close()
    return results
|
||||
|
||||
def get_weekly_summary_for_subreddit(subreddit_name):
    """
    Gets a weekly summary for a specific subreddit for the image view.

    Same shape as get_image_view_summary, but restricted to mentions from
    the last 7 days.

    Args:
        subreddit_name: Name of the subreddit to summarize.

    Returns:
        Up to 10 rows of (symbol, post_mentions, comment_mentions,
        bullish_mentions, bearish_mentions), ordered by total mentions.
    """
    conn = get_db_connection()

    # mention_timestamp is stored as Unix epoch seconds (it is compared
    # directly below), so compute the cutoff in epoch seconds.
    # NOTE: the previous implementation used datetime.utcnow().timestamp();
    # utcnow() returns a *naive* datetime, and .timestamp() interprets naive
    # datetimes as LOCAL time, shifting the cutoff by the machine's UTC
    # offset. Plain epoch arithmetic avoids the timezone pitfall entirely.
    seven_days_ago_timestamp = int(time.time()) - 7 * 24 * 60 * 60

    query = """
        SELECT
            t.symbol,
            COUNT(CASE WHEN m.mention_type = 'post' THEN 1 END) as post_mentions,
            COUNT(CASE WHEN m.mention_type = 'comment' THEN 1 END) as comment_mentions,
            COUNT(CASE WHEN m.sentiment_score > 0.1 THEN 1 END) as bullish_mentions,
            COUNT(CASE WHEN m.sentiment_score < -0.1 THEN 1 END) as bearish_mentions
        FROM mentions m
        JOIN tickers t ON m.ticker_id = t.id
        JOIN subreddits s ON m.subreddit_id = s.id
        WHERE s.name = ? AND m.mention_timestamp >= ?
        GROUP BY t.symbol
        ORDER BY (post_mentions + comment_mentions) DESC
        LIMIT 10;
    """
    results = conn.execute(query, (subreddit_name, seven_days_ago_timestamp)).fetchall()
    conn.close()
    return results
|
Reference in New Issue
Block a user