# rstat_tool/database.py
import sqlite3
import time

from .ticker_extractor import COMMON_WORDS_BLACKLIST

DB_FILE = "reddit_stocks.db"


def get_db_connection():
    """Establishes a connection to the SQLite database."""
    conn = sqlite3.connect(DB_FILE)
    conn.row_factory = sqlite3.Row
    return conn


def initialize_db():
    """
    Initializes the database and creates the necessary tables if they don't exist.
    """
    conn = get_db_connection()
    cursor = conn.cursor()

    # --- Create tickers table ---
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS tickers (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            symbol TEXT NOT NULL UNIQUE,
            market_cap INTEGER,
            last_updated INTEGER
        )
    """)

    # --- Create subreddits table ---
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS subreddits (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            name TEXT NOT NULL UNIQUE
        )
    """)

    # --- Create mentions table ---
    # The UNIQUE constraint rejects exact repeats of (ticker, post, score)
    # while still allowing multiple mentions of the same ticker in one post
    # when their sentiment scores differ.
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS mentions (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            ticker_id INTEGER,
            subreddit_id INTEGER,
            post_id TEXT NOT NULL,
            mention_timestamp INTEGER NOT NULL,
            sentiment_score REAL,
            FOREIGN KEY (ticker_id) REFERENCES tickers (id),
            FOREIGN KEY (subreddit_id) REFERENCES subreddits (id),
            UNIQUE(ticker_id, post_id, sentiment_score)
        )
    """)

    # --- Create posts table ---
    cursor.execute("""
        CREATE TABLE IF NOT EXISTS posts (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            post_id TEXT NOT NULL UNIQUE,
            title TEXT NOT NULL,
            post_url TEXT,
            subreddit_id INTEGER,
            post_timestamp INTEGER,
            comment_count INTEGER,
            avg_comment_sentiment REAL,
            FOREIGN KEY (subreddit_id) REFERENCES subreddits (id)
        )
    """)

    conn.commit()
    conn.close()
    print("Database initialized successfully.")


def clean_stale_tickers():
    """
    Removes tickers and their associated mentions from the database
    if the ticker symbol exists in the COMMON_WORDS_BLACKLIST.
    """
    print("\n--- Cleaning Stale Tickers from Database ---")
    conn = get_db_connection()
    cursor = conn.cursor()

    # Build a parameterized IN (...) clause, one placeholder per blacklisted word.
    placeholders = ','.join('?' for _ in COMMON_WORDS_BLACKLIST)
    query = f"SELECT id, symbol FROM tickers WHERE symbol IN ({placeholders})"
    cursor.execute(query, tuple(COMMON_WORDS_BLACKLIST))
    stale_tickers = cursor.fetchall()

    if not stale_tickers:
        print("No stale tickers to clean.")
        conn.close()
        return

    for ticker in stale_tickers:
        ticker_id = ticker['id']
        ticker_symbol = ticker['symbol']
        print(f"Removing stale ticker '{ticker_symbol}' (ID: {ticker_id})...")
        cursor.execute("DELETE FROM mentions WHERE ticker_id = ?", (ticker_id,))
        cursor.execute("DELETE FROM tickers WHERE id = ?", (ticker_id,))

    # total_changes counts every row affected on this connection, i.e. all
    # mention and ticker rows deleted in the loop above.
    deleted_count = conn.total_changes
    conn.commit()
    conn.close()
    print(f"Cleanup complete. Removed {deleted_count} records.")


def add_mention(conn, ticker_id, subreddit_id, post_id, timestamp, sentiment):
    """Adds a new mention with its sentiment score to the database."""
    cursor = conn.cursor()
    try:
        cursor.execute(
            "INSERT INTO mentions (ticker_id, subreddit_id, post_id, mention_timestamp, sentiment_score) VALUES (?, ?, ?, ?, ?)",
            (ticker_id, subreddit_id, post_id, timestamp, sentiment)
        )
        conn.commit()
    except sqlite3.IntegrityError:
        # A duplicate (ticker_id, post_id, sentiment_score) hit the UNIQUE
        # constraint; this mention is already recorded, so skip it silently.
        pass


def get_or_create_entity(conn, table_name, column_name, value):
    """Generic function to get or create an entity and return its ID."""
    # table_name and column_name are internal constants, never user input,
    # so interpolating them into the SQL is safe; the value is parameterized.
    cursor = conn.cursor()
    cursor.execute(f"SELECT id FROM {table_name} WHERE {column_name} = ?", (value,))
    result = cursor.fetchone()
    if result:
        return result['id']
    else:
        cursor.execute(f"INSERT INTO {table_name} ({column_name}) VALUES (?)", (value,))
        conn.commit()
        return cursor.lastrowid
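
# A sketch of the typical ingestion flow these helpers support (values are
# hypothetical; the real scraper supplies them from Reddit data):
#
#     conn = get_db_connection()
#     ticker_id = get_or_create_entity(conn, "tickers", "symbol", "AAPL")
#     subreddit_id = get_or_create_entity(conn, "subreddits", "name", "stocks")
#     add_mention(conn, ticker_id, subreddit_id, "abc123", int(time.time()), 0.42)
#     conn.close()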


def update_ticker_market_cap(conn, ticker_id, market_cap):
    """Updates the market cap and timestamp for a specific ticker."""
    cursor = conn.cursor()
    current_timestamp = int(time.time())
    cursor.execute(
        "UPDATE tickers SET market_cap = ?, last_updated = ? WHERE id = ?",
        (market_cap, current_timestamp, ticker_id)
    )
    conn.commit()
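
# A hedged sketch of where market_cap might come from (assuming yfinance,
# which this module does not itself import or require):
#
#     import yfinance as yf
#     cap = yf.Ticker("AAPL").info.get("marketCap")  # may be None
#     if cap:
#         update_ticker_market_cap(conn, ticker_id, int(cap))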


def get_ticker_info(conn, ticker_id):
    """Retrieves all info for a specific ticker by its ID."""
    cursor = conn.cursor()
    cursor.execute("SELECT * FROM tickers WHERE id = ?", (ticker_id,))
    return cursor.fetchone()


def generate_summary_report(limit=20):
    """Queries the DB to generate a summary for the command-line tool."""
    print(f"\n--- Top {limit} Tickers by Mention Count ---")
    conn = get_db_connection()
    cursor = conn.cursor()
    query = """
        SELECT
            t.symbol, t.market_cap, COUNT(m.id) as mention_count,
            SUM(CASE WHEN m.sentiment_score > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
            SUM(CASE WHEN m.sentiment_score < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
            SUM(CASE WHEN m.sentiment_score BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
        FROM mentions m JOIN tickers t ON m.ticker_id = t.id
        GROUP BY t.symbol, t.market_cap ORDER BY mention_count DESC LIMIT ?;
    """
    results = cursor.execute(query, (limit,)).fetchall()

    header = f"{'Ticker':<8} | {'Mentions':<8} | {'Bullish':<8} | {'Bearish':<8} | {'Neutral':<8} | {'Market Cap':<15}"
    print(header)
    print("-" * len(header))
    for row in results:
        market_cap_str = "N/A"
        if row['market_cap'] and row['market_cap'] > 0:
            mc = row['market_cap']
            if mc >= 1e12:
                market_cap_str = f"${mc/1e12:.2f}T"
            elif mc >= 1e9:
                market_cap_str = f"${mc/1e9:.2f}B"
            else:
                market_cap_str = f"${mc/1e6:.2f}M"
        print(f"{row['symbol']:<8} | {row['mention_count']:<8} | {row['bullish_mentions']:<8} | {row['bearish_mentions']:<8} | {row['neutral_mentions']:<8} | {market_cap_str:<15}")
    conn.close()


def get_overall_summary(limit=50):
    """Gets the top tickers across all subreddits for the dashboard."""
    conn = get_db_connection()
    query = """
        SELECT
            t.symbol, t.market_cap, COUNT(m.id) as mention_count,
            SUM(CASE WHEN m.sentiment_score > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
            SUM(CASE WHEN m.sentiment_score < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
            SUM(CASE WHEN m.sentiment_score BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
        FROM mentions m JOIN tickers t ON m.ticker_id = t.id
        GROUP BY t.symbol, t.market_cap ORDER BY mention_count DESC LIMIT ?;
    """
    results = conn.execute(query, (limit,)).fetchall()
    conn.close()
    return results


def get_subreddit_summary(subreddit_name, limit=50):
    """Gets the top tickers for a specific subreddit for the dashboard."""
    conn = get_db_connection()
    query = """
        SELECT
            t.symbol, t.market_cap, COUNT(m.id) as mention_count,
            SUM(CASE WHEN m.sentiment_score > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
            SUM(CASE WHEN m.sentiment_score < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
            SUM(CASE WHEN m.sentiment_score BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
        FROM mentions m
        JOIN tickers t ON m.ticker_id = t.id
        JOIN subreddits s ON m.subreddit_id = s.id
        WHERE s.name = ?
        GROUP BY t.symbol, t.market_cap ORDER BY mention_count DESC LIMIT ?;
    """
    results = conn.execute(query, (subreddit_name, limit)).fetchall()
    conn.close()
    return results
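
# Example dashboard call (the subreddit name is illustrative; it returns an
# empty list if no data has been scanned for that subreddit):
#
#     rows = get_subreddit_summary("stocks", limit=10)
#     for row in rows:
#         print(row['symbol'], row['mention_count'])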


def get_all_scanned_subreddits():
    """Gets a unique list of all subreddits we have data for."""
    conn = get_db_connection()
    results = conn.execute("SELECT DISTINCT name FROM subreddits ORDER BY name ASC;").fetchall()
    conn.close()
    return [row['name'] for row in results]


def add_or_update_post_analysis(conn, post_data):
    """
    Inserts a new post analysis record or updates an existing one.
    This prevents duplicate entries for the same post.
    """
    cursor = conn.cursor()
    # Use the UNIQUE post_id to replace old data with new on conflict.
    cursor.execute(
        """
        INSERT INTO posts (post_id, title, post_url, subreddit_id, post_timestamp, comment_count, avg_comment_sentiment)
        VALUES (:post_id, :title, :post_url, :subreddit_id, :post_timestamp, :comment_count, :avg_comment_sentiment)
        ON CONFLICT(post_id) DO UPDATE SET
            comment_count = excluded.comment_count,
            avg_comment_sentiment = excluded.avg_comment_sentiment;
        """,
        post_data
    )
    conn.commit()
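
# post_data must supply every named parameter in the INSERT above. A
# hypothetical example (field values are illustrative only):
#
#     post_data = {
#         "post_id": "abc123",
#         "title": "Example post title",
#         "post_url": "https://example.com/post",
#         "subreddit_id": 1,
#         "post_timestamp": int(time.time()),
#         "comment_count": 42,
#         "avg_comment_sentiment": 0.15,
#     }
#     add_or_update_post_analysis(conn, post_data)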


def get_deep_dive_details(ticker_symbol):
    """
    Gets all analyzed posts that mention a specific ticker.
    """
    conn = get_db_connection()
    query = """
        SELECT DISTINCT p.*, s.name as subreddit_name FROM posts p
        JOIN mentions m ON p.post_id = m.post_id
        JOIN tickers t ON m.ticker_id = t.id
        JOIN subreddits s ON p.subreddit_id = s.id
        WHERE t.symbol = ?
        ORDER BY p.post_timestamp DESC;
    """
    results = conn.execute(query, (ticker_symbol,)).fetchall()
    conn.close()
    return results
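

if __name__ == "__main__":
    # A minimal smoke test, assuming it is acceptable to create or touch
    # reddit_stocks.db in the current working directory. This is a sketch
    # for manual checks, not the tool's real entry point.
    initialize_db()
    generate_summary_report(limit=10)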