# main.py import argparse import json import os import time import praw import yfinance as yf from dotenv import load_dotenv import database from ticker_extractor import extract_tickers # Load environment variables from .env file load_dotenv() # How old (in seconds) market cap data can be before we refresh it. 24 hours = 86400 seconds. MARKET_CAP_REFRESH_INTERVAL = 86400 def load_subreddits(filepath): # (This function is unchanged) try: with open(filepath, 'r') as f: data = json.load(f) return data.get("subreddits", []) except FileNotFoundError: print(f"Error: The file '{filepath}' was not found.") return None except json.JSONDecodeError: print(f"Error: Could not decode JSON from '{filepath}'.") return None def get_market_cap(ticker_symbol): """Fetches the market capitalization for a given stock ticker from yfinance.""" try: ticker = yf.Ticker(ticker_symbol) # .info can be slow; .fast_info is a lighter alternative market_cap = ticker.fast_info.get('marketCap') return market_cap if market_cap else None except Exception: return None def get_reddit_instance(): # (This function is unchanged) client_id = os.getenv("REDDIT_CLIENT_ID") client_secret = os.getenv("REDDIT_CLIENT_SECRET") user_agent = os.getenv("REDDIT_USER_AGENT") if not all([client_id, client_secret, user_agent]): print("Error: Reddit API credentials not found in .env file.") return None return praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent=user_agent) def scan_subreddits(reddit, subreddits_list, post_limit=25): """Scans subreddits, stores mentions, and updates market caps in the database.""" conn = database.get_db_connection() print(f"\nScanning {len(subreddits_list)} subreddits for top {post_limit} posts...") for subreddit_name in subreddits_list: try: subreddit_id = database.get_or_create_entity(conn, 'subreddits', 'name', subreddit_name) subreddit = reddit.subreddit(subreddit_name) print(f"Scanning r/{subreddit_name}...") for submission in subreddit.hot(limit=post_limit): full_text = submission.title + " " + submission.selftext tickers_in_post = extract_tickers(full_text) for ticker_symbol in set(tickers_in_post): ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol) database.add_mention( conn, ticker_id=ticker_id, subreddit_id=subreddit_id, post_id=submission.id, timestamp=int(submission.created_utc) ) # --- Check if market cap needs updating --- ticker_info = database.get_ticker_info(conn, ticker_id) current_time = int(time.time()) if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > MARKET_CAP_REFRESH_INTERVAL): print(f" -> Fetching market cap for {ticker_symbol}...") market_cap = get_market_cap(ticker_symbol) if market_cap: database.update_ticker_market_cap(conn, ticker_id, market_cap) else: # If fetch fails, still update the timestamp so we don't try again for 24 hours database.update_ticker_market_cap(conn, ticker_id, ticker_info['market_cap']) # Keep old value except Exception as e: print(f"Could not scan r/{subreddit_name}. Error: {e}") conn.close() print("\n--- Scan Complete ---") def main(): """Main function to run the Reddit stock analysis tool.""" parser = argparse.ArgumentParser(description="Analyze stock ticker mentions on Reddit.") parser.add_argument("config_file", help="Path to the JSON file containing subreddits.") args = parser.parse_args() # --- Part 1: Initialize --- database.initialize_db() subreddits = load_subreddits(args.config_file) if not subreddits: return reddit = get_reddit_instance() if not reddit: return # --- Part 2: Scan and Store --- scan_subreddits(reddit, subreddits) # --- Part 3: Generate and Display Report --- database.generate_summary_report() if __name__ == "__main__": main()