Added better dashboarding.
@@ -15,21 +15,23 @@ from .sentiment_analyzer import get_sentiment_score
 load_dotenv()
 
 MARKET_CAP_REFRESH_INTERVAL = 86400
 POST_AGE_LIMIT = 86400
 
 def load_subreddits(filepath):
     try:
         with open(filepath, 'r') as f:
             return json.load(f).get("subreddits", [])
     except (FileNotFoundError, json.JSONDecodeError) as e:
-        print(f"Error loading config: {e}")
+        print(f"Error loading config file '{filepath}': {e}")
         return None
 
-def get_market_cap(ticker_symbol):
+def get_financial_data(ticker_symbol):
     try:
         ticker = yf.Ticker(ticker_symbol)
-        return ticker.fast_info.get('marketCap')
+        data = { "market_cap": ticker.fast_info.get('marketCap'), "closing_price": ticker.fast_info.get('previousClose') }
+        return data
     except Exception:
-        return None
+        return {"market_cap": None, "closing_price": None}
 
 def get_reddit_instance():
     client_id = os.getenv("REDDIT_CLIENT_ID")
@@ -38,59 +40,55 @@ def get_reddit_instance():
     if not all([client_id, client_secret, user_agent]):
         print("Error: Reddit API credentials not found in .env file.")
         return None
 
     # --- THIS IS THE CORRECTED LINE ---
     # The argument is 'client_secret', not 'secret_client'.
     return praw.Reddit(client_id=client_id, client_secret=client_secret, user_agent=user_agent)
 
-def scan_subreddits(reddit, subreddits_list, post_limit=25, comment_limit=100):
+def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100, days_to_scan=1):
     """Scans subreddits, analyzes posts and comments, and stores results in the database."""
     conn = database.get_db_connection()
+    post_age_limit = days_to_scan * 86400
+    current_time = time.time()
 
-    print(f"\nScanning {len(subreddits_list)} subreddits (Top {post_limit} posts, {comment_limit} comments/post)...")
+    print(f"\nScanning {len(subreddits_list)} subreddit(s) for NEW posts in the last {days_to_scan} day(s)...")
     for subreddit_name in subreddits_list:
         try:
             subreddit_id = database.get_or_create_entity(conn, 'subreddits', 'name', subreddit_name)
             subreddit = reddit.subreddit(subreddit_name)
             print(f"Scanning r/{subreddit_name}...")
 
-            for submission in subreddit.hot(limit=post_limit):
-
-                # --- LOGIC PART 1: PROCESS INDIVIDUAL MENTIONS ---
-                # 1a. Process the Post Title and Body for mentions
+            for submission in subreddit.new(limit=post_limit):
+                if (current_time - submission.created_utc) > post_age_limit:
+                    print(f" -> Reached posts older than the {days_to_scan}-day limit. Moving to next subreddit.")
+                    break
 
                 post_text = submission.title + " " + submission.selftext
                 tickers_in_post = extract_tickers(post_text)
-                post_sentiment = get_sentiment_score(submission.title)
-
-                for ticker_symbol in set(tickers_in_post):
-                    ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
-                    database.add_mention(conn, ticker_id, subreddit_id, submission.id, int(submission.created_utc), post_sentiment)
-
-                    ticker_info = database.get_ticker_info(conn, ticker_id)
-                    current_time = int(time.time())
-                    if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > MARKET_CAP_REFRESH_INTERVAL):
-                        print(f" -> Fetching market cap for {ticker_symbol}...")
-                        market_cap = get_market_cap(ticker_symbol)
-                        database.update_ticker_market_cap(conn, ticker_id, market_cap or ticker_info['market_cap'])
-
-                # 1b. Process Comments for mentions
-                submission.comments.replace_more(limit=0)
-                for comment in submission.comments.list()[:comment_limit]:
-                    tickers_in_comment = extract_tickers(comment.body)
-                    if not tickers_in_comment:
-                        continue
-                    comment_sentiment = get_sentiment_score(comment.body)
-                    for ticker_symbol in set(tickers_in_comment):
-                        ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
-                        database.add_mention(conn, ticker_id, subreddit_id, submission.id, int(comment.created_utc), comment_sentiment)
-
-                # --- LOGIC PART 2: DEEP DIVE ANALYSIS ---
+
+                if tickers_in_post:
+                    post_sentiment = get_sentiment_score(submission.title)
+                    for ticker_symbol in set(tickers_in_post):
+                        ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
+                        database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'post', int(submission.created_utc), post_sentiment)
+
+                        ticker_info = database.get_ticker_info(conn, ticker_id)
+                        if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > MARKET_CAP_REFRESH_INTERVAL):
+                            print(f" -> Fetching financial data for {ticker_symbol}...")
+                            financials = get_financial_data(ticker_symbol)
+                            database.update_ticker_financials(
+                                conn, ticker_id,
+                                financials['market_cap'] or ticker_info['market_cap'],
+                                financials['closing_price'] or ticker_info['closing_price']
+                            )
+
+                submission.comments.replace_more(limit=0)
+                all_comment_sentiments = []
+                for comment in submission.comments.list()[:comment_limit]:
+                    all_comment_sentiments.append(get_sentiment_score(comment.body))
+
+                    tickers_in_comment = extract_tickers(comment.body)
+                    if tickers_in_comment:
+                        comment_sentiment = get_sentiment_score(comment.body)
+                        for ticker_symbol in set(tickers_in_comment):
+                            ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
+                            database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
+
+                avg_sentiment = sum(all_comment_sentiments) / len(all_comment_sentiments) if all_comment_sentiments else 0
 
                 post_analysis_data = {
                     "post_id": submission.id, "title": submission.title,
                     "post_url": f"https://reddit.com{submission.permalink}",
@@ -98,7 +96,7 @@ def scan_subreddits(reddit, subreddits_list, post_limit=25, comment_limit=100):
                     "comment_count": len(all_comment_sentiments), "avg_comment_sentiment": avg_sentiment
                 }
                 database.add_or_update_post_analysis(conn, post_analysis_data)
 
         except Exception as e:
             print(f"Could not scan r/{subreddit_name}. Error: {e}")
 
@@ -106,23 +104,47 @@ def scan_subreddits(reddit, subreddits_list, post_limit=25, comment_limit=100):
     print("\n--- Scan Complete ---")
 
 def main():
     """Main function to run the Reddit stock analysis tool."""
     parser = argparse.ArgumentParser(description="Analyze stock ticker mentions on Reddit.", formatter_class=argparse.RawTextHelpFormatter)
-    parser.add_argument("config_file", help="Path to the JSON file containing subreddits.")
-    parser.add_argument("-p", "--posts", type=int, default=25, help="Number of posts to scan per subreddit.\n(Default: 25)")
+    parser.add_argument("--config", default="subreddits.json", help="Path to the JSON file containing subreddits.\n(Default: subreddits.json)")
+    parser.add_argument("--subreddit", help="Scan a single subreddit, ignoring the config file.")
+    parser.add_argument("--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)")
+    parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)")
     parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)")
-    parser.add_argument("-l", "--limit", type=int, default=20, help="Number of tickers to show in the final report.\n(Default: 20)")
+    parser.add_argument("-l", "--limit", type=int, default=20, help="Number of tickers to show in the CLI report.\n(Default: 20)")
     args = parser.parse_args()
 
+    # --- THIS IS THE CORRECTED LOGIC BLOCK ---
+    if args.subreddit:
+        # If --subreddit is used, create a list with just that one.
+        subreddits_to_scan = [args.subreddit]
+        print(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
+    else:
+        # Otherwise, load from the config file.
+        print(f"Config Scan Mode: Loading subreddits from {args.config}")
+        # Use the correct argument name: args.config
+        subreddits_to_scan = load_subreddits(args.config)
+
+    if not subreddits_to_scan:
+        print("Error: No subreddits to scan. Please check your config file or --subreddit argument.")
+        return
+
     # --- Initialize and Run ---
     database.initialize_db()
     database.clean_stale_tickers()
 
-    subreddits = load_subreddits(args.config_file)
-    if not subreddits: return
-
     reddit = get_reddit_instance()
     if not reddit: return
 
-    scan_subreddits(reddit, subreddits, post_limit=args.posts, comment_limit=args.comments)
+    scan_subreddits(
+        reddit,
+        subreddits_to_scan,
+        post_limit=args.posts,
+        comment_limit=args.comments,
+        days_to_scan=args.days
+    )
     database.generate_summary_report(limit=args.limit)
 
 if __name__ == "__main__":
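For reference, a minimal usage sketch of the updated entry point. It assumes the changed module is importable as `scanner` and that Reddit credentials are already set in the `.env` file; the actual package and script names are not shown in this diff, and the subreddit names below are only examples.

# Usage sketch -- "scanner" is a hypothetical import name for the module
# changed above; only the function names and signatures come from the diff.
import json

import scanner  # hypothetical module name

# load_subreddits() reads a JSON file with a top-level "subreddits" list,
# matching the new --config default of "subreddits.json".
with open("subreddits.json", "w") as f:
    json.dump({"subreddits": ["stocks", "wallstreetbets"]}, f)

# Roughly what main() does for: --config subreddits.json --days 3 -p 100 -c 50 -l 10
scanner.database.initialize_db()
reddit = scanner.get_reddit_instance()
if reddit:
    scanner.scan_subreddits(
        reddit,
        scanner.load_subreddits("subreddits.json"),
        post_limit=100,
        comment_limit=50,
        days_to_scan=3,
    )
    scanner.database.generate_summary_report(limit=10)

Passing --subreddit instead of --config skips the file entirely and scans a single community, which is handy for spot-checking one subreddit without editing the config.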