Refactor main reddit scraping logic.

2025-07-25 16:56:25 +02:00
parent eb6de197f0
commit c5a91c9d72


@@ -66,8 +66,83 @@ def get_financial_data_via_fetcher(ticker_symbol):
    return financials


# --- HELPER FUNCTION: Contains all the optimized logic for one post ---
def _process_submission(submission, subreddit_id, conn, comment_limit, fetch_financials):
    """
    Processes a single Reddit submission with optimized logic.
    - Uses a single loop over comments.
    - Caches ticker IDs to reduce DB lookups.
    """
    current_time = time.time()

    # 1. Initialize data collectors for this post
    tickers_in_title = set(extract_tickers(submission.title))
    all_tickers_found_in_post = set(tickers_in_title)
    all_comment_sentiments = []
    ticker_id_cache = {}  # In-memory cache for ticker IDs for this post
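    # PRAW note: replace_more(limit=0) removes unresolved "MoreComments"
    # placeholders without expanding them, so the slice below triggers no
    # extra API round-trips.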
    submission.comments.replace_more(limit=0)
    all_comments = submission.comments.list()[:comment_limit]

    # 2. --- SINGLE LOOP OVER COMMENTS ---
    # We gather all necessary information in one pass.
    for comment in all_comments:
        comment_sentiment = get_sentiment_score(comment.body)
        all_comment_sentiments.append(comment_sentiment)  # For the deep dive

        tickers_in_comment = set(extract_tickers(comment.body))
        all_tickers_found_in_post.update(tickers_in_comment)
        # No early skip for ticker-less comments: when the title has tickers,
        # such comments must still count as mentions for the title's tickers.
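        # Hybrid attribution rationale: a ticker in the title implies the whole
        # thread is about it, so every comment's sentiment accrues to it; with
        # no title ticker, only comments that name a ticker themselves count.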
        # Apply the hybrid logic
        if tickers_in_title:
            # If the title has tickers, every comment is a mention for them
            for ticker_symbol in tickers_in_title:
                if ticker_symbol not in ticker_id_cache:
                    ticker_id_cache[ticker_symbol] = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                ticker_id = ticker_id_cache[ticker_symbol]
                database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
        else:
            # If no title tickers, only direct mentions in comments count
            for ticker_symbol in tickers_in_comment:
                if ticker_symbol not in ticker_id_cache:
                    ticker_id_cache[ticker_symbol] = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                ticker_id = ticker_id_cache[ticker_symbol]
                database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
    # 3. Process title mentions (if any)
    if tickers_in_title:
        log.info(f" -> Title Mention(s): {', '.join(tickers_in_title)}. Attributing all comments.")
        post_sentiment = get_sentiment_score(submission.title)
        for ticker_symbol in tickers_in_title:
            if ticker_symbol not in ticker_id_cache:
                ticker_id_cache[ticker_symbol] = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
            ticker_id = ticker_id_cache[ticker_symbol]
            database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'post', int(submission.created_utc), post_sentiment)
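    # The last_updated check below throttles external lookups: each ticker's
    # financials are refreshed at most once per database.MARKET_CAP_REFRESH_INTERVAL,
    # however many posts mention it.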
    # 4. Fetch financial data if enabled
    if fetch_financials:
        for ticker_symbol in all_tickers_found_in_post:
            # Not guaranteed to be cached: a ticker seen only in comments under
            # a title with its own tickers never went through the loops above.
            if ticker_symbol not in ticker_id_cache:
                ticker_id_cache[ticker_symbol] = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
            ticker_id = ticker_id_cache[ticker_symbol]
            ticker_info = database.get_ticker_info(conn, ticker_id)
            if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > database.MARKET_CAP_REFRESH_INTERVAL):
                log.info(f" -> Fetching financial data for {ticker_symbol}...")
                financials = get_financial_data_via_fetcher(ticker_symbol)
                database.update_ticker_financials(conn, ticker_id, financials.get('market_cap'), financials.get('closing_price'))
    # 5. Save deep dive analysis
    avg_sentiment = sum(all_comment_sentiments) / len(all_comment_sentiments) if all_comment_sentiments else 0
    post_analysis_data = {
        "post_id": submission.id, "title": submission.title,
        "post_url": f"https://reddit.com{submission.permalink}", "subreddit_id": subreddit_id,
        "post_timestamp": int(submission.created_utc), "comment_count": len(all_comments),
        "avg_comment_sentiment": avg_sentiment
    }
    database.add_or_update_post_analysis(conn, post_analysis_data)
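
# A possible follow-up (sketch only, not part of this commit): the three
# cache-or-create blocks above could share one hypothetical local helper.
#
#     def _cached_ticker_id(conn, cache, symbol):
#         if symbol not in cache:
#             cache[symbol] = database.get_or_create_entity(conn, 'tickers', 'symbol', symbol)
#         return cache[symbol]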

def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100, days_to_scan=1, fetch_financials=True):
    """ Scans subreddits and uses the fetcher to get financial data. """
    conn = database.get_db_connection()
    post_age_limit = days_to_scan * 86400
    current_time = time.time()
@@ -88,55 +163,8 @@ def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100,
log.info(f" -> Reached posts older than the {days_to_scan}-day limit.")
break
                tickers_in_title = set(extract_tickers(submission.title))
                all_tickers_found_in_post = set(tickers_in_title)
                submission.comments.replace_more(limit=0)
                all_comments = submission.comments.list()[:comment_limit]
                if tickers_in_title:
                    log.info(f" -> Title Mention(s): {', '.join(tickers_in_title)}. Attributing all comments.")
                    post_sentiment = get_sentiment_score(submission.title)
                    for ticker_symbol in tickers_in_title:
                        ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                        database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'post', int(submission.created_utc), post_sentiment)
                    for comment in all_comments:
                        comment_sentiment = get_sentiment_score(comment.body)
                        for ticker_symbol in tickers_in_title:
                            ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                            database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
                else:
                    for comment in all_comments:
                        tickers_in_comment = set(extract_tickers(comment.body))
                        if tickers_in_comment:
                            all_tickers_found_in_post.update(tickers_in_comment)
                            comment_sentiment = get_sentiment_score(comment.body)
                            for ticker_symbol in tickers_in_comment:
                                ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                                database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
                if fetch_financials:
                    for ticker_symbol in all_tickers_found_in_post:
                        ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                        ticker_info = database.get_ticker_info(conn, ticker_id)
                        if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > database.MARKET_CAP_REFRESH_INTERVAL):
                            log.info(f" -> Fetching financial data for {ticker_symbol}...")
                            financials = get_financial_data_via_fetcher(ticker_symbol)
                            database.update_ticker_financials(
                                conn, ticker_id,
                                financials.get('market_cap'),
                                financials.get('closing_price')
                            )
                all_comment_sentiments = [get_sentiment_score(c.body) for c in all_comments]
                avg_sentiment = sum(all_comment_sentiments) / len(all_comment_sentiments) if all_comment_sentiments else 0
                post_analysis_data = {
                    "post_id": submission.id, "title": submission.title,
                    "post_url": f"https://reddit.com{submission.permalink}", "subreddit_id": subreddit_id,
                    "post_timestamp": int(submission.created_utc), "comment_count": len(all_comments),
                    "avg_comment_sentiment": avg_sentiment
                }
                database.add_or_update_post_analysis(conn, post_analysis_data)
                # Call the new helper function for each post
                _process_submission(submission, subreddit_id, conn, comment_limit, fetch_financials)
        except Exception as e:
            log.error(f"Could not scan r/{normalized_sub_name}. Error: {e}", exc_info=True)