Rewritten financial data fetching so it finally works again.
@@ -40,17 +40,31 @@ def get_reddit_instance():
 def get_financial_data_via_fetcher(ticker_symbol):
     """
-    Calls the external fetcher.py script in an isolated process to get financial data.
+    Calls two separate, isolated fetcher scripts to get market cap and closing price,
+    bypassing the internal library conflict.
     """
+    financials = {"market_cap": None, "closing_price": None}
+    project_root = Path(__file__).parent.parent
+
+    # --- Call 1: Get Market Cap ---
     try:
-        command = [sys.executable, "-m", "rstat_tool.fetcher", ticker_symbol]
-        result = subprocess.run(
-            command, capture_output=True, text=True, check=True, timeout=30
-        )
-        return json.loads(result.stdout)
+        mc_script_path = project_root / 'fetch_market_cap.py'
+        command_mc = [sys.executable, str(mc_script_path), ticker_symbol]
+        result_mc = subprocess.run(command_mc, capture_output=True, text=True, check=True, timeout=30)
+        financials.update(json.loads(result_mc.stdout))
     except Exception as e:
-        log.warning(f"Fetcher script failed for {ticker_symbol}: {e}")
-        return {"market_cap": None, "closing_price": None}
+        log.warning(f"Market cap fetcher failed for {ticker_symbol}: {e}")
+
+    # --- Call 2: Get Closing Price ---
+    try:
+        cp_script_path = project_root / 'fetch_close_price.py'
+        command_cp = [sys.executable, str(cp_script_path), ticker_symbol]
+        result_cp = subprocess.run(command_cp, capture_output=True, text=True, check=True, timeout=30)
+        financials.update(json.loads(result_cp.stdout))
+    except Exception as e:
+        log.warning(f"Closing price fetcher failed for {ticker_symbol}: {e}")
+
+    return financials
+
 def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100, days_to_scan=1):
     """ Scans subreddits and uses the fetcher to get financial data. """
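Note on the hunk above: fetch_market_cap.py and fetch_close_price.py are not part of this diff. The only contract the new caller imposes is that each script, given a ticker symbol as its single argument, prints one JSON object to stdout whose keys match the financials dict ("market_cap" or "closing_price"); anything else (a warning, a missing value) is absorbed by the except block. A minimal sketch of fetch_market_cap.py under that contract follows; the use of yfinance as the data source is an assumption, not something this commit shows.

# Hypothetical sketch of fetch_market_cap.py (not part of this commit).
import json
import sys

import yfinance as yf  # assumed data source; any backend that yields the same JSON works


def main():
    ticker_symbol = sys.argv[1]
    market_cap = None
    try:
        # yfinance exposes market cap via the Ticker.info dict; the key may be absent.
        market_cap = yf.Ticker(ticker_symbol).info.get("marketCap")
    except Exception:
        pass  # the caller treats a missing value as None
    # The caller does json.loads(result.stdout) followed by financials.update(...),
    # so the only hard requirement is a JSON object on stdout with this key.
    print(json.dumps({"market_cap": market_cap}))


if __name__ == "__main__":
    main()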
@@ -98,13 +112,28 @@ def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100,
                     ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                     database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
 
             # --- THIS IS THE CRITICAL LOGIC THAT WAS MISSING ---
             for ticker_symbol in all_tickers_found_in_post:
+                log.debug(f" DEBUG: Checking ticker '{ticker_symbol}' for financial update.")
                 ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
                 ticker_info = database.get_ticker_info(conn, ticker_id)
-                if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > database.MARKET_CAP_REFRESH_INTERVAL):
+
+                # Log the state we are about to check
+                log.debug(f" -> Ticker Info from DB: last_updated = {ticker_info['last_updated']}")
+
+                needs_update = False
+                if not ticker_info['last_updated']:
+                    log.debug(" -> Condition MET: 'last_updated' is NULL. Needs update.")
+                    needs_update = True
+                elif (current_time - ticker_info['last_updated'] > database.MARKET_CAP_REFRESH_INTERVAL):
+                    log.debug(" -> Condition MET: Data is older than 24 hours. Needs update.")
+                    needs_update = True
+                else:
+                    log.debug(" -> Condition NOT MET: Data is fresh. Skipping update.")
+
+                if needs_update:
                     log.info(f" -> Fetching financial data for {ticker_symbol}...")
                     financials = get_financial_data_via_fetcher(ticker_symbol)
+                    log.debug(f" -> Fetched data: {financials}")
                     database.update_ticker_financials(
                         conn, ticker_id,
                         financials.get('market_cap'),
@@ -131,16 +160,17 @@ def main():
     """Main function to run the Reddit stock analysis tool."""
     parser = argparse.ArgumentParser(description="Analyze stock ticker mentions on Reddit.", formatter_class=argparse.RawTextHelpFormatter)
 
-    parser.add_argument("--update-financials-only", action="store_true", help="Skip Reddit scan and only update financial data for all existing tickers.")
-    parser.add_argument("--config", default="subreddits.json", help="Path to the JSON file for scanning.")
-    parser.add_argument("--subreddit", help="Scan a single subreddit, ignoring the config file.")
-    parser.add_argument("--days", type=int, default=1, help="Number of past days to scan for new posts.")
+    parser.add_argument("-u", "--update-financials-only", action="store_true", help="Skip Reddit scan and only update financial data for all existing tickers.")
+    parser.add_argument("-f", "--config", default="subreddits.json", help="Path to the JSON file for scanning.")
+    parser.add_argument("-s", "--subreddit", help="Scan a single subreddit, ignoring the config file.")
+    parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.")
     parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.")
     parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.")
+    parser.add_argument("--debug", action="store_true", help="Enable detailed debug logging to the console.")
     parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
 
     args = parser.parse_args()
-    setup_logging(console_verbose=args.stdout)
+    setup_logging(console_verbose=args.stdout, debug_mode=args.debug)
 
     database.initialize_db()
 
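Usage note: with the short flags introduced above, a scan of a single subreddit for the past few days with debug logging, and a financials-only refresh, could be invoked roughly as follows. The entry-point module name is an assumption (only rstat_tool.fetcher appears in the removed code), so adjust it to the project's actual CLI; the subreddit name is illustrative.

python -m rstat_tool.main -s stocks -d 3 -p 200 -c 100 --debug --stdout
python -m rstat_tool.main -u --stdout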