Improve market data fetching routines.

This commit is contained in:
2025-07-23 12:11:49 +02:00
parent fa7eddf02f
commit bd27db49e7
4 changed files with 175 additions and 20 deletions

View File

@@ -374,3 +374,10 @@ def get_all_scanned_subreddits():
    results = conn.execute("SELECT DISTINCT name FROM subreddits ORDER BY name ASC;").fetchall()
    conn.close()
    return [row['name'] for row in results]
def get_all_tickers():
    """Retrieve the ID and symbol of every ticker in the database.

    Returns:
        A list of result rows, each carrying the ticker's ``id`` and
        ``symbol`` columns (presumably sqlite3.Row objects, given the
        ``row['name']`` access used elsewhere in this module — confirm).
    """
    conn = get_db_connection()
    try:
        # try/finally guarantees the connection is released even if the
        # query raises, so a transient SQL error cannot leak the handle.
        return conn.execute("SELECT id, symbol FROM tickers;").fetchall()
    finally:
        conn.close()

46
rstat_tool/fetcher.py Normal file
View File

@@ -0,0 +1,46 @@
# rstat_tool/fetcher.py
# A dedicated, isolated script for fetching financial data.
import sys
import json
import yfinance as yf
import pandas as pd
import logging
# Suppress verbose yfinance logging in this isolated process
logging.getLogger("yfinance").setLevel(logging.CRITICAL)
def get_financial_data_isolated(ticker_symbol):
    """
    Fetch the market cap and most recent closing price for a ticker.

    Runs in a throwaway subprocess (see ``__main__`` below) so a hung or
    crashing yfinance call cannot take down the parent scanner.

    Args:
        ticker_symbol: Symbol to look up, e.g. "MSFT".

    Returns:
        dict with keys "market_cap" and "closing_price"; either value is
        None when it could not be retrieved.
    """
    market_cap = None
    closing_price = None
    try:
        data = yf.download(
            ticker_symbol, period="2d", progress=False, auto_adjust=False
        )
        if not data.empty:
            last_close_raw = data['Close'].iloc[-1]
            # Newer yfinance releases return MultiIndex columns even for a
            # single ticker, so the last row of 'Close' can be a one-element
            # Series instead of a scalar; float() on a Series is deprecated
            # and will eventually raise. Squeeze it down to a scalar first.
            if isinstance(last_close_raw, pd.Series):
                last_close_raw = last_close_raw.squeeze()
            if pd.notna(last_close_raw):
                closing_price = float(last_close_raw)
        try:
            market_cap = yf.Ticker(ticker_symbol).info.get('marketCap')
        except Exception:
            # Non-critical failure: proceed without the market cap.
            pass
        return {"market_cap": market_cap, "closing_price": closing_price}
    except Exception:
        # Critical failure (network error, delisted ticker, upstream API
        # change): report both values as unavailable rather than crashing.
        return {"market_cap": None, "closing_price": None}
if __name__ == "__main__":
    # The parent process must pass exactly one argument: the ticker symbol.
    if len(sys.argv) < 2:
        sys.exit(1)
    payload = get_financial_data_isolated(sys.argv[1])
    # Emit the result as a JSON string on stdout; the caller captures and
    # parses it from the subprocess output.
    print(json.dumps(payload))

View File

@@ -4,9 +4,9 @@ import argparse
import json import json
import os import os
import time import time
import sys
import subprocess
import praw import praw
import yfinance as yf
from dotenv import load_dotenv from dotenv import load_dotenv
from . import database from . import database
@@ -27,14 +27,6 @@ def load_subreddits(filepath):
log.error(f"Error loading config file '{filepath}': {e}") log.error(f"Error loading config file '{filepath}': {e}")
return None return None
def get_financial_data(ticker_symbol):
try:
ticker = yf.Ticker(ticker_symbol)
data = { "market_cap": ticker.fast_info.get('marketCap'), "closing_price": ticker.fast_info.get('previousClose') }
return data
except Exception:
return {"market_cap": None, "closing_price": None}
def get_reddit_instance(): def get_reddit_instance():
client_id = os.getenv("REDDIT_CLIENT_ID") client_id = os.getenv("REDDIT_CLIENT_ID")
client_secret = os.getenv("REDDIT_CLIENT_SECRET") client_secret = os.getenv("REDDIT_CLIENT_SECRET")
@@ -144,6 +136,7 @@ def main():
parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)") parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)")
parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)") parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)")
parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)") parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)")
parser.add_argument("-u", "--update-financials-only", action="store_true", help="Skip Reddit scan and only update financial data for all existing tickers.")
parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.") parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
args = parser.parse_args() args = parser.parse_args()
@@ -167,16 +160,75 @@ def main():
# --- Initialize and Run --- # --- Initialize and Run ---
database.initialize_db() database.initialize_db()
reddit = get_reddit_instance() if args.update_financials_only:
if not reddit: return log.critical("--- Starting Financial Data Update Only Mode (using isolated fetcher) ---")
all_tickers = database.get_all_tickers() # No longer need to manage 'conn' here
log.info(f"Found {len(all_tickers)} tickers in the database to update.")
scan_subreddits( conn = database.get_db_connection()
reddit, for ticker in all_tickers:
subreddits_to_scan, symbol = ticker['symbol']
post_limit=args.posts, log.info(f" -> Fetching financials for {symbol}...")
comment_limit=args.comments,
days_to_scan=args.days try:
) # --- THIS IS THE NEW LOGIC ---
# Construct the command to run our fetcher script in a new process
command = [sys.executable, "-m", "rstat_tool.fetcher", symbol]
# Run the command, capture the output, and set a timeout
result = subprocess.run(
command,
capture_output=True,
text=True,
check=True, # This will raise an exception if the script returns a non-zero exit code
timeout=30 # Timeout after 30 seconds
)
# The output from the script is a JSON string
financials = json.loads(result.stdout)
database.update_ticker_financials(
conn, ticker['id'],
financials.get('market_cap'),
financials.get('closing_price')
)
# --- END OF NEW LOGIC ---
except subprocess.CalledProcessError as e:
log.error(f"Fetcher script failed for {symbol}: {e.stderr}")
except subprocess.TimeoutExpired:
log.error(f"Fetcher script timed out for {symbol}.")
except json.JSONDecodeError:
log.error(f"Could not parse JSON from fetcher script for {symbol}.")
except Exception as e:
log.error(f"An unexpected error occurred for {symbol}: {e}")
conn.close()
log.critical("--- Financial Data Update Complete ---")
else:
# This is the normal Reddit scanning logic
log.critical("--- Starting Reddit Scan Mode ---")
if args.subreddit:
subreddits_to_scan = [args.subreddit]
log.info(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
else:
log.info(f"Config Scan Mode: Loading subreddits from {args.config}")
subreddits_to_scan = load_subreddits(args.config)
if not subreddits_to_scan:
log.error("Error: No subreddits to scan.")
return
reddit = get_reddit_instance()
if not reddit: return
scan_subreddits(
reddit,
subreddits_to_scan,
post_limit=args.posts,
comment_limit=args.comments,
days_to_scan=args.days
)
if __name__ == "__main__":
    main()

50
yfinance_test.py Normal file
View File

@@ -0,0 +1,50 @@
# yfinance_test.py
# A standalone script to diagnose the persistent yfinance issue.
import yfinance as yf
import logging

# Simple logger configuration so failing calls show full tracebacks.
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

# One very common ticker plus two symbols taken from the failing logs.
TICKERS_TO_TEST = ["MSFT", "AEBI", "AEHR"]

print("--- Starting YFINANCE Diagnostic Test ---")

for ticker_symbol in TICKERS_TO_TEST:
    print(f"\n--- Testing Ticker: {ticker_symbol} ---")

    # Test 1: fetch fundamentals via Ticker().info.
    try:
        logging.info(f"Attempting to create Ticker object and get .info for {ticker_symbol}...")
        cap = yf.Ticker(ticker_symbol).info.get('marketCap')
        if cap is None:
            logging.warning(f"PARTIAL SUCCESS: .info call for {ticker_symbol} worked, but no market cap was found.")
        else:
            logging.info(f"SUCCESS: Got market cap for {ticker_symbol}: {cap}")
    except Exception:
        logging.error(f"FAILURE: An error occurred during the Ticker().info call for {ticker_symbol}.", exc_info=True)

    # Test 2: fetch recent price history via yf.download().
    try:
        logging.info(f"Attempting yf.download() for {ticker_symbol}...")
        frame = yf.download(
            ticker_symbol,
            period="2d",
            progress=False,
            auto_adjust=False
        )
        if frame.empty:
            logging.warning(f"PARTIAL SUCCESS: yf.download() for {ticker_symbol} worked, but returned no data (likely delisted).")
        else:
            logging.info(f"SUCCESS: yf.download() for {ticker_symbol} returned {len(frame)} rows of data.")
    except Exception:
        logging.error(f"FAILURE: An error occurred during the yf.download() call for {ticker_symbol}.", exc_info=True)

print("\n--- YFINANCE Diagnostic Test Complete ---")