Improve market data fetching routines.

2025-07-23 12:11:49 +02:00
parent fa7eddf02f
commit bd27db49e7
4 changed files with 175 additions and 20 deletions


@@ -4,9 +4,9 @@ import argparse
import json
import os
import time
import sys
import subprocess
import praw
import yfinance as yf
from dotenv import load_dotenv
from . import database
@@ -27,14 +27,6 @@ def load_subreddits(filepath):
log.error(f"Error loading config file '{filepath}': {e}")
return None
def get_financial_data(ticker_symbol):
try:
ticker = yf.Ticker(ticker_symbol)
data = { "market_cap": ticker.fast_info.get('marketCap'), "closing_price": ticker.fast_info.get('previousClose') }
return data
except Exception:
return {"market_cap": None, "closing_price": None}
def get_reddit_instance():
client_id = os.getenv("REDDIT_CLIENT_ID")
client_secret = os.getenv("REDDIT_CLIENT_SECRET")
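
The yfinance call that used to live in get_financial_data() now happens in a separate process: the update loop below shells out to rstat_tool.fetcher and parses its stdout, so a hung or crashing fetch is contained by the 30-second timeout instead of stalling the scanner. The fetcher module itself is not part of this diff; a minimal sketch, assuming it mirrors the removed function and prints a single JSON object to stdout, might look like this:

# rstat_tool/fetcher.py -- hypothetical sketch, not the actual module from this commit
import json
import sys

import yfinance as yf

def main():
    if len(sys.argv) != 2:
        print("usage: python -m rstat_tool.fetcher SYMBOL", file=sys.stderr)
        sys.exit(2)
    ticker = yf.Ticker(sys.argv[1])
    # Mirror the keys the scanner expects when it json.loads() the output.
    data = {
        "market_cap": ticker.fast_info.get("marketCap"),
        "closing_price": ticker.fast_info.get("previousClose"),
    }
    # stdout carries only the JSON payload the parent process parses.
    print(json.dumps(data))

if __name__ == "__main__":
    main()

An unhandled yfinance exception simply makes the process exit non-zero, which the caller surfaces via subprocess.CalledProcessError.
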
@@ -144,6 +136,7 @@ def main():
parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)")
parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)")
parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)")
parser.add_argument("-u", "--update-financials-only", action="store_true", help="Skip Reddit scan and only update financial data for all existing tickers.")
parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
args = parser.parse_args()
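
With the new flag the script has two distinct modes. As a usage sketch (the entry-point module name is not visible in this diff), a financials-only refresh would be invoked with something like: python -m rstat_tool.<entry module> --update-financials-only --stdout. The -d/-p/-c limits only apply to a normal Reddit scan, since they are passed to scan_subreddits and nothing else.
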
@@ -167,16 +160,75 @@ def main():
    # --- Initialize and Run ---
    database.initialize_db()
    reddit = get_reddit_instance()
    if not reddit: return
    if args.update_financials_only:
        log.critical("--- Starting Financial Data Update Only Mode (using isolated fetcher) ---")
        all_tickers = database.get_all_tickers()  # get_all_tickers() manages its own DB connection
log.info(f"Found {len(all_tickers)} tickers in the database to update.")
conn = database.get_db_connection()
for ticker in all_tickers:
symbol = ticker['symbol']
log.info(f" -> Fetching financials for {symbol}...")
try:
                # Construct the command to run our fetcher script in a new process
                command = [sys.executable, "-m", "rstat_tool.fetcher", symbol]
                # Run the command, capture the output, and set a timeout
                result = subprocess.run(
                    command,
                    capture_output=True,
                    text=True,
                    check=True,  # This will raise an exception if the script returns a non-zero exit code
                    timeout=30   # Timeout after 30 seconds
                )
                # The output from the script is a JSON string
                financials = json.loads(result.stdout)
                database.update_ticker_financials(
                    conn, ticker['id'],
                    financials.get('market_cap'),
                    financials.get('closing_price')
                )
            except subprocess.CalledProcessError as e:
                log.error(f"Fetcher script failed for {symbol}: {e.stderr}")
            except subprocess.TimeoutExpired:
                log.error(f"Fetcher script timed out for {symbol}.")
            except json.JSONDecodeError:
                log.error(f"Could not parse JSON from fetcher script for {symbol}.")
            except Exception as e:
                log.error(f"An unexpected error occurred for {symbol}: {e}")
        conn.close()
        log.critical("--- Financial Data Update Complete ---")
    else:
        # This is the normal Reddit scanning logic
        log.critical("--- Starting Reddit Scan Mode ---")
        if args.subreddit:
            subreddits_to_scan = [args.subreddit]
            log.info(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
        else:
            log.info(f"Config Scan Mode: Loading subreddits from {args.config}")
            subreddits_to_scan = load_subreddits(args.config)
        if not subreddits_to_scan:
log.error("Error: No subreddits to scan.")
return
        scan_subreddits(
            reddit,
            subreddits_to_scan,
            post_limit=args.posts,
            comment_limit=args.comments,
            days_to_scan=args.days
        )

if __name__ == "__main__":
    main()
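
The update-financials-only loop leans on database.get_all_tickers() and database.update_ticker_financials(), neither of which appears in this diff. As a rough sketch of the write path, assuming a SQLite-backed tickers table with id, symbol, market_cap and closing_price columns (the table and column names are guesses from the call sites):

# Hypothetical sketch of the database helper used above -- not from this commit.
def update_ticker_financials(conn, ticker_id, market_cap, closing_price):
    # conn is assumed to be a sqlite3.Connection; None values are stored as NULL.
    conn.execute(
        "UPDATE tickers SET market_cap = ?, closing_price = ? WHERE id = ?",
        (market_cap, closing_price, ticker_id),
    )
    conn.commit()

Passing the shared conn in from main() keeps the whole refresh on a single connection, which is why the loop opens it once before iterating and closes it only after every ticker has been processed.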