Improve market data fetching routines.
This commit is contained in:
def get_all_scanned_subreddits():
    """Retrieve the distinct names of all subreddits recorded in the database.

    Returns:
        A list of subreddit name strings, sorted alphabetically ascending.
    """
    conn = get_db_connection()
    results = conn.execute("SELECT DISTINCT name FROM subreddits ORDER BY name ASC;").fetchall()
    conn.close()
    # Fixed: the original had this return statement duplicated; the second
    # copy was unreachable dead code.
    return [row['name'] for row in results]
|
||||
|
||||
def get_all_tickers():
    """Return the (id, symbol) row for every ticker stored in the database."""
    connection = get_db_connection()
    ticker_rows = connection.execute("SELECT id, symbol FROM tickers;").fetchall()
    connection.close()
    return ticker_rows
|
46
rstat_tool/fetcher.py
Normal file
46
rstat_tool/fetcher.py
Normal file
@@ -0,0 +1,46 @@
|
||||
# rstat_tool/fetcher.py
|
||||
# A dedicated, isolated script for fetching financial data.
|
||||
|
||||
import sys
|
||||
import json
|
||||
import yfinance as yf
|
||||
import pandas as pd
|
||||
import logging
|
||||
|
||||
# Suppress verbose yfinance logging in this isolated process
|
||||
logging.getLogger("yfinance").setLevel(logging.CRITICAL)
|
||||
|
||||
def get_financial_data_isolated(ticker_symbol):
    """
    Fetches market cap and the most recent closing price for a ticker.

    Designed to run in an isolated subprocess (see __main__ below) so that a
    hang or crash inside yfinance cannot take down the calling process.

    Args:
        ticker_symbol: The ticker symbol to look up, e.g. "AAPL".

    Returns:
        A dict with keys "market_cap" and "closing_price"; either value is
        None when it could not be fetched.
    """
    market_cap = None
    closing_price = None
    try:
        data = yf.download(
            ticker_symbol, period="2d", progress=False, auto_adjust=False
        )
        if not data.empty:
            closes = data['Close']
            # Newer yfinance versions return MultiIndex columns even for a
            # single ticker, which makes data['Close'] a one-column DataFrame.
            # The original code then failed inside pd.notna()/float() and the
            # outer except silently discarded the price. Squeeze down to a
            # Series before positional indexing.
            if isinstance(closes, pd.DataFrame):
                closes = closes.iloc[:, 0]
            last_close_raw = closes.iloc[-1]
            if pd.notna(last_close_raw):
                closing_price = float(last_close_raw)
        try:
            market_cap = yf.Ticker(ticker_symbol).info.get('marketCap')
        except Exception:
            # Non-critical failure: we can proceed without a market cap.
            pass
        return {"market_cap": market_cap, "closing_price": closing_price}
    except Exception:
        # Critical failure (network error, invalid symbol, ...): report both
        # values as unavailable instead of crashing the fetcher process.
        return {"market_cap": None, "closing_price": None}
|
||||
|
||||
if __name__ == "__main__":
    # This script requires exactly one argument: the ticker symbol to fetch.
    if len(sys.argv) < 2:
        sys.exit(1)

    # Fetch the data and emit it as a JSON string on standard output so the
    # parent process can capture and parse it.
    print(json.dumps(get_financial_data_isolated(sys.argv[1])))
|
@@ -4,9 +4,9 @@ import argparse
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
|
||||
import sys
|
||||
import subprocess
|
||||
import praw
|
||||
import yfinance as yf
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from . import database
|
||||
@@ -27,14 +27,6 @@ def load_subreddits(filepath):
|
||||
log.error(f"Error loading config file '{filepath}': {e}")
|
||||
return None
|
||||
|
||||
def get_financial_data(ticker_symbol):
    """Fetch market cap and previous close for a ticker via yfinance fast_info.

    Returns a dict with "market_cap" and "closing_price" keys; both values
    are None when the lookup fails for any reason.
    """
    try:
        fast = yf.Ticker(ticker_symbol).fast_info
        return {
            "market_cap": fast.get('marketCap'),
            "closing_price": fast.get('previousClose'),
        }
    except Exception:
        return {"market_cap": None, "closing_price": None}
|
||||
|
||||
def get_reddit_instance():
|
||||
client_id = os.getenv("REDDIT_CLIENT_ID")
|
||||
client_secret = os.getenv("REDDIT_CLIENT_SECRET")
|
||||
@@ -144,6 +136,7 @@ def main():
|
||||
parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)")
|
||||
parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)")
|
||||
parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)")
|
||||
parser.add_argument("-u", "--update-financials-only", action="store_true", help="Skip Reddit scan and only update financial data for all existing tickers.")
|
||||
parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
|
||||
|
||||
args = parser.parse_args()
|
||||
@@ -167,16 +160,75 @@ def main():
|
||||
# --- Initialize and Run ---
|
||||
database.initialize_db()
|
||||
|
||||
reddit = get_reddit_instance()
|
||||
if not reddit: return
|
||||
if args.update_financials_only:
|
||||
log.critical("--- Starting Financial Data Update Only Mode (using isolated fetcher) ---")
|
||||
all_tickers = database.get_all_tickers() # No longer need to manage 'conn' here
|
||||
log.info(f"Found {len(all_tickers)} tickers in the database to update.")
|
||||
|
||||
conn = database.get_db_connection()
|
||||
for ticker in all_tickers:
|
||||
symbol = ticker['symbol']
|
||||
log.info(f" -> Fetching financials for {symbol}...")
|
||||
|
||||
try:
|
||||
# --- THIS IS THE NEW LOGIC ---
|
||||
# Construct the command to run our fetcher script in a new process
|
||||
command = [sys.executable, "-m", "rstat_tool.fetcher", symbol]
|
||||
|
||||
# Run the command, capture the output, and set a timeout
|
||||
result = subprocess.run(
|
||||
command,
|
||||
capture_output=True,
|
||||
text=True,
|
||||
check=True, # This will raise an exception if the script returns a non-zero exit code
|
||||
timeout=30 # Timeout after 30 seconds
|
||||
)
|
||||
|
||||
# The output from the script is a JSON string
|
||||
financials = json.loads(result.stdout)
|
||||
|
||||
database.update_ticker_financials(
|
||||
conn, ticker['id'],
|
||||
financials.get('market_cap'),
|
||||
financials.get('closing_price')
|
||||
)
|
||||
# --- END OF NEW LOGIC ---
|
||||
|
||||
except subprocess.CalledProcessError as e:
|
||||
log.error(f"Fetcher script failed for {symbol}: {e.stderr}")
|
||||
except subprocess.TimeoutExpired:
|
||||
log.error(f"Fetcher script timed out for {symbol}.")
|
||||
except json.JSONDecodeError:
|
||||
log.error(f"Could not parse JSON from fetcher script for {symbol}.")
|
||||
except Exception as e:
|
||||
log.error(f"An unexpected error occurred for {symbol}: {e}")
|
||||
|
||||
scan_subreddits(
|
||||
reddit,
|
||||
subreddits_to_scan,
|
||||
post_limit=args.posts,
|
||||
comment_limit=args.comments,
|
||||
days_to_scan=args.days
|
||||
)
|
||||
conn.close()
|
||||
log.critical("--- Financial Data Update Complete ---")
|
||||
else:
|
||||
# This is the normal Reddit scanning logic
|
||||
log.critical("--- Starting Reddit Scan Mode ---")
|
||||
if args.subreddit:
|
||||
subreddits_to_scan = [args.subreddit]
|
||||
log.info(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
|
||||
else:
|
||||
log.info(f"Config Scan Mode: Loading subreddits from {args.config}")
|
||||
subreddits_to_scan = load_subreddits(args.config)
|
||||
|
||||
if not subreddits_to_scan:
|
||||
log.error("Error: No subreddits to scan.")
|
||||
return
|
||||
|
||||
reddit = get_reddit_instance()
|
||||
if not reddit: return
|
||||
|
||||
scan_subreddits(
|
||||
reddit,
|
||||
subreddits_to_scan,
|
||||
post_limit=args.posts,
|
||||
comment_limit=args.comments,
|
||||
days_to_scan=args.days
|
||||
)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
Reference in New Issue
Block a user