Rewrote financial data fetching so that it works again.
This commit is contained in:
38
fetch_close_price.py
Normal file
38
fetch_close_price.py
Normal file
@@ -0,0 +1,38 @@
|
||||
# fetch_close_price.py
# Single-purpose helper: fetch the most recent closing price using the
# stable Ticker.history() code path and emit it as JSON on stdout.
import sys
import json
import yfinance as yf
import pandas as pd
import logging

# Keep this isolated process quiet; the parent process parses stdout as JSON.
logging.getLogger("yfinance").setLevel(logging.CRITICAL)

if __name__ == "__main__":
    if len(sys.argv) < 2:
        # A ticker symbol argument is mandatory.
        sys.exit(1)

    symbol = sys.argv[1]

    try:
        # Instead of the global yf.download(), use the Ticker object's
        # .history() method: a different internal code path that has
        # proven stable.
        history = yf.Ticker(symbol).history(period="2d", auto_adjust=False)

        price = None
        if not history.empty:
            raw_close = history['Close'].iloc[-1]
            if pd.notna(raw_close):
                price = float(raw_close)

        # Success: emit the (possibly null) price as JSON and exit cleanly.
        # Note: sys.exit raises SystemExit, which the broad handler below
        # does not catch (SystemExit is not an Exception subclass).
        print(json.dumps({"closing_price": price}))
        sys.exit(0)
    except Exception:
        # Any failure: emit a null payload and signal the error via exit code.
        print(json.dumps({"closing_price": None}))
        sys.exit(1)
|
28
fetch_market_cap.py
Normal file
28
fetch_market_cap.py
Normal file
@@ -0,0 +1,28 @@
|
||||
# fetch_market_cap.py
# Single-purpose helper: fetch the market cap and emit it as JSON on stdout.
import sys
import json
import yfinance as yf
import logging

# Keep this isolated process quiet; the parent process parses stdout as JSON.
logging.getLogger("yfinance").setLevel(logging.CRITICAL)

if __name__ == "__main__":
    if len(sys.argv) < 2:
        # A ticker symbol argument is mandatory.
        sys.exit(1)

    symbol = sys.argv[1]

    try:
        # .info is a dict; a missing 'marketCap' key simply yields None.
        cap = yf.Ticker(symbol).info.get('marketCap')

        # Success: emit the (possibly null) market cap and exit cleanly.
        print(json.dumps({"market_cap": cap}))
        sys.exit(0)
    except Exception:
        # Any failure: emit a null payload and signal the error via exit code.
        print(json.dumps({"market_cap": None}))
        sys.exit(1)
|
@@ -1,46 +0,0 @@
|
||||
# rstat_tool/fetcher.py
# A dedicated, isolated script for fetching financial data.

import sys
import json
import yfinance as yf
import pandas as pd
import logging

# Suppress verbose yfinance logging in this isolated process
logging.getLogger("yfinance").setLevel(logging.CRITICAL)


def get_financial_data_isolated(ticker_symbol):
    """
    Fetch market cap and the most recent closing price for *ticker_symbol*.

    Returns a dict with keys "market_cap" and "closing_price"; either value
    may be None when the corresponding lookup fails.
    """
    market_cap = None
    closing_price = None
    try:
        frame = yf.download(
            ticker_symbol, period="2d", progress=False, auto_adjust=False
        )
        if not frame.empty:
            raw_close = frame['Close'].iloc[-1]
            if pd.notna(raw_close):
                closing_price = float(raw_close)
        try:
            market_cap = yf.Ticker(ticker_symbol).info.get('marketCap')
        except Exception:
            # Non-critical failure: proceed without the market cap.
            pass
        return {"market_cap": market_cap, "closing_price": closing_price}
    except Exception:
        # Critical failure: report both values as unknown.
        return {"market_cap": None, "closing_price": None}


if __name__ == "__main__":
    if len(sys.argv) < 2:
        # This script requires a ticker symbol as an argument.
        sys.exit(1)

    ticker_to_fetch = sys.argv[1]
    result = get_financial_data_isolated(ticker_to_fetch)
    # Print the result as a JSON string to standard output
    print(json.dumps(result))
|
@@ -3,32 +3,35 @@
|
||||
import logging
|
||||
import sys
|
||||
|
||||
# Get the root logger for our application. Other modules will import this.
|
||||
logger = logging.getLogger("rstat_app")
|
||||
|
||||
def setup_logging(console_verbose=False):
|
||||
def setup_logging(console_verbose=False, debug_mode=False):
|
||||
"""
|
||||
Configures the application's logger and captures logs from yfinance.
|
||||
Configures the application's logger with a new DEBUG level.
|
||||
"""
|
||||
logger.setLevel(logging.INFO)
|
||||
logger.propagate = False
|
||||
# The logger itself must be set to the lowest possible level (DEBUG).
|
||||
log_level = logging.DEBUG if debug_mode else logging.INFO
|
||||
logger.setLevel(log_level)
|
||||
|
||||
logger.propagate = False
|
||||
if logger.hasHandlers():
|
||||
logger.handlers.clear()
|
||||
|
||||
# File Handler (Always Verbose)
|
||||
# File Handler (Always verbose at INFO level or higher)
|
||||
file_handler = logging.FileHandler("rstat.log", mode='a')
|
||||
file_handler.setLevel(logging.INFO)
|
||||
file_handler.setLevel(logging.INFO) # We don't need debug spam in the file usually
|
||||
file_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
|
||||
file_handler.setFormatter(file_formatter)
|
||||
logger.addHandler(file_handler)
|
||||
|
||||
# Console Handler (Verbosity is Controlled)
|
||||
# Console Handler (Verbosity is controlled)
|
||||
console_handler = logging.StreamHandler(sys.stdout)
|
||||
console_formatter = logging.Formatter('%(message)s')
|
||||
console_handler.setFormatter(console_formatter)
|
||||
|
||||
if console_verbose:
|
||||
if debug_mode:
|
||||
console_handler.setLevel(logging.DEBUG)
|
||||
elif console_verbose:
|
||||
console_handler.setLevel(logging.INFO)
|
||||
else:
|
||||
console_handler.setLevel(logging.CRITICAL)
|
||||
@@ -41,5 +44,5 @@ def setup_logging(console_verbose=False):
|
||||
if yfinance_logger.hasHandlers():
|
||||
yfinance_logger.handlers.clear()
|
||||
yfinance_logger.setLevel(logging.WARNING)
|
||||
yfinance_logger.addHandler(console_handler) # Use the same console handler
|
||||
yfinance_logger.addHandler(file_handler) # Use the same file handler
|
||||
yfinance_logger.addHandler(console_handler)
|
||||
yfinance_logger.addHandler(file_handler)
|
@@ -40,17 +40,31 @@ def get_reddit_instance():
|
||||
|
||||
def _run_fetcher(script_path, ticker_symbol):
    """Run one isolated fetcher script and return its parsed JSON payload.

    Raises on any subprocess failure, timeout, or malformed JSON output;
    the caller decides how to degrade.
    """
    command = [sys.executable, str(script_path), ticker_symbol]
    result = subprocess.run(
        command, capture_output=True, text=True, check=True, timeout=30
    )
    return json.loads(result.stdout)


def get_financial_data_via_fetcher(ticker_symbol):
    """
    Calls two separate, isolated fetcher scripts to get market cap and closing price,
    bypassing the internal library conflict.

    Returns a dict with keys "market_cap" and "closing_price"; either value
    is None when its fetcher fails.
    """
    financials = {"market_cap": None, "closing_price": None}
    project_root = Path(__file__).parent.parent

    # The two fetches are deliberately independent: a market-cap failure
    # must not prevent the closing-price fetch, and vice versa.
    try:
        financials.update(
            _run_fetcher(project_root / 'fetch_market_cap.py', ticker_symbol)
        )
    except Exception as e:
        log.warning(f"Market cap fetcher failed for {ticker_symbol}: {e}")

    try:
        financials.update(
            _run_fetcher(project_root / 'fetch_close_price.py', ticker_symbol)
        )
    except Exception as e:
        log.warning(f"Closing price fetcher failed for {ticker_symbol}: {e}")

    return financials
|
||||
|
||||
def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100, days_to_scan=1):
|
||||
""" Scans subreddits and uses the fetcher to get financial data. """
|
||||
@@ -98,13 +112,28 @@ def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100,
|
||||
ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
|
||||
database.add_mention(conn, ticker_id, subreddit_id, submission.id, 'comment', int(comment.created_utc), comment_sentiment)
|
||||
|
||||
# --- THIS IS THE CRITICAL LOGIC THAT WAS MISSING ---
|
||||
for ticker_symbol in all_tickers_found_in_post:
|
||||
log.debug(f" DEBUG: Checking ticker '{ticker_symbol}' for financial update.")
|
||||
ticker_id = database.get_or_create_entity(conn, 'tickers', 'symbol', ticker_symbol)
|
||||
ticker_info = database.get_ticker_info(conn, ticker_id)
|
||||
if not ticker_info['last_updated'] or (current_time - ticker_info['last_updated'] > database.MARKET_CAP_REFRESH_INTERVAL):
|
||||
|
||||
# Log the state we are about to check
|
||||
log.debug(f" -> Ticker Info from DB: last_updated = {ticker_info['last_updated']}")
|
||||
|
||||
needs_update = False
|
||||
if not ticker_info['last_updated']:
|
||||
log.debug(" -> Condition MET: 'last_updated' is NULL. Needs update.")
|
||||
needs_update = True
|
||||
elif (current_time - ticker_info['last_updated'] > database.MARKET_CAP_REFRESH_INTERVAL):
|
||||
log.debug(" -> Condition MET: Data is older than 24 hours. Needs update.")
|
||||
needs_update = True
|
||||
else:
|
||||
log.debug(" -> Condition NOT MET: Data is fresh. Skipping update.")
|
||||
|
||||
if needs_update:
|
||||
log.info(f" -> Fetching financial data for {ticker_symbol}...")
|
||||
financials = get_financial_data_via_fetcher(ticker_symbol)
|
||||
log.debug(f" -> Fetched data: {financials}")
|
||||
database.update_ticker_financials(
|
||||
conn, ticker_id,
|
||||
financials.get('market_cap'),
|
||||
@@ -131,16 +160,17 @@ def main():
|
||||
"""Main function to run the Reddit stock analysis tool."""
|
||||
parser = argparse.ArgumentParser(description="Analyze stock ticker mentions on Reddit.", formatter_class=argparse.RawTextHelpFormatter)
|
||||
|
||||
parser.add_argument("--update-financials-only", action="store_true", help="Skip Reddit scan and only update financial data for all existing tickers.")
|
||||
parser.add_argument("--config", default="subreddits.json", help="Path to the JSON file for scanning.")
|
||||
parser.add_argument("--subreddit", help="Scan a single subreddit, ignoring the config file.")
|
||||
parser.add_argument("--days", type=int, default=1, help="Number of past days to scan for new posts.")
|
||||
parser.add_argument("-u", "--update-financials-only", action="store_true", help="Skip Reddit scan and only update financial data for all existing tickers.")
|
||||
parser.add_argument("-f", "--config", default="subreddits.json", help="Path to the JSON file for scanning.")
|
||||
parser.add_argument("-s", "--subreddit", help="Scan a single subreddit, ignoring the config file.")
|
||||
parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.")
|
||||
parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.")
|
||||
parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.")
|
||||
parser.add_argument("--debug", action="store_true", help="Enable detailed debug logging to the console.")
|
||||
parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
|
||||
|
||||
args = parser.parse_args()
|
||||
setup_logging(console_verbose=args.stdout)
|
||||
setup_logging(console_verbose=args.stdout, debug_mode=args.debug)
|
||||
|
||||
database.initialize_db()
|
||||
|
||||
|
Reference in New Issue
Block a user