Add functionality to only update top tickers.
This commit is contained in:
@@ -349,3 +349,31 @@ def get_ticker_by_symbol(symbol):
|
||||
result = cursor.fetchone()
|
||||
conn.close()
|
||||
return result
|
||||
|
||||
def get_top_daily_ticker_symbols(limit=10):
    """Return the most-mentioned ticker symbols from the last 24 hours.

    Args:
        limit: Maximum number of symbols to return. Defaults to 10,
            matching the Top 10 daily dashboard.

    Returns:
        list[str]: Ticker symbols ordered by mention count, descending.
    """
    conn = get_db_connection()
    try:
        # Cutoff is computed in UTC and compared against the stored
        # integer Unix timestamps in mentions.mention_timestamp.
        one_day_ago = datetime.now(timezone.utc) - timedelta(days=1)
        one_day_ago_timestamp = int(one_day_ago.timestamp())
        query = """
            SELECT t.symbol FROM mentions m JOIN tickers t ON m.ticker_id = t.id
            WHERE m.mention_timestamp >= ?
            GROUP BY t.symbol ORDER BY COUNT(m.id) DESC LIMIT ?;
        """
        results = conn.execute(query, (one_day_ago_timestamp, limit)).fetchall()
        # Return a simple list of strings, not Row objects.
        return [row['symbol'] for row in results]
    finally:
        # Close even if the query raises, so the connection never leaks.
        conn.close()
|
||||
|
||||
def get_top_weekly_ticker_symbols(limit=10):
    """Return the most-mentioned ticker symbols from the last 7 days.

    Args:
        limit: Maximum number of symbols to return. Defaults to 10,
            matching the Top 10 weekly dashboard.

    Returns:
        list[str]: Ticker symbols ordered by mention count, descending.
    """
    conn = get_db_connection()
    try:
        # Cutoff is computed in UTC and compared against the stored
        # integer Unix timestamps in mentions.mention_timestamp.
        seven_days_ago = datetime.now(timezone.utc) - timedelta(days=7)
        seven_days_ago_timestamp = int(seven_days_ago.timestamp())
        query = """
            SELECT t.symbol FROM mentions m JOIN tickers t ON m.ticker_id = t.id
            WHERE m.mention_timestamp >= ?
            GROUP BY t.symbol ORDER BY COUNT(m.id) DESC LIMIT ?;
        """
        results = conn.execute(query, (seven_days_ago_timestamp, limit)).fetchall()
        # Return a simple list of strings, not Row objects.
        return [row['symbol'] for row in results]
    finally:
        # Close even if the query raises, so the connection never leaks.
        conn.close()
|
@@ -176,6 +176,13 @@ def main():
|
||||
"""Main function to run the Reddit stock analysis tool."""
|
||||
parser = argparse.ArgumentParser(description="Analyze stock ticker mentions on Reddit.", formatter_class=argparse.RawTextHelpFormatter)
|
||||
|
||||
parser.add_argument("-f", "--config", default="subreddits.json", help="Path to the JSON file for scanning. (Default: subreddits.json)")
|
||||
parser.add_argument("-s", "--subreddit", help="Scan a single subreddit, ignoring the config file.")
|
||||
parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts. (Default: 1)")
|
||||
parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit. (Default: 200)")
|
||||
parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post. (Default: 100)")
|
||||
parser.add_argument("-n", "--no-financials", action="store_true", help="Disable fetching of financial data during the Reddit scan.")
|
||||
parser.add_argument("--update-top-tickers", action="store_true", help="Update financial data only for tickers currently in the Top 10 daily/weekly dashboards.")
|
||||
parser.add_argument(
|
||||
"-u", "--update-financials-only",
|
||||
nargs='?',
|
||||
@@ -184,12 +191,6 @@ def main():
|
||||
metavar='TICKER',
|
||||
help="Update financials. Provide a ticker symbol to update just one,\nor use the flag alone to update all tickers in the database."
|
||||
)
|
||||
parser.add_argument("-f", "--config", default="subreddits.json", help="Path to the JSON file for scanning. (Default: subreddits.json)")
|
||||
parser.add_argument("-s", "--subreddit", help="Scan a single subreddit, ignoring the config file.")
|
||||
parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts. (Default: 1)")
|
||||
parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit. (Default: 200)")
|
||||
parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post. (Default: 100)")
|
||||
parser.add_argument("-n", "--no-financials", action="store_true", help="Disable fetching of financial data during the Reddit scan.")
|
||||
parser.add_argument("--debug", action="store_true", help="Enable detailed debug logging to the console.")
|
||||
parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
|
||||
|
||||
@@ -198,15 +199,38 @@ def main():
|
||||
|
||||
database.initialize_db()
|
||||
|
||||
update_mode = args.update_financials_only
|
||||
if args.update_top_tickers:
|
||||
# --- Mode 1: Update Top Tickers ---
|
||||
log.critical("--- Starting Financial Data Update for Top Tickers ---")
|
||||
top_daily = database.get_top_daily_ticker_symbols()
|
||||
top_weekly = database.get_top_weekly_ticker_symbols()
|
||||
unique_top_tickers = sorted(list(set(top_daily + top_weekly)))
|
||||
|
||||
if update_mode: # This block runs if -u or --update-financials-only was used
|
||||
if not unique_top_tickers:
|
||||
log.info("No top tickers found in the last week. Nothing to update.")
|
||||
else:
|
||||
log.info(f"Found {len(unique_top_tickers)} unique tickers to update: {', '.join(unique_top_tickers)}")
|
||||
conn = database.get_db_connection()
|
||||
for ticker_symbol in unique_top_tickers:
|
||||
ticker_info = database.get_ticker_by_symbol(ticker_symbol)
|
||||
if ticker_info:
|
||||
log.info(f" -> Updating financials for {ticker_info['symbol']}...")
|
||||
financials = get_financial_data_via_fetcher(ticker_info['symbol'])
|
||||
database.update_ticker_financials(
|
||||
conn, ticker_info['id'],
|
||||
financials.get('market_cap'),
|
||||
financials.get('closing_price')
|
||||
)
|
||||
conn.close()
|
||||
log.critical("--- Top Ticker Financial Data Update Complete ---")
|
||||
|
||||
elif args.update_financials_only:
|
||||
# --- Mode 2: Update All or a Single Ticker ---
|
||||
update_mode = args.update_financials_only
|
||||
if update_mode == "ALL_TICKERS":
|
||||
# This is the "update all" case
|
||||
log.critical("--- Starting Financial Data Update for ALL tickers ---")
|
||||
all_tickers = database.get_all_tickers()
|
||||
log.info(f"Found {len(all_tickers)} tickers in the database to update.")
|
||||
|
||||
conn = database.get_db_connection()
|
||||
for ticker in all_tickers:
|
||||
symbol = ticker['symbol']
|
||||
@@ -218,16 +242,10 @@ def main():
|
||||
financials.get('closing_price')
|
||||
)
|
||||
conn.close()
|
||||
log.critical("--- Financial Data Update Complete ---")
|
||||
|
||||
else:
|
||||
# This is the "update single ticker" case
|
||||
ticker_symbol_to_update = update_mode
|
||||
log.critical(f"--- Starting Financial Data Update for single ticker: {ticker_symbol_to_update} ---")
|
||||
|
||||
# Find the ticker in the database
|
||||
ticker_info = database.get_ticker_by_symbol(ticker_symbol_to_update)
|
||||
|
||||
if ticker_info:
|
||||
conn = database.get_db_connection()
|
||||
log.info(f" -> Updating financials for {ticker_info['symbol']}...")
|
||||
@@ -238,10 +256,12 @@ def main():
|
||||
financials.get('closing_price')
|
||||
)
|
||||
conn.close()
|
||||
log.critical("--- Financial Data Update Complete ---")
|
||||
else:
|
||||
log.error(f"Ticker '{ticker_symbol_to_update}' not found in the database. Please run a scan first to discover it.")
|
||||
log.error(f"Ticker '{ticker_symbol_to_update}' not found in the database.")
|
||||
log.critical("--- Financial Data Update Complete ---")
|
||||
|
||||
else:
|
||||
# --- Mode 3: Default Reddit Scan ---
|
||||
log.critical("--- Starting Reddit Scan Mode ---")
|
||||
if args.subreddit:
|
||||
subreddits_to_scan = [args.subreddit]
|
||||
@@ -263,7 +283,7 @@ def main():
|
||||
post_limit=args.posts,
|
||||
comment_limit=args.comments,
|
||||
days_to_scan=args.days,
|
||||
fetch_financials=(not args.no_financials) # Pass the inverse of the flag
|
||||
fetch_financials=(not args.no_financials)
|
||||
)
|
||||
|
||||
if __name__ == "__main__":
|
||||
|
@@ -8,9 +8,10 @@ source /home/pkhamre/git/rstat/.venv/bin/activate
|
||||
|
||||
echo "--- Starting RSTAT Daily Job on $(date +%F) ---"
|
||||
|
||||
# 1. Scrape data from the last 24 hours.
|
||||
# 1. Scrape data from the last 24 hours and update price for top tickers.
|
||||
echo "Step 1: Scraping new data..."
|
||||
rstat -c 250
|
||||
rstat --no-financials --comments 256
|
||||
rstat --update-top-tickers
|
||||
|
||||
# 2. Start the dashboard in the background.
|
||||
echo "Step 2: Starting dashboard in background..."
|
||||
|
Reference in New Issue
Block a user