diff --git a/rstat_tool/cleanup.py b/rstat_tool/cleanup.py
index 3529741..ddd3832 100644
--- a/rstat_tool/cleanup.py
+++ b/rstat_tool/cleanup.py
@@ -2,13 +2,11 @@
 import argparse
 from . import database
-from .logger_setup import get_logger
+from .logger_setup import setup_logging, logger as log
 
 # We can't reuse load_subreddits from main anymore if it's not in the same file
 # So we will duplicate it here. It's small and keeps this script self-contained.
 import json
 
-log = get_logger()
-
 def load_subreddits(filepath):
     """Loads a list of subreddits from a JSON file."""
     try:
@@ -40,11 +38,16 @@ def run_cleanup():
     )
 
     parser.add_argument("--all", action="store_true", help="Run all available cleanup tasks.")
+    parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
 
     args = parser.parse_args()
+
+    setup_logging(console_verbose=args.stdout)
 
     run_any_task = False
 
+    log.critical("\n--- Starting Cleanup ---")
+
     # --- UPDATED LOGIC TO HANDLE THE NEW ARGUMENT ---
     if args.all or args.tickers:
         run_any_task = True
@@ -65,7 +68,7 @@ def run_cleanup():
         log.error("\nError: Please provide at least one cleanup option (e.g., --tickers, --subreddits, --all).")
         return
 
-    log.info("\nCleanup finished.")
+    log.critical("\nCleanup finished.")
 
 if __name__ == "__main__":
     run_cleanup()
\ No newline at end of file
diff --git a/rstat_tool/dashboard.py b/rstat_tool/dashboard.py
index d001ef1..fa7a825 100644
--- a/rstat_tool/dashboard.py
+++ b/rstat_tool/dashboard.py
@@ -2,7 +2,7 @@
 from flask import Flask, render_template, request
 from datetime import datetime, timedelta, timezone
 
-from .logger_setup import get_logger
+from .logger_setup import logger as log
 from .database import (
     get_overall_summary,
     get_subreddit_summary,
@@ -13,7 +13,6 @@ from .database import (
     get_overall_image_view_summary
 )
 
-log = get_logger()
 app = Flask(__name__, template_folder='../templates')
 
 @app.template_filter('format_mc')
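
Note on the import change in cleanup.py and dashboard.py above: modules no longer call get_logger(); they bind the shared logger object at import time. A minimal sketch of the pattern inside any rstat_tool module (do_work and its message are illustrative, not part of this change):

    # logging.getLogger("rstat_app") always returns the same logger object,
    # so importing the pre-configured instance is equivalent to the old
    # get_logger() helper, just without the indirection.
    from .logger_setup import logger as log

    def do_work():
        log.info("recorded through the shared 'rstat_app' logger")
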
diff --git a/rstat_tool/database.py b/rstat_tool/database.py
index 23dd3bd..ef76600 100644
--- a/rstat_tool/database.py
+++ b/rstat_tool/database.py
@@ -3,11 +3,10 @@
 import sqlite3
 import time
 from .ticker_extractor import COMMON_WORDS_BLACKLIST
-from .logger_setup import get_logger
+from .logger_setup import logger as log
 from datetime import datetime, timedelta, timezone
 
 DB_FILE = "reddit_stocks.db"
-log = get_logger()
 
 def get_db_connection():
     """Establishes a connection to the SQLite database."""
@@ -251,52 +250,6 @@ def get_week_start_end(for_date):
     return start_of_week, end_of_week
 
-def generate_summary_report(limit=20):
-    """Queries the DB to generate a summary for the command-line tool."""
-    log.info(f"\n--- Top {limit} Tickers by Mention Count ---")
-    conn = get_db_connection()
-    cursor = conn.cursor()
-
-    # --- UPDATED QUERY: Changed m.sentiment_score to m.mention_sentiment ---
-    query = """
-        SELECT
-            t.symbol, t.market_cap, t.closing_price,
-            COUNT(m.id) as mention_count,
-            SUM(CASE WHEN m.mention_sentiment > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
-            SUM(CASE WHEN m.mention_sentiment < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
-            SUM(CASE WHEN m.mention_sentiment BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
-        FROM mentions m JOIN tickers t ON m.ticker_id = t.id
-        GROUP BY t.symbol, t.market_cap, t.closing_price
-        ORDER BY mention_count DESC
-        LIMIT ?;
-    """
-    results = cursor.execute(query, (limit,)).fetchall()
-
-    header = f"{'Ticker':<8} | {'Mentions':<8} | {'Bullish':<8} | {'Bearish':<8} | {'Neutral':<8} | {'Market Cap':<15} | {'Close Price':<12}"
-    print(header)
-    print("-" * (len(header) + 2)) # Adjusted separator length
-
-    for row in results:
-        market_cap_str = "N/A"
-        if row['market_cap'] and row['market_cap'] > 0:
-            mc = row['market_cap']
-            if mc >= 1e12: market_cap_str = f"${mc/1e12:.2f}T"
-            elif mc >= 1e9: market_cap_str = f"${mc/1e9:.2f}B"
-            else: market_cap_str = f"${mc/1e6:.2f}M"
-
-        closing_price_str = f"${row['closing_price']:.2f}" if row['closing_price'] else "N/A"
-
-        print(
-            f"{row['symbol']:<8} | "
-            f"{row['mention_count']:<8} | "
-            f"{row['bullish_mentions']:<8} | "
-            f"{row['bearish_mentions']:<8} | "
-            f"{row['neutral_mentions']:<8} | "
-            f"{market_cap_str:<15} | "
-            f"{closing_price_str:<12}"
-        )
-    conn.close()
-
 def add_or_update_post_analysis(conn, post_data):
     """
     Inserts a new post analysis record or updates an existing one.
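
With generate_summary_report() removed, the CLI summary table is gone, but the same figures remain one query away. A throwaway sketch that reuses the SQL from the deleted function (reddit_stocks.db and the mentions/tickers schema come from database.py above; the script itself is hypothetical):

    import sqlite3

    conn = sqlite3.connect("reddit_stocks.db")
    conn.row_factory = sqlite3.Row  # lets us read columns by name
    rows = conn.execute("""
        SELECT t.symbol,
               COUNT(m.id) AS mention_count,
               SUM(CASE WHEN m.mention_sentiment > 0.1 THEN 1 ELSE 0 END) AS bullish_mentions,
               SUM(CASE WHEN m.mention_sentiment < -0.1 THEN 1 ELSE 0 END) AS bearish_mentions
        FROM mentions m JOIN tickers t ON m.ticker_id = t.id
        GROUP BY t.symbol
        ORDER BY mention_count DESC
        LIMIT 20;
    """).fetchall()
    for row in rows:
        print(row["symbol"], row["mention_count"], row["bullish_mentions"], row["bearish_mentions"])
    conn.close()
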
diff --git a/rstat_tool/logger_setup.py b/rstat_tool/logger_setup.py
index 9caaffa..040d369 100644
--- a/rstat_tool/logger_setup.py
+++ b/rstat_tool/logger_setup.py
@@ -3,45 +3,43 @@
 import logging
 import sys
 
-# Get the root logger
+# Get the root logger for our application. Other modules will import this.
 logger = logging.getLogger("rstat_app")
-logger.setLevel(logging.INFO) # Set the minimum level of messages to handle
-# Prevent the logger from propagating messages to the parent (root) logger
-logger.propagate = False
+def setup_logging(console_verbose=False):
+    """
+    Configures the application's logger and captures logs from yfinance.
+    """
+    logger.setLevel(logging.INFO)
+    logger.propagate = False
 
-# Only add handlers if they haven't been added before
-# This prevents duplicate log messages if this function is called multiple times.
-if not logger.handlers:
-    # --- Console Handler ---
-    # This handler prints logs to the standard output (your terminal)
-    console_handler = logging.StreamHandler(sys.stdout)
-    console_handler.setLevel(logging.INFO)
-    # A simple formatter for the console
-    console_formatter = logging.Formatter('%(message)s')
-    console_handler.setFormatter(console_formatter)
-    logger.addHandler(console_handler)
+    if logger.hasHandlers():
+        logger.handlers.clear()
 
-    # --- File Handler ---
-    # This handler writes logs to a file
-    # 'a' stands for append mode
+    # File Handler (Always Verbose)
     file_handler = logging.FileHandler("rstat.log", mode='a')
     file_handler.setLevel(logging.INFO)
-    # A more detailed formatter for the file, including timestamp and log level
     file_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
     file_handler.setFormatter(file_formatter)
     logger.addHandler(file_handler)
 
-# Get the logger used by the yfinance library
-yfinance_logger = logging.getLogger("yfinance")
-# Set its level to capture warnings and errors
-yfinance_logger.setLevel(logging.WARNING)
-# Add our existing handlers to it. This tells yfinance's logger
-# to send its messages to our console and our log file.
-if not yfinance_logger.handlers:
-    yfinance_logger.addHandler(console_handler)
-    yfinance_logger.addHandler(file_handler)
+    # Console Handler (Verbosity is Controlled)
+    console_handler = logging.StreamHandler(sys.stdout)
+    console_formatter = logging.Formatter('%(message)s')
+    console_handler.setFormatter(console_formatter)
+
+    if console_verbose:
+        console_handler.setLevel(logging.INFO)
+    else:
+        console_handler.setLevel(logging.CRITICAL)
+
+    logger.addHandler(console_handler)
 
 
-def get_logger():
-    """A simple function to get our configured logger."""
-    return logger
\ No newline at end of file
+    # YFINANCE LOGGER CAPTURE
+    yfinance_logger = logging.getLogger("yfinance")
+    yfinance_logger.propagate = False
+    if yfinance_logger.hasHandlers():
+        yfinance_logger.handlers.clear()
+    yfinance_logger.setLevel(logging.WARNING)
+    yfinance_logger.addHandler(console_handler) # Use the same console handler
+    yfinance_logger.addHandler(file_handler) # Use the same file handler
\ No newline at end of file
diff --git a/rstat_tool/main.py b/rstat_tool/main.py
index c044d46..2c31a22 100644
--- a/rstat_tool/main.py
+++ b/rstat_tool/main.py
@@ -12,13 +12,12 @@
 from dotenv import load_dotenv
 from . import database
 from .ticker_extractor import extract_tickers
 from .sentiment_analyzer import get_sentiment_score
-from .logger_setup import get_logger
+from .logger_setup import setup_logging, logger as log
 
 load_dotenv()
 
 MARKET_CAP_REFRESH_INTERVAL = 86400
 POST_AGE_LIMIT = 86400
-log = get_logger()
 def load_subreddits(filepath):
     try:
@@ -133,7 +132,7 @@ def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100,
             log.error(f"Could not scan r/{subreddit_name}. Error: {e}")
 
     conn.close()
-    log.info("\n--- Scan Complete ---")
+    log.critical("\n--- Scan Complete ---")
 
 
 def main():
@@ -145,16 +144,19 @@
     parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)")
     parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)")
     parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)")
-    parser.add_argument("-l", "--limit", type=int, default=20, help="Number of tickers to show in the CLI report.\n(Default: 20)")
+    parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
+
     args = parser.parse_args()
+
+    setup_logging(console_verbose=args.stdout)
 
     if args.subreddit:
         # If --subreddit is used, create a list with just that one.
         subreddits_to_scan = [args.subreddit]
-        log.info(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
+        log.critical(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
     else:
         # Otherwise, load from the config file.
-        log.info(f"Config Scan Mode: Loading subreddits from {args.config}")
+        log.critical(f"Config Scan Mode: Loading subreddits from {args.config}")
         # Use the correct argument name: args.config
         subreddits_to_scan = load_subreddits(args.config)
 
@@ -175,7 +177,6 @@ def main():
         comment_limit=args.comments,
         days_to_scan=args.days
     )
-    database.generate_summary_report(limit=args.limit)
 
 if __name__ == "__main__":
     main()
\ No newline at end of file
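
For reference, the observable behavior of the reworked logging (a sketch against logger_setup.py above; the log messages are illustrative):

    from rstat_tool.logger_setup import setup_logging, logger as log

    setup_logging(console_verbose=False)  # default: console shows CRITICAL only
    log.info("written to rstat.log only")
    log.critical("written to rstat.log and printed to the console")

    setup_logging(console_verbose=True)   # --stdout: console shows INFO and up
    log.info("now printed to the console as well")

Calling setup_logging() twice is safe because it clears any existing handlers first. This split is also why the status banners in main.py and cleanup.py moved from log.info to log.critical: they stay visible on a quiet console while routine output goes to the log file.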