Improved logging and added a --stdout option.

This commit is contained in:
2025-07-22 21:14:11 +02:00
parent 2688a7df44
commit ab44bc0e96
5 changed files with 46 additions and 92 deletions
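The pattern behind the diff: `get_logger()` is replaced by a module-level `logger` that callers import directly, plus a `setup_logging(console_verbose=False)` function the entry points call once after argument parsing. The file handler always records at INFO, the console handler is gated at CRITICAL unless `--stdout` is given, and banner messages move from `log.info` to `log.critical` so they stay visible on the console. A self-contained sketch of that pattern (standard library only; names mirror the diff, the message strings are illustrative):

# Sketch of the new logging contract (names mirror the diff; messages illustrative).
import argparse
import logging
import sys

logger = logging.getLogger("rstat_app")

def setup_logging(console_verbose=False):
    logger.setLevel(logging.INFO)
    logger.propagate = False
    if logger.hasHandlers():
        logger.handlers.clear()

    # File handler: always verbose, so rstat.log keeps the full history.
    file_handler = logging.FileHandler("rstat.log", mode="a")
    file_handler.setLevel(logging.INFO)
    logger.addHandler(file_handler)

    # Console handler: CRITICAL by default, INFO when --stdout is passed.
    console_handler = logging.StreamHandler(sys.stdout)
    console_handler.setLevel(logging.INFO if console_verbose else logging.CRITICAL)
    logger.addHandler(console_handler)

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
    args = parser.parse_args()

    setup_logging(console_verbose=args.stdout)
    logger.info("recorded in rstat.log; shown on the console only with --stdout")
    logger.critical("--- Starting Cleanup ---")  # always reaches the console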

View File

@@ -2,13 +2,11 @@
 import argparse
 from . import database
-from .logger_setup import get_logger
+from .logger_setup import setup_logging, logger as log

 # We can't reuse load_subreddits from main anymore if it's not in the same file
 # So we will duplicate it here. It's small and keeps this script self-contained.
 import json

-log = get_logger()
-
 def load_subreddits(filepath):
     """Loads a list of subreddits from a JSON file."""
     try:
@@ -40,11 +38,16 @@ def run_cleanup():
     )
     parser.add_argument("--all", action="store_true", help="Run all available cleanup tasks.")
+    parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")

     args = parser.parse_args()

+    setup_logging(console_verbose=args.stdout)
+
     run_any_task = False

+    log.critical("\n--- Starting Cleanup ---")
+
     # --- UPDATED LOGIC TO HANDLE THE NEW ARGUMENT ---
     if args.all or args.tickers:
         run_any_task = True
@@ -65,7 +68,7 @@ def run_cleanup():
log.error("\nError: Please provide at least one cleanup option (e.g., --tickers, --subreddits, --all).") log.error("\nError: Please provide at least one cleanup option (e.g., --tickers, --subreddits, --all).")
return return
log.info("\nCleanup finished.") log.critical("\nCleanup finished.")
if __name__ == "__main__": if __name__ == "__main__":
run_cleanup() run_cleanup()

View File

@@ -2,7 +2,7 @@
 from flask import Flask, render_template, request
 from datetime import datetime, timedelta, timezone

-from .logger_setup import get_logger
+from .logger_setup import logger as log
 from .database import (
     get_overall_summary,
     get_subreddit_summary,
@@ -13,7 +13,6 @@ from .database import (
     get_overall_image_view_summary
 )

-log = get_logger()

 app = Flask(__name__, template_folder='../templates')

 @app.template_filter('format_mc')

View File

@@ -3,11 +3,10 @@
 import sqlite3
 import time
 from .ticker_extractor import COMMON_WORDS_BLACKLIST
-from .logger_setup import get_logger
+from .logger_setup import logger as log
 from datetime import datetime, timedelta, timezone

 DB_FILE = "reddit_stocks.db"
-log = get_logger()

 def get_db_connection():
     """Establishes a connection to the SQLite database."""
@@ -251,52 +250,6 @@ def get_week_start_end(for_date):
     return start_of_week, end_of_week

-def generate_summary_report(limit=20):
-    """Queries the DB to generate a summary for the command-line tool."""
-    log.info(f"\n--- Top {limit} Tickers by Mention Count ---")
-    conn = get_db_connection()
-    cursor = conn.cursor()
-
-    # --- UPDATED QUERY: Changed m.sentiment_score to m.mention_sentiment ---
-    query = """
-        SELECT
-            t.symbol, t.market_cap, t.closing_price,
-            COUNT(m.id) as mention_count,
-            SUM(CASE WHEN m.mention_sentiment > 0.1 THEN 1 ELSE 0 END) as bullish_mentions,
-            SUM(CASE WHEN m.mention_sentiment < -0.1 THEN 1 ELSE 0 END) as bearish_mentions,
-            SUM(CASE WHEN m.mention_sentiment BETWEEN -0.1 AND 0.1 THEN 1 ELSE 0 END) as neutral_mentions
-        FROM mentions m JOIN tickers t ON m.ticker_id = t.id
-        GROUP BY t.symbol, t.market_cap, t.closing_price
-        ORDER BY mention_count DESC
-        LIMIT ?;
-    """
-    results = cursor.execute(query, (limit,)).fetchall()
-
-    header = f"{'Ticker':<8} | {'Mentions':<8} | {'Bullish':<8} | {'Bearish':<8} | {'Neutral':<8} | {'Market Cap':<15} | {'Close Price':<12}"
-    print(header)
-    print("-" * (len(header) + 2))  # Adjusted separator length
-
-    for row in results:
-        market_cap_str = "N/A"
-        if row['market_cap'] and row['market_cap'] > 0:
-            mc = row['market_cap']
-            if mc >= 1e12: market_cap_str = f"${mc/1e12:.2f}T"
-            elif mc >= 1e9: market_cap_str = f"${mc/1e9:.2f}B"
-            else: market_cap_str = f"${mc/1e6:.2f}M"
-
-        closing_price_str = f"${row['closing_price']:.2f}" if row['closing_price'] else "N/A"
-
-        print(
-            f"{row['symbol']:<8} | "
-            f"{row['mention_count']:<8} | "
-            f"{row['bullish_mentions']:<8} | "
-            f"{row['bearish_mentions']:<8} | "
-            f"{row['neutral_mentions']:<8} | "
-            f"{market_cap_str:<15} | "
-            f"{closing_price_str:<12}"
-        )
-    conn.close()

 def add_or_update_post_analysis(conn, post_data):
     """
     Inserts a new post analysis record or updates an existing one.
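With `generate_summary_report` gone (and the matching `--limit` flag removed from the scanner's CLI further down), the command-line tool no longer prints this table. If the numbers are still wanted, the same aggregation can be run directly against the database; a minimal sketch reusing the schema from the deleted query (column subset and limit chosen here for brevity):

# Minimal sketch: reproduce the core of the removed summary query by hand.
import sqlite3

conn = sqlite3.connect("reddit_stocks.db")
conn.row_factory = sqlite3.Row
rows = conn.execute("""
    SELECT t.symbol,
           COUNT(m.id) AS mention_count,
           SUM(CASE WHEN m.mention_sentiment > 0.1 THEN 1 ELSE 0 END) AS bullish_mentions
    FROM mentions m JOIN tickers t ON m.ticker_id = t.id
    GROUP BY t.symbol
    ORDER BY mention_count DESC
    LIMIT ?;
""", (20,)).fetchall()
for row in rows:
    print(f"{row['symbol']:<8} | {row['mention_count']:<8} | {row['bullish_mentions']:<8}")
conn.close()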

View File

@@ -3,45 +3,43 @@
 import logging
 import sys

-# Get the root logger
+# Get the root logger for our application. Other modules will import this.
 logger = logging.getLogger("rstat_app")
-logger.setLevel(logging.INFO) # Set the minimum level of messages to handle

-# Prevent the logger from propagating messages to the parent (root) logger
-logger.propagate = False
-
-# Only add handlers if they haven't been added before
-# This prevents duplicate log messages if this function is called multiple times.
-if not logger.handlers:
-    # --- Console Handler ---
-    # This handler prints logs to the standard output (your terminal)
-    console_handler = logging.StreamHandler(sys.stdout)
-    console_handler.setLevel(logging.INFO)
-    # A simple formatter for the console
-    console_formatter = logging.Formatter('%(message)s')
-    console_handler.setFormatter(console_formatter)
-    logger.addHandler(console_handler)
+def setup_logging(console_verbose=False):
+    """
+    Configures the application's logger and captures logs from yfinance.
+    """
+    logger.setLevel(logging.INFO)
+    logger.propagate = False

-    # --- File Handler ---
-    # This handler writes logs to a file
-    # 'a' stands for append mode
+    if logger.hasHandlers():
+        logger.handlers.clear()
+
+    # File Handler (Always Verbose)
     file_handler = logging.FileHandler("rstat.log", mode='a')
     file_handler.setLevel(logging.INFO)
-    # A more detailed formatter for the file, including timestamp and log level
     file_formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S')
     file_handler.setFormatter(file_formatter)
     logger.addHandler(file_handler)

-    # Get the logger used by the yfinance library
-    yfinance_logger = logging.getLogger("yfinance")
-    # Set its level to capture warnings and errors
-    yfinance_logger.setLevel(logging.WARNING)
-    # Add our existing handlers to it. This tells yfinance's logger
-    # to send its messages to our console and our log file.
-    if not yfinance_logger.handlers:
-        yfinance_logger.addHandler(console_handler)
-        yfinance_logger.addHandler(file_handler)
+    # Console Handler (Verbosity is Controlled)
+    console_handler = logging.StreamHandler(sys.stdout)
+    console_formatter = logging.Formatter('%(message)s')
+    console_handler.setFormatter(console_formatter)
+
+    if console_verbose:
+        console_handler.setLevel(logging.INFO)
+    else:
+        console_handler.setLevel(logging.CRITICAL)
+    logger.addHandler(console_handler)

-def get_logger():
-    """A simple function to get our configured logger."""
-    return logger
+    # YFINANCE LOGGER CAPTURE
+    yfinance_logger = logging.getLogger("yfinance")
+    yfinance_logger.propagate = False
+    if yfinance_logger.hasHandlers():
+        yfinance_logger.handlers.clear()
+    yfinance_logger.setLevel(logging.WARNING)
+    yfinance_logger.addHandler(console_handler)  # Use the same console handler
+    yfinance_logger.addHandler(file_handler)  # Use the same file handler
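A side effect worth knowing about the rewrite above: because the yfinance logger is cleared and then given the same two handler objects, library warnings are formatted and routed exactly like the app's own records, and re-running `setup_logging` cannot double-attach handlers. A quick sanity-check sketch, assuming `setup_logging()` has already been called in the same process:

# Sanity check: after setup_logging(), both loggers share handler objects.
import logging

app_logger = logging.getLogger("rstat_app")
yf_logger = logging.getLogger("yfinance")

shared = set(map(id, app_logger.handlers)) & set(map(id, yf_logger.handlers))
print(f"shared handlers: {len(shared)}")  # expect 2 (file + console)

yf_logger.warning("simulated yfinance warning")  # lands in rstat.log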

View File

@@ -12,13 +12,12 @@ from dotenv import load_dotenv
 from . import database
 from .ticker_extractor import extract_tickers
 from .sentiment_analyzer import get_sentiment_score
-from .logger_setup import get_logger
+from .logger_setup import setup_logging, logger as log

 load_dotenv()

 MARKET_CAP_REFRESH_INTERVAL = 86400
 POST_AGE_LIMIT = 86400
-log = get_logger()

 def load_subreddits(filepath):
     try:
@@ -133,7 +132,7 @@ def scan_subreddits(reddit, subreddits_list, post_limit=100, comment_limit=100,
log.error(f"Could not scan r/{subreddit_name}. Error: {e}") log.error(f"Could not scan r/{subreddit_name}. Error: {e}")
conn.close() conn.close()
log.info("\n--- Scan Complete ---") log.critical("\n--- Scan Complete ---")
def main(): def main():
@@ -145,16 +144,19 @@ def main():
parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)") parser.add_argument("-d", "--days", type=int, default=1, help="Number of past days to scan for new posts.\n(Default: 1 for last 24 hours)")
parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)") parser.add_argument("-p", "--posts", type=int, default=200, help="Max posts to check per subreddit.\n(Default: 200)")
parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)") parser.add_argument("-c", "--comments", type=int, default=100, help="Number of comments to scan per post.\n(Default: 100)")
parser.add_argument("-l", "--limit", type=int, default=20, help="Number of tickers to show in the CLI report.\n(Default: 20)") parser.add_argument("--stdout", action="store_true", help="Print all log messages to the console.")
args = parser.parse_args() args = parser.parse_args()
setup_logging(console_verbose=args.stdout)
if args.subreddit: if args.subreddit:
# If --subreddit is used, create a list with just that one. # If --subreddit is used, create a list with just that one.
subreddits_to_scan = [args.subreddit] subreddits_to_scan = [args.subreddit]
log.info(f"Targeted Scan Mode: Focusing on r/{args.subreddit}") log.critical(f"Targeted Scan Mode: Focusing on r/{args.subreddit}")
else: else:
# Otherwise, load from the config file. # Otherwise, load from the config file.
log.info(f"Config Scan Mode: Loading subreddits from {args.config}") log.critical(f"Config Scan Mode: Loading subreddits from {args.config}")
# Use the correct argument name: args.config # Use the correct argument name: args.config
subreddits_to_scan = load_subreddits(args.config) subreddits_to_scan = load_subreddits(args.config)
@@ -175,7 +177,6 @@ def main():
         comment_limit=args.comments,
         days_to_scan=args.days
     )
-    database.generate_summary_report(limit=args.limit)

 if __name__ == "__main__":
     main()