Initial commit with translated description
This commit is contained in:
500
scripts/alerts.py
Normal file
500
scripts/alerts.py
Normal file
@@ -0,0 +1,500 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Price Target Alerts - Track buy zone alerts for stocks.
|
||||
|
||||
Features:
|
||||
- Set price target alerts (buy zone triggers)
|
||||
- Check alerts against current prices
|
||||
- Snooze, update, delete alerts
|
||||
- Multi-currency support (USD, EUR, JPY, SGD, MXN)
|
||||
|
||||
Usage:
|
||||
alerts.py list # Show all alerts
|
||||
alerts.py set CRWD 400 --note 'Kaufzone' # Set alert
|
||||
alerts.py check # Check triggered alerts
|
||||
alerts.py delete CRWD # Delete alert
|
||||
alerts.py snooze CRWD --days 7 # Snooze for 7 days
|
||||
alerts.py update CRWD 380 # Update target price
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
|
||||
from utils import ensure_venv
|
||||
|
||||
ensure_venv()
|
||||
|
||||
# Lazy import to avoid numpy issues at module load
|
||||
fetch_market_data = None
|
||||
|
||||
def get_fetch_market_data():
    """Return the fetch_market_data function, importing fetch_news lazily.

    The import is deferred (see the "Lazy import" module comment) so that a
    broken numpy install does not prevent commands that never need live
    quotes (list, delete, snooze, update) from running.

    Returns:
        The fetch_market_data callable from fetch_news, cached in the
        module-level global after the first call.
    """
    global fetch_market_data
    # First call pays the import cost; later calls hit the cached global.
    if fetch_market_data is None:
        from fetch_news import fetch_market_data as fmd
        fetch_market_data = fmd
    return fetch_market_data
|
||||
|
||||
# Paths are resolved relative to this script so the tool works from any CWD.
SCRIPT_DIR = Path(__file__).parent
CONFIG_DIR = SCRIPT_DIR.parent / "config"
ALERTS_FILE = CONFIG_DIR / "alerts.json"  # persistent JSON store for all alerts

# Currencies format_price() has a symbol for; cmd_set rejects anything else.
SUPPORTED_CURRENCIES = ["USD", "EUR", "JPY", "SGD", "MXN"]
|
||||
|
||||
|
||||
def load_alerts() -> dict:
    """Read the alert store from ALERTS_FILE.

    Returns the parsed JSON dict, or a fresh empty structure (with a _meta
    section) when no alerts file exists yet.
    """
    if ALERTS_FILE.exists():
        return json.loads(ALERTS_FILE.read_text())
    return {
        "_meta": {"version": 1, "supported_currencies": SUPPORTED_CURRENCIES},
        "alerts": [],
    }
|
||||
|
||||
|
||||
def save_alerts(data: dict) -> None:
    """Write the alert store to ALERTS_FILE, stamping _meta.updated_at.

    Uses setdefault so a dict without a "_meta" section (e.g. a hand-edited
    or legacy alerts file loaded by a caller) cannot crash the save path
    with a KeyError.

    Args:
        data: Full alert store dict; mutated in place to refresh the
            updated_at timestamp before being serialized.
    """
    data.setdefault("_meta", {})["updated_at"] = datetime.now().isoformat()
    ALERTS_FILE.write_text(json.dumps(data, indent=2))
|
||||
|
||||
|
||||
def get_alert_by_ticker(alerts: list, ticker: str) -> dict | None:
|
||||
"""Find alert by ticker."""
|
||||
ticker = ticker.upper()
|
||||
for alert in alerts:
|
||||
if alert["ticker"] == ticker:
|
||||
return alert
|
||||
return None
|
||||
|
||||
|
||||
def format_price(price: float, currency: str) -> str:
    """Render a price with its currency symbol.

    JPY is formatted without decimals; every other currency gets two.
    Unknown currencies fall back to "<CODE> <amount>".
    """
    symbol_map = {"USD": "$", "EUR": "€", "JPY": "¥", "SGD": "S$", "MXN": "MX$"}
    prefix = symbol_map.get(currency, currency + " ")
    decimals = 0 if currency == "JPY" else 2
    return f"{prefix}{price:,.{decimals}f}"
|
||||
|
||||
|
||||
def cmd_list(args) -> None:
    """List all alerts, split into active and snoozed sections.

    Args:
        args: Parsed CLI namespace (the "list" subcommand takes no options).
    """
    data = load_alerts()
    alerts = data.get("alerts", [])

    if not alerts:
        print("📭 No price alerts set")
        return

    print(f"📊 Price Alerts ({len(alerts)} total)\n")

    now = datetime.now()
    active = []
    snoozed = []

    # An alert counts as snoozed only while its snooze_until is in the future;
    # expired snoozes fall back into the active bucket automatically.
    for alert in alerts:
        snooze_until = alert.get("snooze_until")
        if snooze_until and datetime.fromisoformat(snooze_until) > now:
            snoozed.append(alert)
        else:
            active.append(alert)

    if active:
        print("### Active Alerts")
        for a in active:
            target = format_price(a["target_price"], a.get("currency", "USD"))
            note = f' — "{a["note"]}"' if a.get("note") else ""
            user = f" (by {a['set_by']})" if a.get("set_by") else ""
            print(f" • {a['ticker']}: {target}{note}{user}")
        print()

    if snoozed:
        print("### Snoozed")
        for a in snoozed:
            target = format_price(a["target_price"], a.get("currency", "USD"))
            until = datetime.fromisoformat(a["snooze_until"]).strftime("%Y-%m-%d")
            print(f" • {a['ticker']}: {target} (until {until})")
        print()
|
||||
|
||||
|
||||
def cmd_set(args) -> None:
    """Create a new price alert for args.ticker at args.target.

    Validates the target price and currency, warns when the currency does
    not match the exchange implied by the ticker suffix, and best-effort
    fetches the current quote to show distance to target.

    Args:
        args: Parsed CLI namespace with ticker, target, and optional
            note, user, currency attributes.
    """
    data = load_alerts()
    alerts = data.get("alerts", [])
    ticker = args.ticker.upper()

    # Refuse duplicates: one alert per ticker; 'update' changes the target.
    existing = get_alert_by_ticker(alerts, ticker)
    if existing:
        print(f"⚠️ Alert for {ticker} already exists. Use 'update' to change target.")
        return

    # Validate target price
    if args.target <= 0:
        print(f"❌ Target price must be greater than 0")
        return

    currency = args.currency.upper() if args.currency else "USD"
    if currency not in SUPPORTED_CURRENCIES:
        print(f"❌ Currency {currency} not supported. Use: {', '.join(SUPPORTED_CURRENCIES)}")
        return

    # Warn (don't block) when the alert currency doesn't match the exchange
    # implied by the ticker suffix — the user may have a reason.
    ticker_currency_map = {
        ".T": "JPY",  # Tokyo
        ".SI": "SGD",  # Singapore
        ".MX": "MXN",  # Mexico
        ".DE": "EUR", ".F": "EUR", ".PA": "EUR",  # Europe
    }
    expected_currency = "USD"  # Default for US stocks
    for suffix, curr in ticker_currency_map.items():
        if ticker.endswith(suffix):
            expected_currency = curr
            break

    if currency != expected_currency:
        print(f"⚠️ Warning: {ticker} trades in {expected_currency}, but alert set in {currency}")

    # Fetch current price (optional - may fail if numpy broken); the alert is
    # saved either way, the quote only enriches the confirmation output.
    current_price = None
    try:
        quotes = get_fetch_market_data()([ticker], timeout=10)
        if ticker in quotes and quotes[ticker].get("price"):
            current_price = quotes[ticker]["price"]
    except Exception as e:
        print(f"⚠️ Could not fetch current price: {e}", file=sys.stderr)

    alert = {
        "ticker": ticker,
        "target_price": args.target,
        "currency": currency,
        "note": args.note or "",
        "set_by": args.user or "",
        "set_date": datetime.now().strftime("%Y-%m-%d"),
        "status": "active",
        "snooze_until": None,
        "triggered_count": 0,  # incremented by cmd_check at most once per day
        "last_triggered": None,
    }

    alerts.append(alert)
    data["alerts"] = alerts
    save_alerts(data)

    target_str = format_price(args.target, currency)
    print(f"✅ Alert set: {ticker} under {target_str}")
    if current_price:
        pct_diff = ((current_price - args.target) / current_price) * 100
        current_str = format_price(current_price, currency)
        print(f" Current: {current_str} ({pct_diff:+.1f}% to target)")
|
||||
|
||||
|
||||
def cmd_delete(args) -> None:
    """Remove the alert for args.ticker, reporting whether one existed."""
    data = load_alerts()
    alerts = data.get("alerts", [])
    ticker = args.ticker.upper()

    kept = [entry for entry in alerts if entry["ticker"] != ticker]
    if len(kept) < len(alerts):
        data["alerts"] = kept
        save_alerts(data)
        print(f"🗑️ Alert deleted: {ticker}")
    else:
        print(f"❌ No alert found for {ticker}")
|
||||
|
||||
|
||||
def cmd_snooze(args) -> None:
    """Silence the alert for args.ticker for --days days (default 7)."""
    data = load_alerts()
    ticker = args.ticker.upper()

    alert = get_alert_by_ticker(data.get("alerts", []), ticker)
    if alert is None:
        print(f"❌ No alert found for {ticker}")
        return

    until = datetime.now() + timedelta(days=args.days or 7)
    alert["snooze_until"] = until.isoformat()
    save_alerts(data)
    print(f"😴 Alert snoozed: {ticker} until {until.strftime('%Y-%m-%d')}")
|
||||
|
||||
|
||||
def cmd_update(args) -> None:
    """Change an alert's target price and, when given, its note."""
    data = load_alerts()
    ticker = args.ticker.upper()

    alert = get_alert_by_ticker(data.get("alerts", []), ticker)
    if alert is None:
        print(f"❌ No alert found for {ticker}")
        return

    # Same validation as cmd_set: targets must be strictly positive.
    if args.target <= 0:
        print("❌ Target price must be greater than 0")
        return

    previous = alert["target_price"]
    alert["target_price"] = args.target
    if args.note:
        alert["note"] = args.note
    save_alerts(data)

    currency = alert.get("currency", "USD")
    before = format_price(previous, currency)
    after = format_price(args.target, currency)
    print(f"✏️ Alert updated: {ticker} {before} → {after}")
|
||||
|
||||
|
||||
def cmd_check(args) -> None:
    """Check active (non-snoozed) alerts against live prices and report.

    Fetches quotes for every active alert, partitions them into
    triggered (price at/below target) and watching, persists the
    per-alert trigger bookkeeping, then prints either JSON (--json) or a
    localized human-readable report (--lang en|de).

    NOTE(review): the fetch/partition core duplicates check_alerts() below —
    keep the two in sync when changing either.

    Args:
        args: Parsed CLI namespace with json (bool) and lang attributes.
    """
    data = load_alerts()
    alerts = data.get("alerts", [])

    if not alerts:
        if args.json:
            print(json.dumps({"triggered": [], "watching": []}))
        else:
            print("📭 No alerts to check")
        return

    now = datetime.now()
    active_alerts = []
    # Skip alerts whose snooze window is still open.
    for alert in alerts:
        snooze_until = alert.get("snooze_until")
        if snooze_until and datetime.fromisoformat(snooze_until) > now:
            continue
        active_alerts.append(alert)

    if not active_alerts:
        if args.json:
            print(json.dumps({"triggered": [], "watching": []}))
        else:
            print("📭 All alerts snoozed")
        return

    # Fetch prices for all active alerts in one batch call.
    tickers = [a["ticker"] for a in active_alerts]
    quotes = get_fetch_market_data()(tickers, timeout=30)

    triggered = []
    watching = []

    for alert in active_alerts:
        ticker = alert["ticker"]
        target = alert["target_price"]
        currency = alert.get("currency", "USD")

        quote = quotes.get(ticker, {})
        price = quote.get("price")

        # No quote for this ticker: silently skip rather than misreport.
        if price is None:
            continue

        # Divide-by-zero protection (targets of 0 shouldn't exist, but a
        # hand-edited alerts file could contain one).
        if target == 0:
            pct_diff = 0
        else:
            pct_diff = ((price - target) / target) * 100

        result = {
            "ticker": ticker,
            "target_price": target,
            "current_price": price,
            "currency": currency,
            "pct_from_target": round(pct_diff, 2),
            "note": alert.get("note", ""),
            "set_by": alert.get("set_by", ""),
        }

        if price <= target:
            triggered.append(result)
            # Update triggered count (only once per day to avoid inflation)
            last_triggered = alert.get("last_triggered")
            today = now.strftime("%Y-%m-%d")
            if not last_triggered or not last_triggered.startswith(today):
                alert["triggered_count"] = alert.get("triggered_count", 0) + 1
                alert["last_triggered"] = now.isoformat()
        else:
            watching.append(result)

    # Persist trigger bookkeeping even in --json mode.
    save_alerts(data)

    if args.json:
        print(json.dumps({"triggered": triggered, "watching": watching}, indent=2))
        return

    # Translations for the human-readable report.
    lang = getattr(args, 'lang', 'en')
    if lang == "de":
        labels = {
            "title": "PREISWARNUNGEN",
            "in_zone": "IN KAUFZONE",
            "buy": "KAUFEN!",
            "target": "Ziel",
            "watching": "BEOBACHTUNG",
            "to_target": "noch",
            "no_data": "Keine Preisdaten für Alerts verfügbar",
        }
    else:
        labels = {
            "title": "PRICE ALERTS",
            "in_zone": "IN BUY ZONE",
            "buy": "BUY SIGNAL",
            "target": "target",
            "watching": "WATCHING",
            "to_target": "to target",
            "no_data": "No price data available for alerts",
        }

    # Date header, localized format per language.
    date_str = datetime.now().strftime("%b %d, %Y") if lang == "en" else datetime.now().strftime("%d. %b %Y")
    print(f"📊 {labels['title']} — {date_str}\n")

    # Human-readable output
    if triggered:
        print(f"🟢 {labels['in_zone']}:\n")
        for t in triggered:
            target_str = format_price(t["target_price"], t["currency"])
            current_str = format_price(t["current_price"], t["currency"])
            note = f'\n "{t["note"]}"' if t.get("note") else ""
            user = f" — {t['set_by']}" if t.get("set_by") else ""
            print(f"• {t['ticker']}: {current_str} ({labels['target']}: {target_str}) ← {labels['buy']}{note}{user}")
        print()

    if watching:
        print(f"⏳ {labels['watching']}:\n")
        # Closest-to-target first (pct_from_target is positive while above).
        for w in sorted(watching, key=lambda x: x["pct_from_target"]):
            target_str = format_price(w["target_price"], w["currency"])
            current_str = format_price(w["current_price"], w["currency"])
            print(f"• {w['ticker']}: {current_str} ({labels['target']}: {target_str}) — {labels['to_target']} {abs(w['pct_from_target']):.1f}%")
        print()

    if not triggered and not watching:
        print(f"📭 {labels['no_data']}")
|
||||
|
||||
|
||||
def check_alerts() -> dict:
    """
    Check alerts and return results for briefing integration.

    Library-style twin of cmd_check: same fetch/partition logic and the same
    once-per-day trigger bookkeeping (persisted via save_alerts), but it
    returns the result instead of printing.

    NOTE(review): keep in sync with cmd_check when changing either.

    Returns: {"triggered": [...], "watching": [...]}
    """
    data = load_alerts()
    alerts = data.get("alerts", [])

    if not alerts:
        return {"triggered": [], "watching": []}

    now = datetime.now()
    # Only alerts whose snooze window has expired (or was never set).
    active_alerts = [
        a for a in alerts
        if not a.get("snooze_until") or datetime.fromisoformat(a["snooze_until"]) <= now
    ]

    if not active_alerts:
        return {"triggered": [], "watching": []}

    tickers = [a["ticker"] for a in active_alerts]
    quotes = get_fetch_market_data()(tickers, timeout=30)

    triggered = []
    watching = []

    for alert in active_alerts:
        ticker = alert["ticker"]
        target = alert["target_price"]
        currency = alert.get("currency", "USD")

        quote = quotes.get(ticker, {})
        price = quote.get("price")

        # No quote available: skip this alert silently.
        if price is None:
            continue

        # Divide-by-zero protection
        if target == 0:
            pct_diff = 0
        else:
            pct_diff = ((price - target) / target) * 100

        result = {
            "ticker": ticker,
            "target_price": target,
            "current_price": price,
            "currency": currency,
            "pct_from_target": round(pct_diff, 2),
            "note": alert.get("note", ""),
            "set_by": alert.get("set_by", ""),
        }

        if price <= target:
            triggered.append(result)
            # Update triggered count (only once per day to avoid inflation)
            last_triggered = alert.get("last_triggered")
            today = now.strftime("%Y-%m-%d")
            if not last_triggered or not last_triggered.startswith(today):
                alert["triggered_count"] = alert.get("triggered_count", 0) + 1
                alert["last_triggered"] = now.isoformat()
        else:
            watching.append(result)

    save_alerts(data)
    return {"triggered": triggered, "watching": watching}
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse the subcommand and dispatch to its handler."""
    parser = argparse.ArgumentParser(description="Price target alerts")
    subparsers = parser.add_subparsers(dest="command", required=True)

    subparsers.add_parser("list", help="List all alerts")

    set_parser = subparsers.add_parser("set", help="Set new alert")
    set_parser.add_argument("ticker", help="Stock ticker")
    set_parser.add_argument("target", type=float, help="Target price")
    set_parser.add_argument("--note", help="Note/reason")
    set_parser.add_argument("--user", help="Who set the alert")
    set_parser.add_argument("--currency", default="USD", help="Currency (USD, EUR, JPY, SGD, MXN)")

    del_parser = subparsers.add_parser("delete", help="Delete alert")
    del_parser.add_argument("ticker", help="Stock ticker")

    snooze_parser = subparsers.add_parser("snooze", help="Snooze alert")
    snooze_parser.add_argument("ticker", help="Stock ticker")
    snooze_parser.add_argument("--days", type=int, default=7, help="Days to snooze")

    update_parser = subparsers.add_parser("update", help="Update alert target")
    update_parser.add_argument("ticker", help="Stock ticker")
    update_parser.add_argument("target", type=float, help="New target price")
    update_parser.add_argument("--note", help="Update note")

    check_parser = subparsers.add_parser("check", help="Check alerts against prices")
    check_parser.add_argument("--json", action="store_true", help="JSON output")
    check_parser.add_argument("--lang", default="en", help="Output language (en, de)")

    args = parser.parse_args()

    # Table dispatch; required=True above guarantees the key exists.
    handlers = {
        "list": cmd_list,
        "set": cmd_set,
        "delete": cmd_delete,
        "snooze": cmd_snooze,
        "update": cmd_update,
        "check": cmd_check,
    }
    handlers[args.command](args)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
170
scripts/briefing.py
Normal file
170
scripts/briefing.py
Normal file
@@ -0,0 +1,170 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Briefing Generator - Main entry point for market briefings.
|
||||
Generates and optionally sends to WhatsApp group.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from utils import ensure_venv
|
||||
|
||||
SCRIPT_DIR = Path(__file__).parent
|
||||
|
||||
ensure_venv()
|
||||
|
||||
|
||||
def send_to_whatsapp(message: str, group_name: str | None = None):
    """Send a message to a WhatsApp group via the openclaw message tool.

    Falls back to the FINANCE_NEWS_TARGET env var when no group is given.
    Returns True on success, False on any failure (missing target,
    non-zero exit, timeout, or missing binary).
    """
    group_name = group_name or os.environ.get('FINANCE_NEWS_TARGET', '')
    if not group_name:
        print("❌ No target specified. Set FINANCE_NEWS_TARGET env var or use --group", file=sys.stderr)
        return False

    cmd = [
        'openclaw', 'message', 'send',
        '--channel', 'whatsapp',
        '--target', group_name,
        '--message', message,
    ]
    try:
        result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
    except Exception as e:
        # Covers TimeoutExpired and a missing openclaw binary alike.
        print(f"❌ WhatsApp error: {e}", file=sys.stderr)
        return False

    if result.returncode == 0:
        print(f"✅ Sent to WhatsApp group: {group_name}", file=sys.stderr)
        return True

    print(f"⚠️ WhatsApp send failed: {result.stderr}", file=sys.stderr)
    return False
|
||||
|
||||
|
||||
def generate_and_send(args):
    """Generate a briefing via summarize.py and optionally send it to WhatsApp.

    Builds the summarize.py command line from args, runs it as a subprocess
    (always with --json so multi-part messages can be split), prints the
    result either as raw JSON or human-readable text, and — with --send —
    forwards the macro and portfolio messages to the WhatsApp group.

    Returns:
        The macro message string (or raw stdout when JSON parsing fails).
    """

    # Determine briefing type based on current time or args
    if args.time:
        briefing_time = args.time
    else:
        hour = datetime.now().hour
        briefing_time = 'morning' if hour < 12 else 'evening'

    # Generate the briefing. NOTE(review): SCRIPT_DIR / 'summarize.py' is a
    # Path inside the argv list — fine on POSIX; confirm if Windows support
    # is ever needed.
    cmd = [
        sys.executable, SCRIPT_DIR / 'summarize.py',
        '--time', briefing_time,
        '--style', args.style,
        '--lang', args.lang
    ]

    if args.deadline is not None:
        cmd.extend(['--deadline', str(args.deadline)])

    if args.fast:
        cmd.append('--fast')

    if args.llm:
        cmd.append('--llm')
        cmd.extend(['--model', args.model])

    if args.debug:
        cmd.append('--debug')

    # Always use JSON for internal processing to handle splits
    cmd.append('--json')

    print(f"📊 Generating {briefing_time} briefing...", file=sys.stderr)

    # Subprocess timeout: the child's deadline plus 5s of slack, or 300s
    # when no deadline was requested.
    timeout = args.deadline if args.deadline is not None else 300
    timeout = max(1, int(timeout))
    if args.deadline is not None:
        timeout = timeout + 5
    result = subprocess.run(
        cmd,
        capture_output=True,
        text=True,
        stdin=subprocess.DEVNULL,
        timeout=timeout
    )

    if result.returncode != 0:
        print(f"❌ Briefing generation failed: {result.stderr}", file=sys.stderr)
        sys.exit(1)

    try:
        data = json.loads(result.stdout.strip())
    except json.JSONDecodeError:
        # Fallback if not JSON (shouldn't happen with --json)
        print(f"⚠️ Failed to parse briefing JSON", file=sys.stderr)
        print(result.stdout)
        return result.stdout

    # Output handling
    if args.json:
        print(json.dumps(data, indent=2))
    else:
        # Print for humans
        if data.get('macro_message'):
            print(data['macro_message'])
        if data.get('portfolio_message'):
            print("\n" + "="*20 + "\n")
            print(data['portfolio_message'])

    # Send to WhatsApp if requested
    if args.send and args.group:
        # Message 1: Macro ('summary' is the legacy key fallback)
        macro_msg = data.get('macro_message') or data.get('summary', '')
        if macro_msg:
            send_to_whatsapp(macro_msg, args.group)

        # Message 2: Portfolio (if exists)
        portfolio_msg = data.get('portfolio_message')
        if portfolio_msg:
            send_to_whatsapp(portfolio_msg, args.group)

    return data.get('macro_message', '')
|
||||
|
||||
|
||||
def main():
    """Parse CLI options for the briefing generator and run it.

    All real work happens in generate_and_send; this function only declares
    the argument surface (time/style/lang selection, WhatsApp delivery,
    deadline, and LLM options).
    """
    parser = argparse.ArgumentParser(description='Briefing Generator')
    parser.add_argument('--time', choices=['morning', 'evening'],
                        help='Briefing type (auto-detected if not specified)')
    parser.add_argument('--style', choices=['briefing', 'analysis', 'headlines'],
                        default='briefing', help='Summary style')
    parser.add_argument('--lang', choices=['en', 'de'], default='en',
                        help='Output language')
    parser.add_argument('--send', action='store_true',
                        help='Send to WhatsApp group')
    parser.add_argument('--group', default=os.environ.get('FINANCE_NEWS_TARGET', ''),
                        help='WhatsApp group name or JID (default: FINANCE_NEWS_TARGET env var)')
    parser.add_argument('--json', action='store_true',
                        help='Output as JSON')
    parser.add_argument('--deadline', type=int, default=None,
                        help='Overall deadline in seconds')
    parser.add_argument('--llm', action='store_true', help='Use LLM summary')
    parser.add_argument('--model', choices=['claude', 'minimax', 'gemini'],
                        default='claude', help='LLM model (only with --llm)')
    parser.add_argument('--fast', action='store_true',
                        help='Use fast mode (shorter timeouts, fewer items)')
    parser.add_argument('--debug', action='store_true',
                        help='Write debug log with sources')

    args = parser.parse_args()
    generate_and_send(args)
|
||||
614
scripts/earnings.py
Normal file
614
scripts/earnings.py
Normal file
@@ -0,0 +1,614 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Earnings Calendar - Track earnings dates for portfolio stocks.
|
||||
|
||||
Features:
|
||||
- Fetch earnings dates from FMP API
|
||||
- Show upcoming earnings in daily briefing
|
||||
- Alert 24h before earnings release
|
||||
- Cache results to avoid API spam
|
||||
|
||||
Usage:
|
||||
earnings.py list # Show all upcoming earnings
|
||||
earnings.py check # Check what's reporting today/this week
|
||||
earnings.py refresh # Force refresh earnings data
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from urllib.request import urlopen, Request
|
||||
from urllib.error import URLError, HTTPError
|
||||
|
||||
# Paths
|
||||
# Paths — resolved relative to this script so the tool works from any CWD.
SCRIPT_DIR = Path(__file__).parent
CONFIG_DIR = SCRIPT_DIR.parent / "config"
CACHE_DIR = SCRIPT_DIR.parent / "cache"
PORTFOLIO_FILE = CONFIG_DIR / "portfolio.csv"
EARNINGS_CACHE = CACHE_DIR / "earnings_calendar.json"
MANUAL_EARNINGS = CONFIG_DIR / "manual_earnings.json"  # For JP/other stocks not in Finnhub

# OpenBB binary path: prefer an explicit OPENBB_QUOTE_BIN override (must be an
# executable file), otherwise look up 'openbb-quote' on PATH. Stays None when
# neither is available.
OPENBB_BINARY = None
try:
    env_path = os.environ.get('OPENBB_QUOTE_BIN')
    if env_path and os.path.isfile(env_path) and os.access(env_path, os.X_OK):
        OPENBB_BINARY = env_path
    else:
        OPENBB_BINARY = shutil.which('openbb-quote')
except Exception:
    # Probing is best-effort; a failure just leaves OPENBB_BINARY as None.
    pass
|
||||
|
||||
# API Keys
|
||||
def get_fmp_key() -> str:
    """Return the FMP API key from the environment, else ~/.openclaw/.env.

    Returns an empty string when the key is defined in neither place.
    """
    key = os.environ.get("FMP_API_KEY", "")
    if key:
        return key
    env_file = Path.home() / ".openclaw" / ".env"
    if not env_file.exists():
        return key
    for line in env_file.read_text().splitlines():
        if line.startswith("FMP_API_KEY="):
            return line.split("=", 1)[1].strip()
    return key
|
||||
|
||||
|
||||
def load_portfolio() -> list[dict]:
    """Load portfolio rows from the CSV file as a list of dicts.

    Returns an empty list when the portfolio file does not exist.
    """
    if not PORTFOLIO_FILE.exists():
        return []
    # newline="" is required by the csv module for correct quoting/newline
    # handling; pin utf-8 so parsing doesn't depend on the locale.
    with open(PORTFOLIO_FILE, "r", newline="", encoding="utf-8") as f:
        return list(csv.DictReader(f))
|
||||
|
||||
|
||||
def load_earnings_cache() -> dict:
    """Return cached earnings data, or a fresh empty structure on any failure."""
    try:
        if EARNINGS_CACHE.exists():
            return json.loads(EARNINGS_CACHE.read_text())
    except Exception:
        # Corrupted cache is treated the same as no cache.
        pass
    return {"last_updated": None, "earnings": {}}
|
||||
|
||||
|
||||
def load_manual_earnings() -> dict:
    """
    Load manually-entered earnings dates (for JP stocks not in Finnhub).
    Format: {"6857.T": {"date": "2026-01-30", "time": "amc", "note": "Q3 FY2025"}, ...}

    Returns {} when the file is missing or unreadable.
    """
    if not MANUAL_EARNINGS.exists():
        return {}
    try:
        raw = json.loads(MANUAL_EARNINGS.read_text())
    except Exception:
        return {}
    # Drop metadata keys (leading underscore) and anything that isn't a dict.
    return {
        ticker: info
        for ticker, info in raw.items()
        if not ticker.startswith("_") and isinstance(info, dict)
    }
|
||||
|
||||
|
||||
def save_earnings_cache(data: dict):
    """Persist earnings data to the JSON cache file.

    Args:
        data: Cache dict ({"last_updated": ..., "earnings": {...}});
            non-JSON values (e.g. dates) are stringified via default=str.
    """
    # parents=True so a missing intermediate directory can't abort the save
    # (mkdir(exist_ok=True) alone raises FileNotFoundError in that case).
    CACHE_DIR.mkdir(parents=True, exist_ok=True)
    EARNINGS_CACHE.write_text(json.dumps(data, indent=2, default=str))
|
||||
|
||||
|
||||
def get_finnhub_key() -> str:
    """Return the Finnhub API key (env var first, then ~/.openclaw/.env).

    Returns an empty string when the key is defined in neither place.
    """
    env_key = os.environ.get("FINNHUB_API_KEY", "")
    if env_key:
        return env_key
    dotenv = Path.home() / ".openclaw" / ".env"
    if dotenv.exists():
        for line in dotenv.read_text().splitlines():
            if line.startswith("FINNHUB_API_KEY="):
                return line.split("=", 1)[1].strip()
    return ""
|
||||
|
||||
|
||||
def fetch_all_earnings_finnhub(days_ahead: int = 60) -> dict:
    """
    Fetch all earnings for the next N days from Finnhub.
    Returns dict keyed by symbol: {"AAPL": {...}, ...}

    Args:
        days_ahead: Size of the calendar window starting today.

    Returns {} when no API key is configured or the request/parse fails
    (errors are logged to stderr, never raised).
    """
    finnhub_key = get_finnhub_key()
    if not finnhub_key:
        return {}

    from_date = datetime.now().strftime("%Y-%m-%d")
    to_date = (datetime.now() + timedelta(days=days_ahead)).strftime("%Y-%m-%d")

    url = f"https://finnhub.io/api/v1/calendar/earnings?from={from_date}&to={to_date}&token={finnhub_key}"

    try:
        req = Request(url, headers={"User-Agent": "finance-news/1.0"})
        with urlopen(req, timeout=30) as resp:
            data = json.loads(resp.read().decode("utf-8"))

        # Re-key the flat calendar list by symbol; a symbol appearing twice
        # keeps the last entry.
        earnings_by_symbol = {}
        for entry in data.get("earningsCalendar", []):
            symbol = entry.get("symbol")
            if symbol:
                earnings_by_symbol[symbol] = {
                    "date": entry.get("date"),
                    "time": entry.get("hour", ""),  # bmo/amc
                    "eps_estimate": entry.get("epsEstimate"),
                    "revenue_estimate": entry.get("revenueEstimate"),
                    "quarter": entry.get("quarter"),
                    "year": entry.get("year"),
                }
        return earnings_by_symbol
    except Exception as e:
        print(f"❌ Finnhub error: {e}", file=sys.stderr)
        return {}
|
||||
|
||||
|
||||
def normalize_ticker_for_lookup(ticker: str) -> list[str]:
    """
    Convert a portfolio ticker into the candidate symbols to try on Finnhub.

    Exchange-suffixed tickers (Tokyo ``.T``, Singapore ``.SI``) are also
    offered without their suffix, since Finnhub sometimes lists the bare
    symbol. Candidates come back in priority order, starting with the
    original ticker, with no duplicates.
    """
    variants = [ticker]
    for suffix in ('.T', '.SI'):
        if ticker.endswith(suffix):
            # Slice off only the trailing suffix. (str.replace would also
            # clobber an identical substring earlier in the symbol, and the
            # old code re-appended the original ticker as a duplicate.)
            variants.append(ticker[:-len(suffix)])
            break
    return variants
|
||||
|
||||
|
||||
def fetch_earnings_for_portfolio(portfolio: list[dict]) -> dict:
    """
    Fetch earnings dates for portfolio stocks using the Finnhub bulk API.

    One calendar request covers all tickers — far cheaper than per-ticker
    calls. Each portfolio symbol is matched against the calendar via its
    lookup variants; unmatched symbols are simply absent from the result.
    """
    all_earnings = fetch_all_earnings_finnhub(days_ahead=60)
    if not all_earnings:
        return {}

    results = {}
    for stock in portfolio:
        ticker = stock["symbol"]
        hit = next(
            (v for v in normalize_ticker_for_lookup(ticker) if v in all_earnings),
            None,
        )
        if hit is not None:
            results[ticker] = all_earnings[hit]
    return results
|
||||
|
||||
|
||||
def refresh_earnings(portfolio: list[dict], force: bool = False) -> dict:
    """Refresh earnings data for all portfolio stocks.

    Serves from the on-disk cache when it is under 6 hours old (unless
    force=True); otherwise fetches from Finnhub, overlays manual entries
    for symbols Finnhub lacks, prints a summary, and rewrites the cache.

    Args:
        portfolio: Portfolio rows (dicts with at least a "symbol" key).
        force: Skip the cache-freshness check and always refetch.

    Returns:
        The cache dict ({"last_updated": ..., "earnings": {...}}), or {}
        when no Finnhub API key is configured.
    """
    finnhub_key = get_finnhub_key()
    if not finnhub_key:
        print("❌ FINNHUB_API_KEY not found", file=sys.stderr)
        return {}

    cache = load_earnings_cache()

    # Check if cache is fresh (< 6 hours old); an unparsable timestamp just
    # falls through to a refetch.
    if not force and cache.get("last_updated"):
        try:
            last = datetime.fromisoformat(cache["last_updated"])
            if datetime.now() - last < timedelta(hours=6):
                print(f"📦 Using cached data (updated {last.strftime('%H:%M')})")
                return cache
        except Exception:
            pass

    print(f"🔄 Fetching earnings calendar from Finnhub...")

    # Use bulk fetch - much more efficient
    earnings = fetch_earnings_for_portfolio(portfolio)

    # Merge manual earnings (for JP stocks not in Finnhub); API data wins on
    # conflict, manual entries only fill gaps.
    manual = load_manual_earnings()
    if manual:
        print(f"📝 Merging {len(manual)} manual entries...")
        for ticker, data in manual.items():
            if ticker not in earnings:  # Manual data fills gaps
                earnings[ticker] = data

    found = len(earnings)
    total = len(portfolio)
    print(f"✅ Found earnings data for {found}/{total} stocks")

    if earnings:
        for ticker, data in sorted(earnings.items(), key=lambda x: x[1].get("date", "")):
            print(f" • {ticker}: {data.get('date', '?')}")

    cache = {
        "last_updated": datetime.now().isoformat(),
        "earnings": earnings
    }
    save_earnings_cache(cache)

    return cache
|
||||
|
||||
|
||||
def list_earnings(args):
    """List all upcoming earnings for the portfolio, soonest first.

    Each line shows an urgency emoji (today/tomorrow/this week/later/past),
    the date, ticker, company name, session timing (pre-market/after-close)
    and the EPS estimate when available.

    Args:
        args: Parsed CLI namespace; args.refresh forces a cache refetch.
    """
    portfolio = load_portfolio()
    if not portfolio:
        print("📂 Portfolio empty")
        return

    cache = refresh_earnings(portfolio, force=args.refresh)
    earnings = cache.get("earnings", {})

    if not earnings:
        print("\n❌ No earnings dates found")
        return

    # Sort by date, skipping entries without one.
    sorted_earnings = sorted(
        [(ticker, data) for ticker, data in earnings.items() if data.get("date")],
        key=lambda x: x[1]["date"]
    )

    print(f"\n📅 Upcoming Earnings ({len(sorted_earnings)} stocks)\n")

    today = datetime.now().date()

    for ticker, data in sorted_earnings:
        date_str = data["date"]
        try:
            ed = datetime.strptime(date_str, "%Y-%m-%d").date()
            days_until = (ed - today).days

            # Emoji based on timing
            if days_until < 0:
                emoji = "✅"  # Past
                timing = f"{-days_until}d ago"
            elif days_until == 0:
                emoji = "🔴"  # Today!
                timing = "TODAY"
            elif days_until == 1:
                emoji = "🟡"  # Tomorrow
                timing = "TOMORROW"
            elif days_until <= 7:
                emoji = "🟠"  # This week
                timing = f"in {days_until}d"
            else:
                emoji = "⚪"  # Later
                timing = f"in {days_until}d"

            # Time of day (Finnhub "hour" field: bmo = before market open,
            # amc = after market close)
            time_str = ""
            if data.get("time") == "bmo":
                time_str = " (pre-market)"
            elif data.get("time") == "amc":
                time_str = " (after-close)"

            # EPS estimate
            eps_str = ""
            if data.get("eps_estimate"):
                eps_str = f" | Est: ${data['eps_estimate']:.2f}"

            # Stock name from portfolio; fall back to the ticker itself.
            stock_name = next((s["name"] for s in portfolio if s["symbol"] == ticker), ticker)

            print(f"{emoji} {date_str} ({timing}): **{ticker}** — {stock_name}{time_str}{eps_str}")

        except ValueError:
            # Unparsable date (e.g. malformed manual entry): print it raw.
            print(f"⚪ {date_str}: {ticker}")

    print()
|
||||
|
||||
|
||||
def check_earnings(args):
    """Check earnings for today and this week (briefing format).

    Modes (driven by ``args`` flags, all read defensively via getattr):
      - default: today's earnings plus the next 7 days
      - --week:  only Mon-Fri of the *upcoming* week (for a Sunday cron)
      - --json:  machine-readable output instead of text
      - --lang:  label language, 'en' (default) or 'de'

    Loads the earnings cache, auto-refreshing when it is missing,
    unparsable, or older than 12 hours. Prints nothing when the
    portfolio or the cache is empty (unless --verbose, which prints a
    "no earnings" line).
    """
    portfolio = load_portfolio()
    if not portfolio:
        return

    cache = load_earnings_cache()

    # Auto-refresh if cache is stale (missing timestamp, bad timestamp, or >12h old)
    if not cache.get("last_updated"):
        cache = refresh_earnings(portfolio, force=False)
    else:
        try:
            last = datetime.fromisoformat(cache["last_updated"])
            if datetime.now() - last > timedelta(hours=12):
                cache = refresh_earnings(portfolio, force=False)
        except Exception:
            cache = refresh_earnings(portfolio, force=False)

    earnings = cache.get("earnings", {})
    if not earnings:
        return

    today = datetime.now().date()
    week_only = getattr(args, 'week', False)

    # For weekly mode (Sunday cron), show Mon-Fri of the upcoming week.
    # weekday() returns 0=Mon..6=Sun; (7 - weekday) % 7 = days until next Monday.
    if week_only:
        days_until_monday = (7 - today.weekday()) % 7
        # BUGFIX: the modulo yields 0 only when today *is* Monday, in which
        # case we want next Monday (7 days ahead), not today. The previous
        # guard (`== 0 and today.weekday() != 0`) was unsatisfiable, so a
        # Monday run previewed the current week instead of the next one.
        if days_until_monday == 0:
            days_until_monday = 7
        week_start = today + timedelta(days=days_until_monday)
        week_end = week_start + timedelta(days=4)  # Mon-Fri
    else:
        week_end = today + timedelta(days=7)

    today_list = []
    week_list = []

    for ticker, data in earnings.items():
        if not data.get("date"):
            continue
        try:
            ed = datetime.strptime(data["date"], "%Y-%m-%d").date()
            stock = next((s for s in portfolio if s["symbol"] == ticker), None)
            name = stock["name"] if stock else ticker
            category = stock.get("category", "") if stock else ""

            entry = {
                "ticker": ticker,
                "name": name,
                "date": ed,
                "time": data.get("time", ""),
                "eps_estimate": data.get("eps_estimate"),
                "category": category,
            }

            if week_only:
                # Weekly mode: only show the upcoming-week range
                if week_start <= ed <= week_end:
                    week_list.append(entry)
            else:
                # Daily mode: today + this week
                if ed == today:
                    today_list.append(entry)
                elif today < ed <= week_end:
                    week_list.append(entry)
        except ValueError:
            continue  # unparsable date string — skip this ticker

    def _serialize(e: dict) -> dict:
        """JSON-safe view of an entry (date -> ISO string); shared by all JSON branches."""
        return {
            "ticker": e["ticker"],
            "name": e["name"],
            "date": e["date"].isoformat(),
            "time": e["time"],
            "eps_estimate": e.get("eps_estimate"),
            "category": e.get("category", ""),
        }

    # Handle JSON output
    if getattr(args, 'json', False):
        if week_only:
            result = {
                "week_start": week_start.isoformat(),
                "week_end": week_end.isoformat(),
                "earnings": [_serialize(e) for e in sorted(week_list, key=lambda x: x["date"])],
            }
        else:
            result = {
                # 'zzz' sorts entries with no time-of-day after bmo/amc ones
                "today": [_serialize(e) for e in sorted(today_list, key=lambda x: x.get("time", "zzz"))],
                "this_week": [_serialize(e) for e in sorted(week_list, key=lambda x: x["date"])],
            }
        print(json.dumps(result, indent=2))
        return

    # Translations
    lang = getattr(args, 'lang', 'en')
    if lang == "de":
        labels = {
            "today": "EARNINGS HEUTE",
            "week": "EARNINGS DIESE WOCHE",
            "week_preview": "EARNINGS NÄCHSTE WOCHE",
            "pre": "vor Börseneröffnung",
            "post": "nach Börsenschluss",
            "pre_short": "vor",
            "post_short": "nach",
            "est": "Erw",
            "none": "Keine Earnings diese Woche",
            "none_week": "Keine Earnings nächste Woche",
        }
    else:
        labels = {
            "today": "EARNINGS TODAY",
            "week": "EARNINGS THIS WEEK",
            "week_preview": "EARNINGS NEXT WEEK",
            "pre": "pre-market",
            "post": "after-close",
            "pre_short": "pre",
            "post_short": "post",
            "est": "Est",
            "none": "No earnings this week",
            "none_week": "No earnings next week",
        }

    # Date header
    date_str = datetime.now().strftime("%b %d, %Y") if lang == "en" else datetime.now().strftime("%d. %b %Y")

    # Output for briefing
    output = []

    # Daily mode: show today's earnings
    if not week_only and today_list:
        output.append(f"📅 {labels['today']} — {date_str}\n")
        for e in sorted(today_list, key=lambda x: x.get("time", "zzz")):
            time_str = f" ({labels['pre']})" if e["time"] == "bmo" else f" ({labels['post']})" if e["time"] == "amc" else ""
            eps_str = f" — {labels['est']}: ${e['eps_estimate']:.2f}" if e.get("eps_estimate") else ""
            output.append(f"• {e['ticker']} — {e['name']}{time_str}{eps_str}")
        output.append("")

    if week_list:
        # Use different header for weekly preview mode
        week_label = labels['week_preview'] if week_only else labels['week']
        if week_only:
            # Show date range for weekly preview
            week_range = f"{week_start.strftime('%b %d')} - {week_end.strftime('%b %d')}"
            output.append(f"📅 {week_label} ({week_range})\n")
        else:
            output.append(f"📅 {week_label}\n")
        for e in sorted(week_list, key=lambda x: x["date"]):
            day_name = e["date"].strftime("%a %d.%m")
            time_str = f" ({labels['pre_short']})" if e["time"] == "bmo" else f" ({labels['post_short']})" if e["time"] == "amc" else ""
            output.append(f"• {day_name}: {e['ticker']} — {e['name']}{time_str}")
        output.append("")

    if output:
        print("\n".join(output))
    # Consistency fix: read 'verbose' like every other flag, so callers that
    # pass a minimal args object (e.g. get_briefing_section) stay safe.
    elif getattr(args, 'verbose', False):
        no_earnings_label = labels['none_week'] if week_only else labels['none']
        print(f"📅 {no_earnings_label}")
|
||||
|
||||
|
||||
def get_briefing_section() -> str:
    """Get earnings section for daily briefing (called by briefing.py).

    Runs check_earnings() with default flags and returns whatever it
    printed, instead of letting it write to the real stdout.
    """
    import contextlib
    from io import StringIO

    class _QuietArgs:
        # check_earnings reads .verbose; remaining flags fall back to
        # getattr defaults inside check_earnings itself.
        verbose = False

    buffer = StringIO()
    with contextlib.redirect_stdout(buffer):
        check_earnings(_QuietArgs())
    return buffer.getvalue()
|
||||
|
||||
|
||||
def get_earnings_context(symbols: list[str]) -> list[dict]:
    """
    Get recent earnings data (beats/misses) for symbols using the OpenBB CLI.

    Returns a list of dicts shaped ``{"symbol": <ticker>, "earnings": <dict>}``,
    where ``"earnings"`` is the first record of the CLI's JSON output.
    NOTE(review): an earlier docstring claimed flattened eps/revenue keys;
    the code actually nests the raw record under "earnings".

    Behavior:
        - At most the first 10 symbols are queried, to bound total runtime.
        - Best-effort: non-zero exits, timeouts, launch failures, and
          malformed JSON are silently skipped rather than raised.
        - Returns [] when the OpenBB binary is not configured.
    """
    if not OPENBB_BINARY:
        return []

    results = []
    for symbol in symbols[:10]:  # Limit to 10 symbols
        try:
            # One CLI invocation per symbol, guarded by a 30s timeout.
            result = subprocess.run(
                [OPENBB_BINARY, symbol, '--earnings'],
                capture_output=True,
                text=True,
                timeout=30
            )
            if result.returncode == 0:
                try:
                    data = json.loads(result.stdout)
                    # Only accept a non-empty JSON list; keep the first record.
                    if isinstance(data, list) and data:
                        results.append({
                            'symbol': symbol,
                            'earnings': data[0] if isinstance(data[0], dict) else {}
                        })
                except json.JSONDecodeError:
                    pass  # malformed CLI output — skip this symbol
        except Exception:
            pass  # timeout / subprocess failure — skip this symbol
    return results
|
||||
|
||||
|
||||
def get_analyst_ratings(symbols: list[str]) -> list[dict]:
    """
    Get analyst upgrades/downgrades for symbols using the OpenBB CLI.

    Returns a list of dicts shaped ``{"symbol": <ticker>, "rating": <dict>}``,
    where ``"rating"`` is the first record of the CLI's JSON output.
    NOTE(review): an earlier docstring claimed flat rating/target_price/firm
    keys; the code actually nests the raw record under "rating".

    Behavior mirrors get_earnings_context():
        - At most the first 10 symbols are queried.
        - Best-effort: failures and bad JSON are silently skipped.
        - Returns [] when the OpenBB binary is not configured.
    """
    if not OPENBB_BINARY:
        return []

    results = []
    for symbol in symbols[:10]:  # Limit to 10 symbols
        try:
            # One CLI invocation per symbol, guarded by a 30s timeout.
            result = subprocess.run(
                [OPENBB_BINARY, symbol, '--rating'],
                capture_output=True,
                text=True,
                timeout=30
            )
            if result.returncode == 0:
                try:
                    data = json.loads(result.stdout)
                    # Only accept a non-empty JSON list; keep the first record.
                    if isinstance(data, list) and data:
                        results.append({
                            'symbol': symbol,
                            'rating': data[0] if isinstance(data[0], dict) else {}
                        })
                except json.JSONDecodeError:
                    pass  # malformed CLI output — skip this symbol
        except Exception:
            pass  # timeout / subprocess failure — skip this symbol
    return results
|
||||
|
||||
|
||||
def main():
    """CLI entry point: dispatch to list / check / refresh subcommands."""
    parser = argparse.ArgumentParser(description="Earnings Calendar Tracker")
    sub = parser.add_subparsers(dest="command", help="Commands")

    # list — show every upcoming earnings date
    p_list = sub.add_parser("list", help="List all upcoming earnings")
    p_list.add_argument("--refresh", "-r", action="store_true", help="Force refresh")
    p_list.set_defaults(func=list_earnings)

    # check — today / this week, for the daily briefing
    p_check = sub.add_parser("check", help="Check today/this week")
    p_check.add_argument("--verbose", "-v", action="store_true")
    p_check.add_argument("--json", action="store_true", help="JSON output")
    p_check.add_argument("--lang", default="en", help="Output language (en, de)")
    p_check.add_argument("--week", action="store_true", help="Show full week preview (for weekly cron)")
    p_check.set_defaults(func=check_earnings)

    # refresh — unconditionally re-fetch all cached data
    p_refresh = sub.add_parser("refresh", help="Force refresh all data")
    p_refresh.set_defaults(func=lambda a: refresh_earnings(load_portfolio(), force=True))

    args = parser.parse_args()

    if args.command is None:
        parser.print_help()
        return

    args.func(args)
|
||||
|
||||
|
||||
# Allow use both as a script and as an importable module.
if __name__ == "__main__":
    main()
|
||||
1126
scripts/fetch_news.py
Normal file
1126
scripts/fetch_news.py
Normal file
File diff suppressed because it is too large
Load Diff
317
scripts/portfolio.py
Normal file
317
scripts/portfolio.py
Normal file
@@ -0,0 +1,317 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Portfolio Manager - CRUD operations for stock watchlist.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import csv
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Watchlist CSV lives alongside the package: <repo>/config/portfolio.csv
PORTFOLIO_FILE = Path(__file__).parent.parent / "config" / "portfolio.csv"
# Columns every row must provide (validate_portfolio_csv) vs. the full
# schema written on save (save_portfolio).
REQUIRED_COLUMNS = ['symbol', 'name']
DEFAULT_COLUMNS = ['symbol', 'name', 'category', 'notes', 'type']
|
||||
|
||||
|
||||
def validate_portfolio_csv(path: Path) -> tuple[bool, list[str]]:
    """
    Validate portfolio CSV file for common issues.

    Checks performed:
        - file decodes as UTF-8
        - a header row exists and contains REQUIRED_COLUMNS
        - no duplicate ticker symbols

    Returns:
        Tuple of (is_valid, list of warnings). A missing file counts as
        valid (empty portfolio). Missing columns and duplicates only
        produce warnings; hard read errors make the result invalid.
    """
    warnings = []

    if not path.exists():
        return True, warnings

    try:
        # IMPROVEMENT: read the file exactly once — this surfaces encoding
        # problems up front AND feeds the CSV parser. The previous version
        # opened and fully read the file twice (the first read's result
        # was never used beyond triggering UnicodeDecodeError).
        content = path.read_text(encoding='utf-8')

        # DictReader accepts any iterable of lines, so no second open is needed.
        reader = csv.DictReader(content.splitlines())

        # Check required columns
        if reader.fieldnames is None:
            warnings.append("CSV appears to be empty")
            return False, warnings

        missing_cols = set(REQUIRED_COLUMNS) - set(reader.fieldnames or [])
        if missing_cols:
            warnings.append(f"Missing required columns: {', '.join(missing_cols)}")

        # Check for duplicate symbols
        symbols = []
        for row in reader:
            symbol = row.get('symbol', '').strip().upper()
            if symbol:
                symbols.append(symbol)

        duplicates = [s for s in set(symbols) if symbols.count(s) > 1]
        if duplicates:
            warnings.append(f"Duplicate symbols found: {', '.join(duplicates)}")

    except UnicodeDecodeError:
        warnings.append("File encoding issue - try saving as UTF-8")
    except Exception as e:
        warnings.append(f"Error reading portfolio: {e}")
        return False, warnings

    return True, warnings
|
||||
|
||||
|
||||
def load_portfolio() -> list[dict]:
    """Load portfolio from CSV with validation.

    Validation warnings go to stderr; an invalid or missing file yields
    []. Rows are normalized: symbols upper-cased, blank symbols dropped,
    duplicates skipped (first occurrence wins), and empty optional
    fields replaced with their defaults.
    """
    if not PORTFOLIO_FILE.exists():
        return []

    # Validate first
    is_valid, warnings = validate_portfolio_csv(PORTFOLIO_FILE)
    for warning in warnings:
        print(f"⚠️ Portfolio warning: {warning}", file=sys.stderr)

    if not is_valid:
        print("⚠️ Portfolio has errors - returning empty", file=sys.stderr)
        return []

    try:
        with open(PORTFOLIO_FILE, 'r', encoding='utf-8') as f:
            rows = csv.DictReader(f)

            entries = []
            seen = set()
            for row in rows:
                ticker = row.get('symbol', '').strip().upper()
                # Skip blank rows and duplicates (keep first occurrence)
                if not ticker or ticker in seen:
                    continue
                seen.add(ticker)

                entries.append({
                    'symbol': ticker,
                    'name': row.get('name', ticker) or ticker,
                    'category': row.get('category', '') or '',
                    'notes': row.get('notes', '') or '',
                    'type': row.get('type', 'Watchlist') or 'Watchlist',
                })

        return entries

    except Exception as e:
        print(f"⚠️ Error loading portfolio: {e}", file=sys.stderr)
        return []
|
||||
|
||||
|
||||
def save_portfolio(portfolio: list[dict]):
    """Save portfolio to CSV as UTF-8.

    An empty portfolio still writes the header row so the file remains a
    valid CSV for later loads.
    """
    if not portfolio:
        PORTFOLIO_FILE.write_text("symbol,name,category,notes,type\n", encoding='utf-8')
        return

    # BUGFIX: write with an explicit UTF-8 encoding. load_portfolio()
    # reads UTF-8, so relying on the platform default (e.g. cp1252 on
    # Windows) could either crash on non-ASCII names/notes or produce a
    # file the loader flags as mis-encoded.
    with open(PORTFOLIO_FILE, 'w', newline='', encoding='utf-8') as f:
        writer = csv.DictWriter(f, fieldnames=['symbol', 'name', 'category', 'notes', 'type'])
        writer.writeheader()
        writer.writerows(portfolio)
|
||||
|
||||
|
||||
def list_portfolio(args):
    """List all stocks in portfolio, grouped by type and then category."""
    portfolio = load_portfolio()

    if not portfolio:
        print("📂 Portfolio is empty. Use 'portfolio add <SYMBOL>' to add stocks.")
        return

    print(f"\n📊 Portfolio ({len(portfolio)} stocks)\n")

    # Group by Type then Category (dicts preserve first-seen order)
    by_type = {}
    for stock in portfolio:
        type_name = stock.get('type', 'Watchlist') or 'Watchlist'
        by_type.setdefault(type_name, []).append(stock)

    for type_name, type_stocks in by_type.items():
        print(f"# {type_name}")

        by_category = {}
        for stock in type_stocks:
            category = stock.get('category', 'Other') or 'Other'
            by_category.setdefault(category, []).append(stock)

        for category, stocks in by_category.items():
            print(f"### {category}")
            for entry in stocks:
                suffix = f" — {entry['notes']}" if entry.get('notes') else ""
                print(f"  • {entry['symbol']}: {entry['name']}{suffix}")
        print()
|
||||
|
||||
|
||||
def add_stock(args):
    """Add a stock to the portfolio; no-op if the symbol is already present."""
    portfolio = load_portfolio()
    ticker = args.symbol.upper()

    # Check if already exists (case-insensitive)
    if any(entry['symbol'].upper() == ticker for entry in portfolio):
        print(f"⚠️ {ticker} already in portfolio")
        return

    portfolio.append({
        'symbol': ticker,
        'name': args.name or ticker,
        'category': args.category or '',
        'notes': args.notes or '',
        'type': args.type,
    })
    save_portfolio(portfolio)
    print(f"✅ Added {ticker} to portfolio ({args.type})")
|
||||
|
||||
|
||||
def remove_stock(args):
    """Remove a stock from the portfolio by symbol (case-insensitive)."""
    portfolio = load_portfolio()
    ticker = args.symbol.upper()

    kept = [entry for entry in portfolio if entry['symbol'].upper() != ticker]

    # Nothing filtered out => symbol was never there
    if len(kept) == len(portfolio):
        print(f"⚠️ {ticker} not found in portfolio")
        return

    save_portfolio(kept)
    print(f"✅ Removed {ticker} from portfolio")
|
||||
|
||||
|
||||
def import_csv(args):
    """Import portfolio from an external CSV, replacing the current one.

    Accepts a few common header spellings (symbol/Symbol/ticker,
    name/Name/company, category/Category/sector, notes/Notes) and
    normalizes each row to the internal schema. Exits with status 1
    when the file does not exist.
    """
    import_path = Path(args.file)

    if not import_path.exists():
        print(f"❌ File not found: {args.file}")
        sys.exit(1)

    # BUGFIX: read with an explicit UTF-8 encoding to match how the
    # portfolio is saved and loaded elsewhere in this module.
    with open(import_path, 'r', encoding='utf-8') as f:
        reader = csv.DictReader(f)
        imported = list(reader)

    # Normalize fields
    normalized = []
    for row in imported:
        # BUGFIX: strip and validate the symbol — previously blank or
        # whitespace-only symbols were imported as empty entries.
        symbol = (row.get('symbol', row.get('Symbol', row.get('ticker', ''))) or '').strip().upper()
        if not symbol:
            continue  # malformed / empty row
        normalized.append({
            'symbol': symbol,
            # Fall back to the symbol when no name column is present.
            'name': row.get('name', row.get('Name', row.get('company', ''))) or symbol,
            'category': row.get('category', row.get('Category', row.get('sector', ''))),
            'notes': row.get('notes', row.get('Notes', '')),
            'type': row.get('type', 'Watchlist')
        })

    save_portfolio(normalized)
    print(f"✅ Imported {len(normalized)} stocks from {args.file}")
|
||||
|
||||
|
||||
def create_interactive(args):
    """Interactive portfolio creation from stdin, one stock per line."""
    print("\n📊 Portfolio Creator\n")
    print("Enter stocks one per line (format: SYMBOL or SYMBOL,Name,Category)")
    print("Type 'done' when finished.\n")

    entries = []

    while True:
        try:
            raw = input("> ").strip()
        except (EOFError, KeyboardInterrupt):
            break  # Ctrl-D / Ctrl-C ends input like 'done'

        if raw.lower() == 'done':
            break
        if not raw:
            continue

        fields = [part.strip() for part in raw.split(',')]
        symbol = fields[0].upper()

        entries.append({
            'symbol': symbol,
            'name': fields[1] if len(fields) > 1 else symbol,
            'category': fields[2] if len(fields) > 2 else '',
            'notes': '',
            'type': 'Watchlist',
        })
        print(f"  Added: {symbol}")

    if entries:
        save_portfolio(entries)
        print(f"\n✅ Created portfolio with {len(entries)} stocks")
    else:
        print("\n⚠️ No stocks added")
|
||||
|
||||
|
||||
def get_symbols(args=None):
    """Print portfolio symbols: comma-separated, or JSON with --json.

    Also usable from other scripts via the CLI ('portfolio symbols').
    """
    symbols = [entry['symbol'] for entry in load_portfolio()]

    if args and args.json:
        import json
        print(json.dumps(symbols))
    else:
        print(','.join(symbols))
|
||||
|
||||
|
||||
def main():
    """CLI entry point for the portfolio manager."""
    parser = argparse.ArgumentParser(description='Portfolio Manager')
    commands = parser.add_subparsers(dest='command', required=True)

    cmd = commands.add_parser('list', help='List portfolio')
    cmd.set_defaults(func=list_portfolio)

    cmd = commands.add_parser('add', help='Add stock')
    cmd.add_argument('symbol', help='Stock symbol')
    cmd.add_argument('--name', help='Company name')
    cmd.add_argument('--category', help='Category (e.g., Tech, Finance)')
    cmd.add_argument('--notes', help='Notes')
    cmd.add_argument('--type', choices=['Holding', 'Watchlist'], default='Watchlist', help='Portfolio type')
    cmd.set_defaults(func=add_stock)

    cmd = commands.add_parser('remove', help='Remove stock')
    cmd.add_argument('symbol', help='Stock symbol')
    cmd.set_defaults(func=remove_stock)

    cmd = commands.add_parser('import', help='Import from CSV')
    cmd.add_argument('file', help='CSV file path')
    cmd.set_defaults(func=import_csv)

    cmd = commands.add_parser('create', help='Interactive creation')
    cmd.set_defaults(func=create_interactive)

    # 'symbols' exists mainly so other scripts can shell out for the list
    cmd = commands.add_parser('symbols', help='Get symbols list')
    cmd.add_argument('--json', action='store_true', help='Output as JSON')
    cmd.set_defaults(func=get_symbols)

    args = parser.parse_args()
    args.func(args)
|
||||
|
||||
|
||||
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()
|
||||
325
scripts/ranking.py
Normal file
325
scripts/ranking.py
Normal file
@@ -0,0 +1,325 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Deterministic Headline Ranking - Impact-based ranking policy.
|
||||
|
||||
Implements #53: Deterministic impact-based ranking for headline selection.
|
||||
|
||||
Scoring Rubric (weights):
|
||||
- Market Impact (40%): CB decisions, earnings, sanctions, oil spikes
|
||||
- Novelty (20%): New vs recycled news
|
||||
- Breadth (20%): Sector-wide vs single-stock
|
||||
- Credibility (10%): Source reliability
|
||||
- Diversity Bonus (10%): Underrepresented categories
|
||||
|
||||
Output:
|
||||
- MUST_READ: Top 5 stories
|
||||
- SCAN: 3-5 additional stories (if quality threshold met)
|
||||
"""
|
||||
|
||||
import re
|
||||
from datetime import datetime
|
||||
from difflib import SequenceMatcher
|
||||
|
||||
|
||||
# Category keywords for classification (case-insensitive substring match
# against "title description"; see classify_category()).
CATEGORY_KEYWORDS = {
    "macro": ["fed", "ecb", "boj", "central bank", "rate", "inflation", "gdp", "unemployment", "treasury", "yield", "bond"],
    "equities": ["earnings", "revenue", "profit", "eps", "guidance", "beat", "miss", "upgrade", "downgrade", "target"],
    "geopolitics": ["sanction", "tariff", "war", "conflict", "embargo", "trump", "china", "russia", "ukraine", "iran", "trade war"],
    "energy": ["oil", "opec", "crude", "gas", "energy", "brent", "wti"],
    "tech": ["ai", "chip", "semiconductor", "nvidia", "apple", "google", "microsoft", "meta", "amazon"],
}

# Source credibility scores (0-1); sources not listed default to 0.5
# in score_credibility(). Keys must match the article's "source" field.
SOURCE_CREDIBILITY = {
    "Wall Street Journal": 0.95,
    "WSJ": 0.95,
    "Bloomberg": 0.95,
    "Reuters": 0.90,
    "Financial Times": 0.90,
    "CNBC": 0.80,
    "Yahoo Finance": 0.70,
    "MarketWatch": 0.75,
    "Barron's": 0.85,
    "Seeking Alpha": 0.60,
    "Tagesschau": 0.85,
    "Handelsblatt": 0.80,
}

# Default config; any key can be overridden per-call via
# rank_headlines(config={...}).
DEFAULT_CONFIG = {
    "dedupe_threshold": 0.7,     # title-similarity ratio above which items count as duplicates
    "must_read_count": 5,        # max MUST_READ stories
    "scan_count": 5,             # max SCAN stories
    "must_read_min_score": 0.4,  # quality floor for MUST_READ candidates
    "scan_min_score": 0.25,      # quality floor for SCAN candidates
    "source_cap": 2,             # max items per outlet
    "weights": {
        "market_impact": 0.40,
        "novelty": 0.20,
        "breadth": 0.20,
        "credibility": 0.10,
        "diversity": 0.10,
    },
}
|
||||
|
||||
|
||||
def normalize_title(title: str) -> str:
    """Lower-case a title, drop punctuation, and collapse runs of whitespace."""
    if not title:
        return ""
    lowered = re.sub(r"[^a-z0-9\s]", " ", title.lower())
    return " ".join(lowered.split())
|
||||
|
||||
|
||||
def title_similarity(a: str, b: str) -> float:
    """Similarity ratio in [0, 1] between two titles after normalization."""
    if not a or not b:
        return 0.0
    left, right = normalize_title(a), normalize_title(b)
    return SequenceMatcher(None, left, right).ratio()
|
||||
|
||||
|
||||
def deduplicate_headlines(headlines: list[dict], threshold: float = 0.7) -> list[dict]:
    """Keep only headlines whose title is not near-identical (> threshold)
    to one already kept; order is preserved, first occurrence wins."""
    if not headlines:
        return []

    unique: list[dict] = []
    for article in headlines:
        candidate = article.get("title", "")
        if all(
            title_similarity(candidate, kept.get("title", "")) <= threshold
            for kept in unique
        ):
            unique.append(article)

    return unique
|
||||
|
||||
|
||||
def classify_category(title: str, description: str = "") -> list[str]:
    """Tag a headline with every CATEGORY_KEYWORDS category whose keyword
    appears in the combined title+description; ["general"] when none match."""
    haystack = f"{title} {description}".lower()

    matched = [
        category
        for category, keywords in CATEGORY_KEYWORDS.items()
        if any(keyword in haystack for keyword in keywords)
    ]

    return matched or ["general"]
|
||||
|
||||
|
||||
def score_market_impact(title: str, description: str = "") -> float:
    """Score market impact in [0, 1]: base 0.3, +0.15 per high-impact
    keyword and +0.1 per medium-impact keyword found, capped at 1.0."""
    haystack = f"{title} {description}".lower()

    high_impact = ["fed", "rate cut", "rate hike", "earnings", "guidance", "sanctions", "war", "oil", "recession"]
    medium_impact = ["profit", "revenue", "gdp", "inflation", "tariff", "merger", "acquisition"]

    score = 0.3  # every headline starts at the base impact
    for term in high_impact:
        if term in haystack:
            score += 0.15
    for term in medium_impact:
        if term in haystack:
            score += 0.1

    return min(score, 1.0)
|
||||
|
||||
|
||||
def score_novelty(article: dict) -> float:
    """Score novelty by publication recency: 1.0 under 2h old, stepping
    down to 0.2 beyond 24h; 0.5 when the timestamp is missing or bad."""
    published_at = article.get("published_at")
    if not published_at:
        return 0.5  # unknown age = medium novelty

    try:
        if isinstance(published_at, str):
            # Accept trailing-'Z' ISO timestamps too
            pub_time = datetime.fromisoformat(published_at.replace("Z", "+00:00"))
        else:
            pub_time = published_at

        age_hours = (datetime.now(pub_time.tzinfo) - pub_time).total_seconds() / 3600

        # Step function: fresher is better.
        for limit, value in ((2, 1.0), (6, 0.8), (12, 0.6), (24, 0.4)):
            if age_hours < limit:
                return value
        return 0.2
    except Exception:
        return 0.5  # unparsable timestamp = medium novelty
|
||||
|
||||
|
||||
def score_breadth(categories: list[str]) -> float:
    """Score breadth in [0, 1]: macro/geopolitics beat energy, which beats
    any multi-category story, which beats a single-category one."""
    tags = set(categories)

    if tags & {"macro", "geopolitics"}:
        return 0.9
    if "energy" in tags:
        return 0.7
    return 0.6 if len(categories) > 1 else 0.4
|
||||
|
||||
|
||||
def score_credibility(source: str) -> float:
    """Look up the outlet's credibility (0-1); unknown outlets get 0.5."""
    unknown_default = 0.5
    return SOURCE_CREDIBILITY.get(source, unknown_default)
|
||||
|
||||
|
||||
def calculate_score(article: dict, weights: dict, category_counts: dict) -> float:
    """Compute the weighted ranking score for one headline.

    Mutates *article*: stores the classified categories under
    "_categories" and the rounded score components under "_score",
    "_impact", and "_novelty". *category_counts* reflects categories
    already selected, and drives the diversity bonus.
    """
    title = article.get("title", "")
    description = article.get("description", "")
    source = article.get("source", "")

    categories = classify_category(title, description)
    article["_categories"] = categories  # stashed for later selection steps

    # Component scores
    impact = score_market_impact(title, description)
    novelty = score_novelty(article)
    breadth = score_breadth(categories)
    credibility = score_credibility(source)

    # Diversity bonus — boost underrepresented categories:
    # 0.5 if any of this article's categories is still unseen,
    # else 0.3 if any has been seen only once, else 0.0.
    counts = [category_counts.get(cat, 0) for cat in categories]
    rarest = min(counts) if counts else 2
    if rarest < 1:
        diversity = 0.5
    elif rarest < 2:
        diversity = 0.3
    else:
        diversity = 0.0

    # Weighted sum of all components
    score = (
        impact * weights.get("market_impact", 0.4) +
        novelty * weights.get("novelty", 0.2) +
        breadth * weights.get("breadth", 0.2) +
        credibility * weights.get("credibility", 0.1) +
        diversity * weights.get("diversity", 0.1)
    )

    article["_score"] = round(score, 3)
    article["_impact"] = round(impact, 3)
    article["_novelty"] = round(novelty, 3)

    return score
|
||||
|
||||
|
||||
def apply_source_cap(ranked: list[dict], cap: int = 2) -> list[dict]:
    """Keep at most *cap* articles per outlet, preserving rank order."""
    kept = []
    per_source = {}

    for article in ranked:
        outlet = article.get("source", "Unknown")
        if per_source.get(outlet, 0) < cap:
            kept.append(article)
            per_source[outlet] = per_source.get(outlet, 0) + 1

    return kept
|
||||
|
||||
|
||||
def ensure_diversity(selected: list[dict], candidates: list[dict], required: list[str]) -> list[dict]:
    """Return *selected* plus, for each required category not yet covered,
    the first matching candidate (if any). Input lists are not mutated."""
    result = list(selected)
    covered = {cat for article in result for cat in article.get("_categories", [])}

    for needed in required:
        if needed in covered:
            continue
        # Pull in the first not-yet-selected candidate from this category
        for candidate in candidates:
            if candidate not in result and needed in candidate.get("_categories", []):
                result.append(candidate)
                covered.add(needed)
                break

    return result
|
||||
|
||||
|
||||
def rank_headlines(headlines: list[dict], config: dict | None = None) -> dict:
    """
    Rank headlines deterministically.

    Pipeline: dedupe by title similarity -> score each article
    (mutating it with _score/_impact/_novelty/_categories keys) ->
    sort by score -> cap items per outlet -> pick MUST_READ with a
    diversity quota -> pick SCAN from the remainder.

    Args:
        headlines: List of headline dicts with title, source, description, etc.
            NOTE: the input dicts are mutated in place (underscore-prefixed
            scoring keys are added).
        config: Optional overrides merged over DEFAULT_CONFIG (flat merge;
            a "weights" override replaces the whole weights dict).

    Returns:
        {"must_read": [...], "scan": [...],
         "total_processed": <input count>, "after_dedupe": <unique count>}
    """
    cfg = {**DEFAULT_CONFIG, **(config or {})}
    weights = cfg.get("weights", DEFAULT_CONFIG["weights"])

    if not headlines:
        return {"must_read": [], "scan": []}

    # Step 1: Deduplicate
    unique = deduplicate_headlines(headlines, cfg["dedupe_threshold"])

    # Step 2: Score all headlines. category_counts grows as we go, so the
    # diversity bonus favors categories not yet seen earlier in the list.
    category_counts = {}
    for article in unique:
        calculate_score(article, weights, category_counts)
        for cat in article.get("_categories", []):
            category_counts[cat] = category_counts.get(cat, 0) + 1

    # Step 3: Sort by score
    ranked = sorted(unique, key=lambda x: x.get("_score", 0), reverse=True)

    # Step 4: Apply source cap
    capped = apply_source_cap(ranked, cfg["source_cap"])

    # Step 5: Select must_read with diversity quota
    # Leave room for diversity additions by taking count-2 initially
    must_read_candidates = [a for a in capped if a.get("_score", 0) >= cfg["must_read_min_score"]]
    must_read_count = cfg["must_read_count"]
    must_read = must_read_candidates[:max(1, must_read_count - 2)]  # Reserve 2 slots for diversity
    must_read = ensure_diversity(must_read, capped, ["macro", "equities", "geopolitics"])
    must_read = must_read[:must_read_count]  # Final trim to exact count

    # Step 6: Select scan (additional items not already in must_read)
    scan_candidates = [a for a in capped if a not in must_read and a.get("_score", 0) >= cfg["scan_min_score"]]
    scan = scan_candidates[:cfg["scan_count"]]

    return {
        "must_read": must_read,
        "scan": scan,
        "total_processed": len(headlines),
        "after_dedupe": len(unique),
    }
|
||||
|
||||
|
||||
# Smoke test: rank a small fixed sample and print both tiers.
if __name__ == "__main__":
    # Test with sample data (last entry is a near-duplicate of the first)
    test_headlines = [
        {"title": "Fed signals rate cut in March", "source": "WSJ", "description": "Federal Reserve hints at policy shift"},
        {"title": "Apple earnings beat expectations", "source": "CNBC", "description": "Revenue up 15%"},
        {"title": "Oil prices surge on OPEC cuts", "source": "Reuters", "description": "Brent crude hits $90"},
        {"title": "China-US trade tensions escalate", "source": "Bloomberg", "description": "New tariffs announced"},
        {"title": "Tech stocks rally on AI optimism", "source": "Yahoo Finance", "description": "Nvidia leads gains"},
        {"title": "Fed hints at rate reduction", "source": "MarketWatch", "description": "Same story as WSJ"},  # Dupe
    ]

    result = rank_headlines(test_headlines)
    print("MUST_READ:")
    for h in result["must_read"]:
        print(f"  [{h['_score']:.2f}] {h['title']} ({h['source']})")
    print("\nSCAN:")
    for h in result["scan"]:
        print(f"  [{h['_score']:.2f}] {h['title']} ({h['source']})")
|
||||
283
scripts/research.py
Normal file
283
scripts/research.py
Normal file
@@ -0,0 +1,283 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Research Module - Deep research using Gemini CLI.
|
||||
Crawls articles, finds correlations, researches companies.
|
||||
Outputs research_report.md for later analysis.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from utils import ensure_venv
|
||||
|
||||
from fetch_news import PortfolioError, get_market_news, get_portfolio_news
|
||||
|
||||
SCRIPT_DIR = Path(__file__).parent
|
||||
CONFIG_DIR = SCRIPT_DIR.parent / "config"
|
||||
OUTPUT_DIR = SCRIPT_DIR.parent / "research"
|
||||
|
||||
|
||||
ensure_venv()
|
||||
|
||||
|
||||
def format_market_data(market_data: dict) -> str:
    """Render the market-data dict as a markdown section for the research prompt.

    One "###" heading per region; one bullet per index that carries quote data.
    """
    out = ["## Market Data\n"]
    for region_info in market_data.get('markets', {}).values():
        out.append(f"### {region_info['name']}")
        for idx in region_info.get('indices', {}).values():
            quote = idx.get('data')
            if not quote:
                continue  # skip indices whose quote fetch returned nothing
            pct = quote.get('change_percent', 0)
            arrow = '📉' if pct < 0 else '📈'
            out.append(f"- {idx['name']}: {quote.get('price', 'N/A')} ({pct:+.2f}%) {arrow}")
        out.append("")
    return '\n'.join(out)
|
||||
|
||||
|
||||
def format_headlines(headlines: list) -> str:
    """Render up to 20 headlines as a markdown bullet list for the research prompt."""
    out = ["## Current Headlines\n"]
    for item in headlines[:20]:
        out.append(f"- [{item.get('source', 'Unknown')}] {item.get('title', '')}")
        url = item.get('link', '')
        if url:
            out.append(f" URL: {url}")
    return '\n'.join(out)
|
||||
|
||||
|
||||
def format_portfolio_news(portfolio_data: dict) -> str:
    """Render portfolio quotes plus up to 5 articles per symbol as markdown."""
    out = ["## Portfolio Analysis\n"]
    for ticker, info in portfolio_data.get('stocks', {}).items():
        quote = info.get('quote', {})
        pct = quote.get('change_percent', 0)
        out.append(f"### {ticker} (${quote.get('price', 'N/A')}, {pct:+.2f}%)")
        for story in info.get('articles', [])[:5]:
            out.append(f"- {story.get('title', '')}")
            url = story.get('link', '')
            if url:
                out.append(f" URL: {url}")
        out.append("")
    return '\n'.join(out)
|
||||
|
||||
|
||||
def gemini_available() -> bool:
    """Return True when the `gemini` executable can be found on PATH."""
    return bool(shutil.which('gemini'))
|
||||
|
||||
|
||||
def research_with_gemini(content: str, focus_areas: list = None) -> str:
    """Perform deep research using Gemini CLI.

    Args:
        content: Combined market/headlines/portfolio content
        focus_areas: Optional list of focus areas (e.g., ['earnings', 'macro', 'sectors'])

    Returns:
        Research report text; on any failure a ⚠️-prefixed message is
        returned instead — this function never raises.
    """
    # Optional extra section spliced into the prompt when focus areas are given.
    focus_prompt = ""
    if focus_areas:
        focus_prompt = f"""
Focus areas for the research:
{', '.join(f'- {area}' for area in focus_areas)}

Go deep on each area.
"""

    prompt = f"""You are an experienced investment research analyst.

Your task is to deliver deep research on current market developments.

{focus_prompt}
Please analyze the following market data:

{content}

## Analysis Requirements:

1. **Macro Trends**: What is driving the market today? Which economic data/decisions matter?

2. **Sector Analysis**: Which sectors are performing best/worst? Why?

3. **Company News**: Relevant earnings, M&A, product launches?

4. **Risks**: What downside risks should be noted?

5. **Opportunities**: Which positive developments offer opportunities?

6. **Correlations**: Are there links between different news items/asset classes?

7. **Trade Ideas**: Concrete setups based on the analysis (not financial advice!)

8. **Sources**: Original links for further research

Be analytical, objective, and opinionated where appropriate.
Deliver a substantial report (500-800 words).
"""

    try:
        # The whole prompt is passed as one argv element; no shell is involved.
        result = subprocess.run(
            ['gemini', prompt],
            capture_output=True,
            text=True,
            timeout=120  # seconds — deep research can be slow
        )

        if result.returncode == 0:
            return result.stdout.strip()
        else:
            return f"⚠️ Gemini research error: {result.stderr}"

    except subprocess.TimeoutExpired:
        return "⚠️ Gemini research timeout"
    except FileNotFoundError:
        return "⚠️ Gemini CLI not found. Install: brew install gemini-cli"
|
||||
|
||||
|
||||
def format_raw_data_report(market_data: dict, portfolio_data: dict) -> str:
    """Join the raw markdown sections (market, headlines, portfolio) into one report."""
    sections = []
    if market_data:
        sections.append(format_market_data(market_data))
        if market_data.get('headlines'):
            sections.append(format_headlines(market_data['headlines']))
    # Portfolio section is skipped entirely when the fetch reported an error.
    if portfolio_data and 'error' not in portfolio_data:
        sections.append(format_portfolio_news(portfolio_data))
    return '\n\n'.join(sections)
|
||||
|
||||
|
||||
def generate_research_content(market_data: dict, portfolio_data: dict, focus_areas: list = None) -> dict:
    """Build the research report, preferring the Gemini CLI when available.

    Returns:
        dict with 'report' (text) and 'source' — 'none' when there is no
        input data, 'gemini' when the CLI produced the report, else 'raw'.
    """
    raw = format_raw_data_report(market_data, portfolio_data)
    if not raw.strip():
        return {'report': '', 'source': 'none'}
    if gemini_available():
        return {'report': research_with_gemini(raw, focus_areas), 'source': 'gemini'}
    return {'report': raw, 'source': 'raw'}
|
||||
|
||||
|
||||
def generate_research_report(args):
    """Generate full research report.

    Fetches market + portfolio data, builds the report (Gemini or raw),
    saves it to a dated markdown file under OUTPUT_DIR, and echoes it to
    stdout (JSON when args.json). Status messages go to stderr so stdout
    stays machine-readable.
    """
    OUTPUT_DIR.mkdir(parents=True, exist_ok=True)

    config_path = CONFIG_DIR / "config.json"
    if not config_path.exists():
        print("⚠️ No config found. Run 'finance-news wizard' first.", file=sys.stderr)
        sys.exit(1)

    # Fetch fresh data
    print("📡 Fetching market data...", file=sys.stderr)

    # Get market overview. hasattr guards let this run with partial args
    # objects (e.g. when called from another command's namespace).
    market_data = get_market_news(
        args.limit if hasattr(args, 'limit') else 5,
        regions=args.regions.split(',') if hasattr(args, 'regions') else ["us", "europe"],
        max_indices_per_region=2
    )

    # Get portfolio news; a portfolio failure degrades gracefully to a
    # market-only report instead of aborting.
    try:
        portfolio_data = get_portfolio_news(
            args.limit if hasattr(args, 'limit') else 5,
            args.max_stocks if hasattr(args, 'max_stocks') else 10
        )
    except PortfolioError as exc:
        print(f"⚠️ Skipping portfolio: {exc}", file=sys.stderr)
        portfolio_data = None

    # Build report
    focus_areas = None
    if hasattr(args, 'focus') and args.focus:
        focus_areas = args.focus.split(',')

    research_result = generate_research_content(market_data, portfolio_data, focus_areas)
    research_report = research_result['report']
    source = research_result['source']

    if not research_report.strip():
        print("⚠️ No data available for research", file=sys.stderr)
        return

    if source == 'gemini':
        print("🔬 Running deep research with Gemini...", file=sys.stderr)
    else:
        print("🧾 Gemini not available; using raw data report", file=sys.stderr)

    # Add metadata header. The German labels below ("Generiert", "Quelle")
    # are user-facing report text — presumably intentional; confirm before
    # localizing.
    timestamp = datetime.now().isoformat()
    date_str = datetime.now().strftime("%Y-%m-%d %H:%M")

    full_report = f"""# Market Research Report

**Generiert:** {date_str}
**Quelle:** Finance News Skill

---

{research_report}

---

*This report was generated automatically. Not financial advice.*
"""

    # Save to a per-day file; a second run the same day overwrites it.
    output_file = OUTPUT_DIR / f"research_{datetime.now().strftime('%Y-%m-%d')}.md"
    with open(output_file, 'w') as f:
        f.write(full_report)

    print(f"✅ Research report saved to: {output_file}", file=sys.stderr)

    # Also output to stdout
    if args.json:
        print(json.dumps({
            'report': research_report,
            'saved_to': str(output_file),
            'timestamp': timestamp
        }))
    else:
        print("\n" + "="*60)
        print("RESEARCH REPORT")
        print("="*60)
        print(research_report)
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse research options and generate the report."""
    parser = argparse.ArgumentParser(description='Deep Market Research')
    parser.add_argument('--limit', type=int, default=5, help='Max headlines per source')
    parser.add_argument('--regions', default='us,europe', help='Comma-separated regions')
    parser.add_argument('--max-stocks', type=int, default=10, help='Max portfolio stocks')
    parser.add_argument('--focus', help='Focus areas (comma-separated)')
    parser.add_argument('--json', action='store_true', help='Output as JSON')

    args = parser.parse_args()
    generate_research_report(args)


if __name__ == '__main__':
    main()
|
||||
290
scripts/setup.py
Normal file
290
scripts/setup.py
Normal file
@@ -0,0 +1,290 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Finance News Skill - Interactive Setup
|
||||
Configures RSS feeds, WhatsApp channels, and cron jobs.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
SCRIPT_DIR = Path(__file__).parent
|
||||
CONFIG_DIR = SCRIPT_DIR.parent / "config"
|
||||
SOURCES_FILE = CONFIG_DIR / "config.json"
|
||||
|
||||
|
||||
def load_sources():
    """Return the saved configuration, or the defaults when none exists yet."""
    if not SOURCES_FILE.exists():
        return get_default_sources()
    with open(SOURCES_FILE, 'r') as fh:
        return json.load(fh)
|
||||
|
||||
|
||||
def save_sources(sources: dict):
    """Persist the configuration to SOURCES_FILE and confirm on stdout."""
    CONFIG_DIR.mkdir(parents=True, exist_ok=True)
    SOURCES_FILE.write_text(json.dumps(sources, indent=2))
    print(f"✅ Configuration saved to {SOURCES_FILE}")
|
||||
|
||||
|
||||
def get_default_sources():
    """Return the bundled default configuration, or {} when none is shipped."""
    default_path = CONFIG_DIR / "config.json"
    if not default_path.exists():
        return {}
    return json.loads(default_path.read_text())
|
||||
|
||||
|
||||
def prompt(message: str, default: str = "") -> str:
    """Read one line of input; an empty reply falls back to *default*."""
    if not default:
        return input(f"{message}: ").strip()
    reply = input(f"{message} [{default}]: ").strip()
    return reply or default
|
||||
|
||||
|
||||
def prompt_bool(message: str, default: bool = True) -> bool:
    """Read a yes/no answer; an empty reply keeps *default*."""
    hint = "Y/n" if default else "y/N"
    reply = input(f"{message} [{hint}]: ").strip().lower()
    return default if not reply else reply in ('y', 'yes', '1', 'true')
|
||||
|
||||
|
||||
def setup_rss_feeds(sources: dict):
    """Interactive RSS feed configuration.

    Toggles each configured feed on/off and optionally adds a custom feed.
    Mutates *sources* in place.
    """
    print("\n📰 RSS Feed Configuration\n")
    print("Enable/disable news sources:\n")

    # Robustness: a fresh/empty config may lack the 'rss_feeds' section
    # entirely (get_default_sources() can return {}); the original raised
    # KeyError here.
    feeds = sources.setdefault('rss_feeds', {})

    for feed_id, feed_config in feeds.items():
        name = feed_config.get('name', feed_id)
        current = feed_config.get('enabled', True)
        feeds[feed_id]['enabled'] = prompt_bool(f" {name}", current)

    print("\n Add custom RSS feed? (leave blank to skip)")
    custom_name = prompt(" Feed name", "")
    if custom_name:
        custom_url = prompt(" Feed URL")
        # Key is a normalized slug of the display name.
        feeds[custom_name.lower().replace(' ', '_')] = {
            "name": custom_name,
            "enabled": True,
            "main": custom_url
        }
        print(f" ✅ Added {custom_name}")
|
||||
|
||||
|
||||
def setup_markets(sources: dict):
    """Interactive market region configuration (mutates *sources* in place)."""
    print("\n📊 Market Coverage\n")
    print("Enable/disable market regions:\n")

    # Robustness: guard configs without a 'markets' section (the original
    # raised KeyError on an empty default config).
    markets = sources.setdefault('markets', {})
    for market_id, market_config in markets.items():
        name = market_config.get('name', market_id)
        current = market_config.get('enabled', True)
        markets[market_id]['enabled'] = prompt_bool(f" {name}", current)
|
||||
|
||||
|
||||
def setup_delivery(sources: dict):
    """Interactive delivery channel configuration (mutates *sources* in place)."""
    print("\n📤 Delivery Channels\n")

    # Bug fix: the original only created defaults when 'delivery' was
    # missing entirely; a saved partial dict (e.g. without 'telegram')
    # crashed with KeyError below. setdefault per channel covers both cases.
    delivery = sources.setdefault('delivery', {})
    delivery.setdefault('whatsapp', {'enabled': True, 'group': ''})
    delivery.setdefault('telegram', {'enabled': False, 'group': ''})

    # WhatsApp
    wa_enabled = prompt_bool("Enable WhatsApp delivery",
                             delivery['whatsapp'].get('enabled', True))
    delivery['whatsapp']['enabled'] = wa_enabled

    if wa_enabled:
        delivery['whatsapp']['group'] = prompt(
            " WhatsApp group name or JID",
            delivery['whatsapp'].get('group', ''))

    # Telegram
    tg_enabled = prompt_bool("Enable Telegram delivery",
                             delivery['telegram'].get('enabled', False))
    delivery['telegram']['enabled'] = tg_enabled

    if tg_enabled:
        delivery['telegram']['group'] = prompt(
            " Telegram group name or ID",
            delivery['telegram'].get('group', ''))
|
||||
|
||||
|
||||
def setup_language(sources: dict):
    """Interactive language configuration (mutates *sources* in place)."""
    print("\n🌐 Language Settings\n")

    # Robustness: guard configs without a 'language' section (the original
    # raised KeyError). Defaults mirror the de/en choices offered below.
    language = sources.setdefault('language', {'default': 'de', 'supported': ['de', 'en']})
    current_lang = language.get('default', 'de')
    lang = prompt("Default language (de/en)", current_lang)
    if lang in language.get('supported', []):
        language['default'] = lang
    else:
        print(f" ⚠️ Unsupported language '{lang}', keeping '{current_lang}'")
|
||||
|
||||
|
||||
def setup_schedule(sources: dict):
    """Interactive briefing schedule configuration (mutates *sources* in place)."""
    print("\n⏰ Briefing Schedule\n")

    # Robustness: guard configs missing 'schedule' or its entries (the
    # original raised KeyError). Cron defaults match setup_cron_jobs().
    schedule = sources.setdefault('schedule', {})
    morning = schedule.setdefault('morning', {'description': 'morning briefing'})
    evening = schedule.setdefault('evening', {'description': 'evening briefing'})

    # Morning
    morning_enabled = prompt_bool(
        f"Enable morning briefing ({morning.get('description', '')})",
        morning.get('enabled', True))
    morning['enabled'] = morning_enabled

    if morning_enabled:
        morning['cron'] = prompt(" Morning cron expression", morning.get('cron', '30 6 * * 1-5'))

    # Evening
    evening_enabled = prompt_bool(
        f"Enable evening briefing ({evening.get('description', '')})",
        evening.get('enabled', True))
    evening['enabled'] = evening_enabled

    if evening_enabled:
        evening['cron'] = prompt(" Evening cron expression", evening.get('cron', '0 13 * * 1-5'))

    # Timezone — one value stored on both entries; morning's seeds the prompt.
    tz = prompt("Timezone", morning.get('timezone', 'America/Los_Angeles'))
    morning['timezone'] = tz
    evening['timezone'] = tz
|
||||
|
||||
|
||||
def setup_cron_jobs(sources: dict):
    """Set up OpenClaw cron jobs based on configuration.

    Currently this only prints what would be scheduled; the actual
    `openclaw cron add` call is not wired up yet.
    """
    print("\n📅 Setting up cron jobs...\n")

    schedule = sources.get('schedule', {})
    delivery = sources.get('delivery', {})
    language = sources.get('language', {}).get('default', 'de')  # placeholder, not used yet

    # Determine delivery target — placeholder for the future cron command line.
    if delivery.get('whatsapp', {}).get('enabled'):
        group = delivery['whatsapp'].get('group', '')
        send_cmd = f"--send --group '{group}'" if group else ""
    elif delivery.get('telegram', {}).get('enabled'):
        group = delivery['telegram'].get('group', '')
        send_cmd = f"--send --group '{group}'"  # Would need telegram support
    else:
        send_cmd = ""

    # Morning job
    morning = schedule.get('morning', {})
    if morning.get('enabled'):
        morning_cron = morning.get('cron', '30 6 * * 1-5')
        tz = morning.get('timezone', 'America/Los_Angeles')

        print(f" Creating morning briefing job: {morning_cron} ({tz})")
        # Note: Actual cron creation would happen via openclaw cron add
        print(" ✅ Morning briefing configured")

    # Evening job
    evening = schedule.get('evening', {})
    if evening.get('enabled'):
        evening_cron = evening.get('cron', '0 13 * * 1-5')
        tz = evening.get('timezone', 'America/Los_Angeles')

        print(f" Creating evening briefing job: {evening_cron} ({tz})")
        print(" ✅ Evening briefing configured")
|
||||
|
||||
|
||||
def run_setup(args):
    """Run interactive setup wizard.

    Walks every config section (or only ``args.section`` when given), then
    offers to persist the result and to configure cron jobs.
    """
    print("\n" + "="*60)
    print("📈 Finance News Skill - Setup Wizard")
    print("="*60)

    # Load existing or default config
    if args.reset:
        sources = get_default_sources()
        print("\n⚠️ Starting with fresh configuration")
    else:
        sources = load_sources()
        if SOURCES_FILE.exists():
            print(f"\n📂 Loaded existing configuration from {SOURCES_FILE}")
        else:
            print("\n📂 No existing configuration found, using defaults")

    # Run through each section; each helper mutates `sources` in place.
    if not args.section or args.section == 'feeds':
        setup_rss_feeds(sources)

    if not args.section or args.section == 'markets':
        setup_markets(sources)

    if not args.section or args.section == 'delivery':
        setup_delivery(sources)

    if not args.section or args.section == 'language':
        setup_language(sources)

    if not args.section or args.section == 'schedule':
        setup_schedule(sources)

    # Save configuration (cron setup is only offered after a save).
    print("\n" + "-"*60)
    if prompt_bool("Save configuration?", True):
        save_sources(sources)

        # Set up cron jobs
        if prompt_bool("Set up cron jobs now?", True):
            setup_cron_jobs(sources)
    else:
        print("❌ Configuration not saved")

    print("\n✅ Setup complete!")
    print("\nNext steps:")
    print(" • Run 'finance-news portfolio-list' to check your watchlist")
    print(" • Run 'finance-news briefing --morning' to test a briefing")
    print(" • Run 'finance-news market' to see market overview")
    print()
|
||||
|
||||
|
||||
def show_config(args):
    """Print the current configuration as pretty-printed JSON."""
    print(json.dumps(load_sources(), indent=2))
|
||||
|
||||
|
||||
def main():
    """CLI entry point: `wizard` (default) and `show` sub-commands."""
    parser = argparse.ArgumentParser(description='Finance News Setup')
    subparsers = parser.add_subparsers(dest='command')

    # Setup command (default)
    setup_parser = subparsers.add_parser('wizard', help='Run setup wizard')
    setup_parser.add_argument('--reset', action='store_true', help='Reset to defaults')
    setup_parser.add_argument('--section', choices=['feeds', 'markets', 'delivery', 'language', 'schedule'],
                              help='Configure specific section only')
    setup_parser.set_defaults(func=run_setup)

    # Show config
    show_parser = subparsers.add_parser('show', help='Show current configuration')
    show_parser.set_defaults(func=show_config)

    args = parser.parse_args()

    if args.command:
        args.func(args)
    else:
        # Default to wizard: fabricate the attributes the wizard expects,
        # since no sub-command (and thus no wizard args) was parsed.
        args.reset = False
        args.section = None
        run_setup(args)


if __name__ == '__main__':
    main()
|
||||
335
scripts/stocks.py
Normal file
335
scripts/stocks.py
Normal file
@@ -0,0 +1,335 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
stocks.py - Unified stock management for holdings and watchlist.
|
||||
|
||||
Single source of truth for:
|
||||
- Holdings (stocks you own)
|
||||
- Watchlist (stocks you're watching to buy)
|
||||
|
||||
Usage:
|
||||
from stocks import load_stocks, save_stocks, get_holdings, get_watchlist
|
||||
from stocks import add_to_watchlist, add_to_holdings, move_to_holdings
|
||||
|
||||
CLI:
|
||||
stocks.py list [--holdings|--watchlist]
|
||||
stocks.py add-watchlist TICKER [--target 380] [--notes "Buy zone"]
|
||||
stocks.py add-holding TICKER --name "Company" [--category "Tech"]
|
||||
stocks.py move TICKER # watchlist → holdings (you bought it)
|
||||
stocks.py remove TICKER [--from holdings|watchlist]
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
# Default path - can be overridden
|
||||
STOCKS_FILE = Path(__file__).parent.parent / "config" / "stocks.json"
|
||||
|
||||
|
||||
def load_stocks(path: Optional[Path] = None) -> dict:
    """Load the unified stocks file; return an empty skeleton when absent."""
    target = path or STOCKS_FILE
    if target.exists():
        return json.loads(target.read_text())
    # Fresh structure matching what save_stocks() persists.
    return {
        "version": "1.0",
        "updated": datetime.now().strftime("%Y-%m-%d"),
        "holdings": [],
        "watchlist": [],
        "alert_definitions": {}
    }
|
||||
|
||||
|
||||
def save_stocks(data: dict, path: Optional[Path] = None):
    """Write the unified stocks file, stamping today's date into 'updated'."""
    target = path or STOCKS_FILE
    data["updated"] = datetime.now().strftime("%Y-%m-%d")
    target.write_text(json.dumps(data, indent=2))
|
||||
|
||||
|
||||
def get_holdings(data: Optional[dict] = None) -> list:
    """Return the holdings list; loads the stocks file when *data* is None."""
    source = load_stocks() if data is None else data
    return source.get("holdings", [])
|
||||
|
||||
|
||||
def get_watchlist(data: Optional[dict] = None) -> list:
    """Return the watchlist; loads the stocks file when *data* is None."""
    source = load_stocks() if data is None else data
    return source.get("watchlist", [])
|
||||
|
||||
|
||||
def get_holding_tickers(data: Optional[dict] = None) -> set:
    """Return the set of holding tickers for O(1) membership tests."""
    return {entry.get("ticker") for entry in get_holdings(data)}
|
||||
|
||||
|
||||
def get_watchlist_tickers(data: Optional[dict] = None) -> set:
    """Return the set of watchlist tickers for O(1) membership tests."""
    return {entry.get("ticker") for entry in get_watchlist(data)}
|
||||
|
||||
|
||||
def add_to_watchlist(
    ticker: str,
    target: Optional[float] = None,
    stop: Optional[float] = None,
    notes: str = "",
    alerts: Optional[list] = None
) -> bool:
    """Add *ticker* to the watchlist, or update it in place when present.

    Only non-None (and, for notes, non-empty) arguments overwrite existing
    fields. The list is kept sorted by ticker. Always returns True.
    """
    data = load_stocks()

    existing = next((w for w in data["watchlist"] if w.get("ticker") == ticker), None)
    if existing is not None:
        if target is not None:
            existing["target"] = target
        if stop is not None:
            existing["stop"] = stop
        if notes:
            existing["notes"] = notes
        if alerts is not None:
            existing["alerts"] = alerts
        save_stocks(data)
        return True

    data["watchlist"].append({
        "ticker": ticker,
        "target": target,
        "stop": stop,
        "alerts": alerts or [],
        "notes": notes
    })
    data["watchlist"].sort(key=lambda entry: entry.get("ticker", ""))
    save_stocks(data)
    return True
|
||||
|
||||
|
||||
def add_to_holdings(
    ticker: str,
    name: str = "",
    category: str = "",
    notes: str = "",
    target: Optional[float] = None,
    stop: Optional[float] = None,
    alerts: Optional[list] = None
) -> bool:
    """Add a stock to holdings, or update it in place when present.

    Target/stop serve as 'buy more' alert levels. Only non-None (and, for
    strings, non-empty) arguments overwrite existing fields. The list stays
    sorted by ticker. Always returns True.
    """
    data = load_stocks()

    existing = next((h for h in data["holdings"] if h.get("ticker") == ticker), None)
    if existing is not None:
        if name:
            existing["name"] = name
        if category:
            existing["category"] = category
        if notes:
            existing["notes"] = notes
        if target is not None:
            existing["target"] = target
        if stop is not None:
            existing["stop"] = stop
        if alerts is not None:
            existing["alerts"] = alerts
        save_stocks(data)
        return True

    data["holdings"].append({
        "ticker": ticker,
        "name": name,
        "category": category,
        "notes": notes,
        "target": target,
        "stop": stop,
        "alerts": alerts or []
    })
    data["holdings"].sort(key=lambda entry: entry.get("ticker", ""))
    save_stocks(data)
    return True
|
||||
|
||||
|
||||
def move_to_holdings(
    ticker: str,
    name: str = "",
    category: str = "",
    notes: str = ""
) -> bool:
    """Move a stock from watchlist to holdings (you bought it).

    Returns False (with a stderr warning) when the ticker is not on the
    watchlist; otherwise removes it from the watchlist, appends a holdings
    entry, persists the file, and returns True.
    """
    data = load_stocks()

    # Find and remove the entry from the watchlist.
    watchlist_item = None
    for i, w in enumerate(data["watchlist"]):
        if w.get("ticker") == ticker:
            watchlist_item = data["watchlist"].pop(i)
            break

    if not watchlist_item:
        print(f"⚠️ {ticker} not found in watchlist", file=sys.stderr)
        return False

    # Add to holdings.
    # NOTE(review): the watchlist entry's target/stop/alerts are discarded
    # here, and `name` falls back to the watchlist *notes* field — looks
    # deliberate as a breadcrumb, but worth confirming.
    data["holdings"].append({
        "ticker": ticker,
        "name": name or watchlist_item.get("notes", ""),
        "category": category,
        "notes": notes or f"Bought (was on watchlist with target ${watchlist_item.get('target', 'N/A')})"
    })
    data["holdings"].sort(key=lambda x: x.get("ticker", ""))
    save_stocks(data)
    return True
|
||||
|
||||
|
||||
def remove_stock(ticker: str, from_list: str = "both") -> bool:
    """Remove *ticker* from holdings, watchlist, or both.

    Persists the file only when something was actually removed; returns
    True in that case.
    """
    data = load_stocks()
    removed = False

    # Map the from_list selector onto the dict keys to touch.
    keys = [k for k, allowed in (("holdings", ("holdings", "both")),
                                 ("watchlist", ("watchlist", "both")))
            if from_list in allowed]

    for key in keys:
        kept = [entry for entry in data[key] if entry.get("ticker") != ticker]
        if len(kept) < len(data[key]):
            removed = True
        data[key] = kept

    if removed:
        save_stocks(data)
    return removed
|
||||
|
||||
|
||||
def list_stocks(show_holdings: bool = True, show_watchlist: bool = True):
    """Print stocks list.

    Each section is capped at 20 rows; a trailing "... and N more" line
    summarizes the overflow.
    """
    data = load_stocks()

    if show_holdings:
        print(f"\n📊 HOLDINGS ({len(data['holdings'])})")
        print("-" * 50)
        for h in data["holdings"][:20]:
            print(f" {h['ticker']:10} {h.get('name', '')[:30]}")
        if len(data["holdings"]) > 20:
            print(f" ... and {len(data['holdings']) - 20} more")

    if show_watchlist:
        print(f"\n👀 WATCHLIST ({len(data['watchlist'])})")
        print("-" * 50)
        for w in data["watchlist"][:20]:
            # Show the buy-zone target when set, else a placeholder.
            target = f"${w['target']}" if w.get('target') else "no target"
            print(f" {w['ticker']:10} {target:>10} {w.get('notes', '')[:25]}")
        if len(data["watchlist"]) > 20:
            print(f" ... and {len(data['watchlist']) - 20} more")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse sub-commands and dispatch to the stock helpers."""
    parser = argparse.ArgumentParser(description="Unified stock management")
    subparsers = parser.add_subparsers(dest="command", help="Commands")

    # list
    list_parser = subparsers.add_parser("list", help="List stocks")
    list_parser.add_argument("--holdings", action="store_true", help="Show only holdings")
    list_parser.add_argument("--watchlist", action="store_true", help="Show only watchlist")

    # add-watchlist
    add_watch = subparsers.add_parser("add-watchlist", help="Add to watchlist")
    add_watch.add_argument("ticker", help="Stock ticker")
    add_watch.add_argument("--target", type=float, help="Target price")
    add_watch.add_argument("--stop", type=float, help="Stop loss")
    add_watch.add_argument("--notes", default="", help="Notes")

    # add-holding
    add_hold = subparsers.add_parser("add-holding", help="Add to holdings")
    add_hold.add_argument("ticker", help="Stock ticker")
    add_hold.add_argument("--name", default="", help="Company name")
    add_hold.add_argument("--category", default="", help="Category")
    add_hold.add_argument("--notes", default="", help="Notes")
    add_hold.add_argument("--target", type=float, help="Buy-more target price")
    add_hold.add_argument("--stop", type=float, help="Stop loss price")

    # move (watchlist → holdings)
    move = subparsers.add_parser("move", help="Move from watchlist to holdings")
    move.add_argument("ticker", help="Stock ticker")
    move.add_argument("--name", default="", help="Company name")
    move.add_argument("--category", default="", help="Category")

    # remove
    remove = subparsers.add_parser("remove", help="Remove stock")
    remove.add_argument("ticker", help="Stock ticker")
    remove.add_argument("--from", dest="from_list", choices=["holdings", "watchlist", "both"],
                        default="both", help="Remove from which list")

    # set-alert (for existing holdings)
    set_alert = subparsers.add_parser("set-alert", help="Set buy-more/stop alert on holding")
    set_alert.add_argument("ticker", help="Stock ticker")
    set_alert.add_argument("--target", type=float, help="Buy-more target price")
    set_alert.add_argument("--stop", type=float, help="Stop loss price")

    args = parser.parse_args()

    if args.command == "list":
        # No flags → show both; a single flag narrows to that list.
        show_h = not args.watchlist or args.holdings
        show_w = not args.holdings or args.watchlist
        if not args.holdings and not args.watchlist:
            show_h = show_w = True
        list_stocks(show_holdings=show_h, show_watchlist=show_w)

    elif args.command == "add-watchlist":
        add_to_watchlist(args.ticker.upper(), args.target, args.stop, args.notes)
        print(f"✅ Added {args.ticker.upper()} to watchlist")

    elif args.command == "add-holding":
        add_to_holdings(args.ticker.upper(), args.name, args.category, args.notes,
                        args.target, args.stop)
        print(f"✅ Added {args.ticker.upper()} to holdings")

    elif args.command == "move":
        if move_to_holdings(args.ticker.upper(), args.name, args.category):
            print(f"✅ Moved {args.ticker.upper()} from watchlist to holdings")

    elif args.command == "remove":
        if remove_stock(args.ticker.upper(), args.from_list):
            print(f"✅ Removed {args.ticker.upper()}")
        else:
            print(f"⚠️ {args.ticker.upper()} not found")

    elif args.command == "set-alert":
        # Updates target/stop in place; only applies to existing holdings.
        data = load_stocks()
        found = False
        for h in data["holdings"]:
            if h.get("ticker") == args.ticker.upper():
                if args.target is not None:
                    h["target"] = args.target
                if args.stop is not None:
                    h["stop"] = args.stop
                save_stocks(data)
                found = True
                print(f"✅ Set alert on {args.ticker.upper()}: target=${args.target}, stop=${args.stop}")
                break
        if not found:
            print(f"⚠️ {args.ticker.upper()} not found in holdings")

    else:
        parser.print_help()


if __name__ == "__main__":
    main()
|
||||
1728
scripts/summarize.py
Normal file
1728
scripts/summarize.py
Normal file
File diff suppressed because it is too large
Load Diff
158
scripts/translate_portfolio.py
Normal file
158
scripts/translate_portfolio.py
Normal file
@@ -0,0 +1,158 @@
|
||||
#!/usr/bin/env python3
|
||||
"""Translate portfolio headlines in briefing JSON using openclaw.
|
||||
|
||||
Usage: python3 translate_portfolio.py /path/to/briefing.json [--lang de]
|
||||
|
||||
Reads briefing JSON, translates portfolio article headlines via openclaw,
|
||||
writes back the modified JSON.
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import re
|
||||
import subprocess
|
||||
import sys
|
||||
|
||||
|
||||
def extract_headlines(portfolio_message: str) -> list[str]:
    """Collect article headlines (bullet lines starting with •) from a portfolio message.

    A headline may carry a trailing reference marker such as ``[3]``; the
    marker is stripped. Bullet lines without a marker are returned with only
    the bullet removed.
    """
    ref_pattern = re.compile(r'•\s*(.+?)\s*\[\d+\]$')
    collected: list[str] = []
    for raw in portfolio_message.split('\n'):
        stripped = raw.strip()
        if not stripped.startswith('•'):
            continue
        hit = ref_pattern.match(stripped)
        # With a reference marker keep just the headline text; otherwise drop the bullet.
        collected.append(hit.group(1) if hit else stripped[1:].strip())
    return collected
|
||||
|
||||
|
||||
def translate_headlines(headlines: list[str], lang: str = "de") -> list[str]:
    """Translate headlines using the openclaw agent CLI.

    Args:
        headlines: English headline strings to translate.
        lang: Target language code (e.g. ``de``). BUG FIX: this parameter was
            previously ignored and the prompt always requested German; it is
            now honored.

    Returns:
        Translated headlines in the same order, or the original list on any
        failure (missing binary, timeout, non-zero exit, unparseable reply,
        or a length mismatch).
    """
    if not headlines:
        return []

    # Map common ISO codes to language names the model understands;
    # fall back to the raw code for anything else.
    lang_names = {"de": "German", "en": "English", "fr": "French",
                  "es": "Spanish", "it": "Italian", "ja": "Japanese"}
    target_language = lang_names.get(lang.lower(), lang)

    prompt = f"""Translate these English headlines to {target_language}.
Return ONLY a JSON array of strings in the same order.
Example: ["Translation 1", "Translation 2"]
Do not add commentary.

Headlines:
"""
    for idx, title in enumerate(headlines, start=1):
        prompt += f"{idx}. {title}\n"

    try:
        result = subprocess.run(
            [
                'openclaw', 'agent',
                '--session-id', 'finance-news-translate-portfolio',
                '--message', prompt,
                '--json',
                '--timeout', '60'
            ],
            capture_output=True,
            text=True,
            timeout=90
        )
    except (subprocess.TimeoutExpired, FileNotFoundError, OSError) as e:
        print(f"⚠️ Translation failed: {e}", file=sys.stderr)
        return headlines

    if result.returncode != 0:
        print(f"⚠️ openclaw error: {result.stderr}", file=sys.stderr)
        return headlines

    # Extract reply from openclaw JSON output.
    # Format: {"result": {"payloads": [{"text": "..."}]}}
    # openclaw may print plugin-loading messages before the JSON, so skip to '{'.
    stdout = result.stdout
    json_start = stdout.find('{')
    if json_start > 0:
        stdout = stdout[json_start:]

    try:
        output = json.loads(stdout)
        payloads = output.get('result', {}).get('payloads', [])
        if payloads and payloads[0].get('text'):
            reply = payloads[0]['text']
        else:
            # Older/alternate output shapes; last resort is the raw stdout.
            reply = output.get('reply', '') or output.get('message', '') or stdout
    except json.JSONDecodeError:
        reply = stdout

    # The model may wrap the JSON array in a fenced code block; unwrap it.
    json_text = reply.strip()
    if "```" in json_text:
        match = re.search(r'```(?:json)?\s*(.*?)```', json_text, re.DOTALL)
        if match:
            json_text = match.group(1).strip()

    try:
        translated = json.loads(json_text)
        # Only accept a full, order-preserving translation.
        if isinstance(translated, list) and len(translated) == len(headlines):
            print(f"✅ Translated {len(headlines)} portfolio headlines", file=sys.stderr)
            return translated
    except json.JSONDecodeError as e:
        print(f"⚠️ JSON parse error: {e}", file=sys.stderr)

    print(f"⚠️ Translation failed, using original headlines", file=sys.stderr)
    return headlines
|
||||
|
||||
|
||||
def replace_headlines(portfolio_message: str, original: list[str], translated: list[str]) -> str:
    """Swap each original headline for its translation inside the message.

    The bullet prefix ("• ") anchors each substitution so surrounding text
    and trailing reference markers are preserved. Pairs whose translation
    equals the original are skipped.
    """
    updated = portfolio_message
    for before, after in zip(original, translated):
        if before == after:
            continue
        updated = updated.replace(f"• {before}", f"• {after}")
    return updated
|
||||
|
||||
|
||||
def main():
    """CLI entry point: translate portfolio headlines in a briefing JSON file in place.

    Reads the briefing JSON, extracts bullet headlines from
    ``portfolio_message``, translates them, and writes the file back.
    Status messages go to stderr so stdout stays machine-readable.
    """
    parser = argparse.ArgumentParser(description='Translate portfolio headlines')
    parser.add_argument('json_file', help='Path to briefing JSON file')
    parser.add_argument('--lang', default='de', help='Target language (default: de)')
    args = parser.parse_args()

    # Read JSON. BUG FIX: force UTF-8 — briefing files contain non-ASCII text
    # and the platform default encoding is not guaranteed to handle it.
    try:
        with open(args.json_file, 'r', encoding='utf-8') as f:
            data = json.load(f)
    except (FileNotFoundError, json.JSONDecodeError) as e:
        print(f"❌ Error reading {args.json_file}: {e}", file=sys.stderr)
        sys.exit(1)

    portfolio_message = data.get('portfolio_message', '')
    if not portfolio_message:
        print("No portfolio_message to translate", file=sys.stderr)
        print(json.dumps(data, ensure_ascii=False, indent=2))
        return

    # Extract, translate, replace.
    headlines = extract_headlines(portfolio_message)
    if not headlines:
        print("No headlines found in portfolio_message", file=sys.stderr)
        print(json.dumps(data, ensure_ascii=False, indent=2))
        return

    print(f"📝 Found {len(headlines)} headlines to translate", file=sys.stderr)
    translated = translate_headlines(headlines, args.lang)

    # Update portfolio message.
    data['portfolio_message'] = replace_headlines(portfolio_message, headlines, translated)

    # Write back. UTF-8 is required since ensure_ascii=False emits raw
    # non-ASCII characters that would fail under a C/ASCII locale.
    with open(args.json_file, 'w', encoding='utf-8') as f:
        json.dump(data, f, ensure_ascii=False, indent=2)

    print(f"✅ Updated {args.json_file}", file=sys.stderr)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
45
scripts/utils.py
Normal file
45
scripts/utils.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""Shared helpers."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
def ensure_venv() -> None:
    """Re-exec this process under the repo-local venv python when needed.

    No-op when the bootstrap marker env var is set, when a virtual
    environment is already active, or when the venv interpreter is missing
    (a warning is printed in that last case).
    """
    already_bootstrapped = os.environ.get("FINANCE_NEWS_VENV_BOOTSTRAPPED") == "1"
    inside_virtualenv = sys.prefix != sys.base_prefix
    if already_bootstrapped or inside_virtualenv:
        return

    repo_root = Path(__file__).resolve().parent.parent
    interpreter = repo_root / "venv" / "bin" / "python3"
    if not interpreter.exists():
        print("⚠️ finance-news venv missing; run scripts from the repo venv to avoid dependency errors.", file=sys.stderr)
        return

    # Mark the child so it does not try to bootstrap again after the exec.
    child_env = os.environ.copy()
    child_env["FINANCE_NEWS_VENV_BOOTSTRAPPED"] = "1"
    os.execvpe(str(interpreter), [str(interpreter)] + sys.argv, child_env)
|
||||
|
||||
|
||||
def compute_deadline(deadline_sec: int | None) -> float | None:
|
||||
if deadline_sec is None:
|
||||
return None
|
||||
if deadline_sec <= 0:
|
||||
return None
|
||||
return time.monotonic() + deadline_sec
|
||||
|
||||
|
||||
def time_left(deadline: float | None) -> int | None:
|
||||
if deadline is None:
|
||||
return None
|
||||
remaining = int(deadline - time.monotonic())
|
||||
return remaining
|
||||
|
||||
|
||||
def clamp_timeout(default_timeout: int, deadline: float | None, minimum: int = 1) -> int:
    """Bound *default_timeout* by the time remaining before *deadline*.

    Returns the default unchanged when there is no deadline. Raises
    ``TimeoutError`` once the deadline has passed; otherwise the result is
    never below *minimum*.
    """
    remaining = time_left(deadline)
    if remaining is None:
        return default_timeout
    if remaining <= 0:
        raise TimeoutError("Deadline exceeded")
    capped = min(default_timeout, remaining)
    return max(capped, minimum)
|
||||
109
scripts/venv-setup.sh
Normal file
109
scripts/venv-setup.sh
Normal file
@@ -0,0 +1,109 @@
|
||||
#!/usr/bin/env bash
# Finance News - venv Setup Script
# Creates or rebuilds the Python virtual environment
# Handles NixOS libstdc++ issues automatically
#
# Usage: venv-setup.sh [--force|-f]
#   --force / -f  remove and rebuild an existing venv
# Override the interpreter with PYTHON_BIN=/path/to/python3.

# Abort on the first failing command.
set -e

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
BASE_DIR="$(cd "${SCRIPT_DIR}/.." && pwd)"
VENV_DIR="${BASE_DIR}/venv"

echo "📦 Finance News - venv Setup"
echo "============================"
echo ""

# Check Python version
PYTHON_BIN="${PYTHON_BIN:-python3}"
PYTHON_VERSION=$("$PYTHON_BIN" --version 2>&1)
echo "Using: $PYTHON_VERSION"
echo "Path: $(command -v "$PYTHON_BIN" 2>/dev/null || echo "$PYTHON_BIN")"
echo ""

# Remove existing venv if --force flag
if [[ "$1" == "--force" || "$1" == "-f" ]]; then
    if [[ -d "$VENV_DIR" ]]; then
        echo "🗑️ Removing existing venv..."
        rm -rf "$VENV_DIR"
    fi
fi

# Check if venv exists (without --force an existing venv is left untouched)
if [[ -d "$VENV_DIR" ]]; then
    echo "⚠️ venv already exists at $VENV_DIR"
    echo " Use --force to rebuild"
    exit 0
fi

# Create venv
echo "📁 Creating virtual environment..."
"$PYTHON_BIN" -m venv "$VENV_DIR"

# Activate venv so the pip calls below target the new environment
source "$VENV_DIR/bin/activate"

# Upgrade pip
echo "⬆️ Upgrading pip..."
pip install --upgrade pip --quiet

# Install requirements
echo "📥 Installing dependencies..."
pip install -r "$BASE_DIR/requirements.txt" --quiet

# NixOS-specific: Add LD_LIBRARY_PATH to activate script
# (numpy/yfinance wheels need a libstdc++ that Nix does not put on the
# default loader path)
if [[ -d "/nix/store" ]]; then
    echo "🐧 NixOS detected - configuring libstdc++ path..."

    ACTIVATE_SCRIPT="$VENV_DIR/bin/activate"

    # Find libstdc++ path: prefer Linuxbrew locations, then fall back to
    # scanning the nix store for a gcc lib output.
    LIBSTDCXX_PATH=""
    if [[ -d "/home/linuxbrew/.linuxbrew/lib" ]]; then
        LIBSTDCXX_PATH="/home/linuxbrew/.linuxbrew/lib"
    elif [[ -d "$HOME/.linuxbrew/lib" ]]; then
        LIBSTDCXX_PATH="$HOME/.linuxbrew/lib"
    else
        # Try nix store - only set if find returns a result
        GCC_LIB_DIR=$(find /nix/store -maxdepth 2 -name "*-gcc-*-lib" -print -quit 2>/dev/null)
        if [[ -n "$GCC_LIB_DIR" && -d "$GCC_LIB_DIR/lib" ]]; then
            LIBSTDCXX_PATH="$GCC_LIB_DIR/lib"
        fi
    fi

    if [[ -n "$LIBSTDCXX_PATH" && -d "$LIBSTDCXX_PATH" ]]; then
        # Add to activate script if not already there (grep guard makes the
        # append idempotent across repeated runs)
        if ! grep -q "FINANCE_NEWS_LD_LIBRARY_PATH" "$ACTIVATE_SCRIPT"; then
            # NOTE: $LIBSTDCXX_PATH expands now; the \$-escaped variables are
            # left literal so they expand when 'activate' is sourced later.
            cat >> "$ACTIVATE_SCRIPT" << EOF

# NixOS libstdc++ fix for numpy/yfinance (added by venv-setup.sh)
if [[ -z "\${FINANCE_NEWS_LD_LIBRARY_PATH:-}" ]]; then
    export FINANCE_NEWS_LD_LIBRARY_PATH=1
    if [[ -z "\${LD_LIBRARY_PATH:-}" ]]; then
        export LD_LIBRARY_PATH="$LIBSTDCXX_PATH"
    else
        export LD_LIBRARY_PATH="$LIBSTDCXX_PATH:\$LD_LIBRARY_PATH"
    fi
fi
EOF
            echo " Added LD_LIBRARY_PATH=$LIBSTDCXX_PATH to activate script"
        fi
    else
        echo " ⚠️ Could not find libstdc++.so.6 path"
        echo " Install Linuxbrew: /bin/bash -c \"\$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/HEAD/install.sh)\""
    fi
fi

# Verify installation (yfinance may still fail without LD_LIBRARY_PATH;
# that is reported as a warning, not an error)
echo ""
echo "✅ venv created successfully!"
echo ""
echo "Verifying installation..."
"$VENV_DIR/bin/python3" -c "import feedparser; print(' ✓ feedparser')"
"$VENV_DIR/bin/python3" -c "import yfinance; print(' ✓ yfinance')" 2>/dev/null || echo " ⚠️ yfinance import failed (may need LD_LIBRARY_PATH)"

echo ""
echo "To activate manually:"
echo " source $VENV_DIR/bin/activate"
echo ""
echo "Or just use the CLI:"
echo " ./scripts/finance-news briefing --morning"
||||
Reference in New Issue
Block a user