From 7d03078316008e9ca825ca4077c1c21650aeddcd Mon Sep 17 00:00:00 2001 From: zlei9 Date: Sun, 29 Mar 2026 14:34:25 +0800 Subject: [PATCH] Initial commit with translated description --- App-Plan.md | 442 ++++++ README.md | 214 +++ SKILL.md | 248 ++++ TODO.md | 394 +++++ _meta.json | 6 + docs/ARCHITECTURE.md | 408 +++++ docs/CONCEPT.md | 233 +++ docs/HOT_SCANNER.md | 288 ++++ docs/README.md | 95 ++ docs/USAGE.md | 465 ++++++ scripts/analyze_stock.py | 2532 ++++++++++++++++++++++++++++++++ scripts/dividends.py | 365 +++++ scripts/hot_scanner.py | 582 ++++++++ scripts/portfolio.py | 548 +++++++ scripts/rumor_scanner.py | 342 +++++ scripts/test_stock_analysis.py | 381 +++++ scripts/watchlist.py | 336 +++++ 17 files changed, 7879 insertions(+) create mode 100644 App-Plan.md create mode 100644 README.md create mode 100644 SKILL.md create mode 100644 TODO.md create mode 100644 _meta.json create mode 100644 docs/ARCHITECTURE.md create mode 100644 docs/CONCEPT.md create mode 100644 docs/HOT_SCANNER.md create mode 100644 docs/README.md create mode 100644 docs/USAGE.md create mode 100644 scripts/analyze_stock.py create mode 100644 scripts/dividends.py create mode 100644 scripts/hot_scanner.py create mode 100644 scripts/portfolio.py create mode 100644 scripts/rumor_scanner.py create mode 100644 scripts/test_stock_analysis.py create mode 100644 scripts/watchlist.py diff --git a/App-Plan.md b/App-Plan.md new file mode 100644 index 0000000..7119ee7 --- /dev/null +++ b/App-Plan.md @@ -0,0 +1,442 @@ +# StockPulse - Commercial Product Roadmap + +## Vision + +Transform the stock-analysis skill into **StockPulse**, a commercial mobile app for retail investors with AI-powered stock and crypto analysis, portfolio tracking, and personalized alerts. 
+ +## Technical Decisions + +- **Mobile:** Flutter (iOS + Android cross-platform) +- **Backend:** Python FastAPI on AWS (ECS/Lambda) +- **Database:** PostgreSQL (RDS) + Redis (ElastiCache) +- **Auth:** AWS Cognito or Firebase Auth +- **Monetization:** Freemium + Subscription ($9.99/mo or $79.99/yr) + +--- + +## Architecture Overview + +``` +┌─────────────────────────────────────────────────────────────────┐ +│ MOBILE APP (Flutter) │ +│ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ +│ │Dashboard │ │Portfolio │ │ Analysis │ │ Alerts │ │ +│ └──────────┘ └──────────┘ └──────────┘ └──────────┘ │ +└─────────────────────────────────────────────────────────────────┘ + │ HTTPS/REST + ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ API GATEWAY (AWS) │ +│ Rate Limiting, Auth, Caching │ +└─────────────────────────────────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────────┐ +│ BACKEND (FastAPI on ECS) │ +│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ +│ │ Auth Service │ │ Analysis API │ │ Portfolio API│ │ +│ └──────────────┘ └──────────────┘ └──────────────┘ │ +│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ +│ │ Alerts Svc │ │ Subscription │ │ User Service │ │ +│ └──────────────┘ └──────────────┘ └──────────────┘ │ +└─────────────────────────────────────────────────────────────────┘ + │ + ┌─────────────────────┼─────────────────────┐ + ▼ ▼ ▼ +┌──────────────┐ ┌──────────────┐ ┌──────────────┐ +│ PostgreSQL │ │ Redis │ │ S3 │ +│ (RDS) │ │ (ElastiCache)│ │ (Reports) │ +└──────────────┘ └──────────────┘ └──────────────┘ + + BACKGROUND WORKERS (Lambda/ECS) +┌─────────────────────────────────────────────────────────────────┐ +│ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ │ +│ │Price Updater │ │Alert Checker │ │Daily Reports │ │ +│ │ (5 min) │ │ (1 min) │ │ (Daily) │ │ +│ └──────────────┘ └──────────────┘ └──────────────┘ │ 
+└─────────────────────────────────────────────────────────────────┘ +``` + +--- + +## Feature Tiers + +### Free Tier +- 1 portfolio (max 10 assets) +- Basic stock/crypto analysis +- Daily market summary +- Limited to 5 analyses/day +- Ads displayed + +### Premium ($9.99/mo) +- Unlimited portfolios & assets +- Full 8-dimension analysis +- Real-time price alerts +- Push notifications +- Period reports (daily/weekly/monthly) +- No ads +- Priority support + +### Pro ($19.99/mo) - Future +- API access +- Custom watchlists +- Advanced screeners +- Export to CSV/PDF +- Portfolio optimization suggestions + +--- + +## Development Phases + +### Phase 1: Backend API + +**Goal:** Convert Python scripts to production REST API + +#### Tasks: +1. **Project Setup** + - FastAPI project structure + - Docker containerization + - CI/CD pipeline (GitHub Actions) + - AWS infrastructure (Terraform) + +2. **Core API Endpoints** + ``` + POST /auth/register + POST /auth/login + POST /auth/refresh + + GET /analysis/{ticker} + POST /analysis/batch + + GET /portfolios + POST /portfolios + PUT /portfolios/{id} + DELETE /portfolios/{id} + + GET /portfolios/{id}/assets + POST /portfolios/{id}/assets + PUT /portfolios/{id}/assets/{ticker} + DELETE /portfolios/{id}/assets/{ticker} + + GET /portfolios/{id}/performance?period=weekly + GET /portfolios/{id}/summary + + GET /alerts + POST /alerts + DELETE /alerts/{id} + + GET /user/subscription + POST /user/subscription/upgrade + ``` + +3. **Database Schema** + ```sql + users (id, email, password_hash, created_at, subscription_tier) + portfolios (id, user_id, name, created_at, updated_at) + assets (id, portfolio_id, ticker, asset_type, quantity, cost_basis) + alerts (id, user_id, ticker, condition, threshold, enabled) + analysis_cache (ticker, data, expires_at) + subscriptions (id, user_id, stripe_id, status, expires_at) + ``` + +4. 
**Refactor Existing Code** + - Extract `analyze_stock.py` into modules: + - `analysis/earnings.py` + - `analysis/fundamentals.py` + - `analysis/sentiment.py` + - `analysis/crypto.py` + - `analysis/market_context.py` + - Add async support throughout + - Implement proper caching (Redis) + - Rate limiting per user tier + +#### Files to Create: +``` +backend/ +├── app/ +│ ├── main.py # FastAPI app +│ ├── config.py # Settings +│ ├── models/ # SQLAlchemy models +│ ├── schemas/ # Pydantic schemas +│ ├── routers/ # API routes +│ │ ├── auth.py +│ │ ├── analysis.py +│ │ ├── portfolios.py +│ │ └── alerts.py +│ ├── services/ # Business logic +│ │ ├── analysis/ # Refactored from analyze_stock.py +│ │ ├── portfolio.py +│ │ └── alerts.py +│ └── workers/ # Background tasks +├── tests/ +├── Dockerfile +├── docker-compose.yml +└── requirements.txt +``` + +--- + +### Phase 2: Flutter Mobile App + +**Goal:** Build polished cross-platform mobile app + +#### Screens: +1. **Onboarding** - Welcome, feature highlights, sign up/login +2. **Dashboard** - Market overview, portfolio summary, alerts +3. **Analysis** - Search ticker, view full analysis, save to portfolio +4. **Portfolio** - List portfolios, asset breakdown, P&L chart +5. **Alerts** - Manage price alerts, notification settings +6. 
**Settings** - Account, subscription, preferences + +#### Key Flutter Packages: +```yaml +dependencies: + flutter_bloc: ^8.0.0 # State management + dio: ^5.0.0 # HTTP client + go_router: ^12.0.0 # Navigation + fl_chart: ^0.65.0 # Charts + firebase_messaging: ^14.0.0 # Push notifications + in_app_purchase: ^3.0.0 # Subscriptions + shared_preferences: ^2.0.0 + flutter_secure_storage: ^9.0.0 +``` + +#### App Structure: +``` +lib/ +├── main.dart +├── app/ +│ ├── routes.dart +│ └── theme.dart +├── features/ +│ ├── auth/ +│ │ ├── bloc/ +│ │ ├── screens/ +│ │ └── widgets/ +│ ├── dashboard/ +│ ├── analysis/ +│ ├── portfolio/ +│ ├── alerts/ +│ └── settings/ +├── core/ +│ ├── api/ +│ ├── models/ +│ └── utils/ +└── shared/ + └── widgets/ +``` + +--- + +### Phase 3: Infrastructure & DevOps + +**Goal:** Production-ready cloud infrastructure + +#### AWS Services: +- **ECS Fargate** - Backend containers +- **RDS PostgreSQL** - Database +- **ElastiCache Redis** - Caching +- **S3** - Static assets, reports +- **CloudFront** - CDN +- **Cognito** - Authentication +- **SES** - Email notifications +- **SNS** - Push notifications +- **CloudWatch** - Monitoring +- **WAF** - Security + +#### Terraform Modules: +``` +infrastructure/ +├── main.tf +├── variables.tf +├── modules/ +│ ├── vpc/ +│ ├── ecs/ +│ ├── rds/ +│ ├── elasticache/ +│ └── cognito/ +└── environments/ + ├── dev/ + ├── staging/ + └── prod/ +``` + +#### Estimated Monthly Costs (Production): +| Service | Est. Cost | +|---------|-----------| +| ECS Fargate (2 tasks) | $50-100 | +| RDS (db.t3.small) | $30-50 | +| ElastiCache (cache.t3.micro) | $15-25 | +| S3 + CloudFront | $10-20 | +| Other (Cognito, SES, etc.) | $20-30 | +| **Total** | **$125-225/mo** | + +--- + +### Phase 4: Payments & Subscriptions + +**Goal:** Integrate Stripe for subscriptions + +#### Implementation: +1. Stripe subscription products (Free, Premium, Pro) +2. In-app purchase for iOS/Android +3. Webhook handlers for subscription events +4. 
Grace period handling +5. Receipt validation + +#### Stripe Integration: +```python +# Backend webhook handler +@router.post("/webhooks/stripe") +async def stripe_webhook(request: Request): + event = stripe.Webhook.construct_event(...) + + if event.type == "customer.subscription.updated": + update_user_tier(event.data.object) + elif event.type == "customer.subscription.deleted": + downgrade_to_free(event.data.object) +``` + +--- + +### Phase 5: Push Notifications & Alerts + +**Goal:** Real-time price alerts and notifications + +#### Alert Types: +- Price above/below threshold +- Percentage change (daily) +- Earnings announcement +- Breaking news (geopolitical) +- Portfolio performance + +#### Implementation: +- Firebase Cloud Messaging (FCM) +- Background worker checks alerts every minute +- Rate limit: max 10 alerts/day per free user + +--- + +### Phase 6: Analytics & Monitoring + +**Goal:** Track usage, errors, business metrics + +#### Tools: +- **Mixpanel/Amplitude** - Product analytics +- **Sentry** - Error tracking +- **CloudWatch** - Infrastructure metrics +- **Custom dashboard** - Business KPIs + +#### Key Metrics: +- DAU/MAU +- Conversion rate (free → premium) +- Churn rate +- API response times +- Analysis accuracy feedback + +--- + +## Security Considerations + +1. **Authentication** + - JWT tokens with refresh rotation + - OAuth2 (Google, Apple Sign-In) + - 2FA optional for premium users + +2. **Data Protection** + - Encrypt PII at rest (RDS encryption) + - TLS 1.3 for all API traffic + - No plaintext passwords + +3. **API Security** + - Rate limiting per tier + - Input validation (Pydantic) + - SQL injection prevention (SQLAlchemy ORM) + - CORS configuration + +4. 
**Compliance** + - Privacy policy + - Terms of service + - GDPR data export/deletion + - Financial disclaimer (not investment advice) + +--- + +## Risks & Mitigations + +| Risk | Impact | Mitigation | +|------|--------|------------| +| Yahoo Finance rate limits | High | Implement caching, use paid API fallback | +| App store rejection | Medium | Follow guidelines, proper disclaimers | +| Data accuracy issues | High | Clear disclaimers, data validation | +| Security breach | Critical | Security audit, penetration testing | +| Low conversion rate | Medium | A/B testing, feature gating | + +--- + +## Success Metrics (Year 1) + +| Metric | Target | +|--------|--------| +| App downloads | 10,000+ | +| DAU | 1,000+ | +| Premium subscribers | 500+ | +| Monthly revenue | $5,000+ | +| App store rating | 4.5+ stars | +| Churn rate | <5%/month | + +--- + +## Next Steps (Immediate) + +1. **Validate idea** - User interviews, landing page +2. **Design** - Figma mockups for key screens +3. **Backend MVP** - Core API endpoints +4. **Flutter prototype** - Basic app with analysis feature +5. **Beta testing** - TestFlight/Google Play beta + +--- + +## Repository Structure (Final) + +``` +stockpulse/ +├── backend/ # FastAPI backend +│ ├── app/ +│ ├── tests/ +│ ├── Dockerfile +│ └── requirements.txt +├── mobile/ # Flutter app +│ ├── lib/ +│ ├── test/ +│ ├── ios/ +│ ├── android/ +│ └── pubspec.yaml +├── infrastructure/ # Terraform +│ ├── modules/ +│ └── environments/ +├── docs/ # Documentation +│ ├── api/ +│ └── architecture/ +└── scripts/ # Utility scripts +``` + +--- + +## Timeline Summary (Planning Only) + +| Phase | Duration | Dependencies | +|-------|----------|--------------| +| 1. Backend API | 4-6 weeks | - | +| 2. Flutter App | 6-8 weeks | Phase 1 | +| 3. Infrastructure | 2-3 weeks | Phase 1 | +| 4. Payments | 2 weeks | Phase 2, 3 | +| 5. Notifications | 2 weeks | Phase 2, 3 | +| 6. 
Analytics | 1 week | Phase 2 | +| **Total** | **17-22 weeks** | | + +This is a planning document. No fixed timeline - execute phases as resources allow. + +--- + +**Disclaimer:** This tool is for informational purposes only and does NOT constitute financial advice. diff --git a/README.md b/README.md new file mode 100644 index 0000000..5ca2907 --- /dev/null +++ b/README.md @@ -0,0 +1,214 @@ +# 📈 Stock Analysis v6.1 + +> AI-powered stock & crypto analysis with portfolio management, watchlists, dividend analysis, and **viral trend detection**. + +[![ClawHub Downloads](https://img.shields.io/badge/ClawHub-1500%2B%20downloads-blue)](https://clawhub.ai) +[![OpenClaw Skill](https://img.shields.io/badge/OpenClaw-Skill-green)](https://openclaw.ai) + +## What's New in v6.1 + +- 🔥 **Hot Scanner** — Find viral stocks & crypto across multiple sources +- 🐦 **Twitter/X Integration** — Social sentiment via bird CLI +- 📰 **Multi-Source Aggregation** — CoinGecko, Google News, Yahoo Finance +- ⏰ **Cron Support** — Daily trend reports + +## What's New in v6.0 + +- 🆕 **Watchlist + Alerts** — Price targets, stop losses, signal change notifications +- 🆕 **Dividend Analysis** — Yield, payout ratio, growth rate, safety score +- 🆕 **Fast Mode** — Skip slow analyses for quick checks +- 🆕 **Improved Commands** — Better OpenClaw/Telegram integration +- 🆕 **Test Suite** — Unit tests for core functionality + +## Features + +| Feature | Description | +|---------|-------------| +| **8-Dimension Analysis** | Earnings, fundamentals, analysts, momentum, sentiment, sector, market, history | +| **Crypto Support** | Top 20 cryptos with market cap, BTC correlation, momentum | +| **Portfolio Management** | Track holdings, P&L, concentration warnings | +| **Watchlist + Alerts** | Price targets, stop losses, signal changes | +| **Dividend Analysis** | Yield, payout, growth, safety score | +| **Risk Detection** | Geopolitical, earnings timing, overbought, risk-off | +| **Breaking News** | Crisis keyword 
scanning (last 24h) | + +## Quick Start + +### Analyze Stocks +```bash +uv run scripts/analyze_stock.py AAPL +uv run scripts/analyze_stock.py AAPL MSFT GOOGL +uv run scripts/analyze_stock.py AAPL --fast # Skip slow analyses +``` + +### Analyze Crypto +```bash +uv run scripts/analyze_stock.py BTC-USD +uv run scripts/analyze_stock.py ETH-USD SOL-USD +``` + +### Dividend Analysis +```bash +uv run scripts/dividends.py JNJ PG KO +``` + +### Watchlist +```bash +uv run scripts/watchlist.py add AAPL --target 200 --stop 150 +uv run scripts/watchlist.py list +uv run scripts/watchlist.py check --notify +``` + +### Portfolio +```bash +uv run scripts/portfolio.py create "My Portfolio" +uv run scripts/portfolio.py add AAPL --quantity 100 --cost 150 +uv run scripts/portfolio.py show +``` + +### 🔥 Hot Scanner (NEW) +```bash +# Full scan with all sources +python3 scripts/hot_scanner.py + +# Fast scan (skip social media) +python3 scripts/hot_scanner.py --no-social + +# JSON output for automation +python3 scripts/hot_scanner.py --json +``` + +## Analysis Dimensions + +### Stocks (8 dimensions) +1. **Earnings Surprise** (30%) — EPS beat/miss +2. **Fundamentals** (20%) — P/E, margins, growth, debt +3. **Analyst Sentiment** (20%) — Ratings, price targets +4. **Historical Patterns** (10%) — Past earnings reactions +5. **Market Context** (10%) — VIX, SPY/QQQ trends +6. **Sector Performance** (15%) — Relative strength +7. **Momentum** (15%) — RSI, 52-week range +8. 
**Sentiment** (10%) — Fear/Greed, shorts, insiders + +### Crypto (3 dimensions) +- Market Cap & Category +- BTC Correlation (30-day) +- Momentum (RSI, range) + +## Dividend Metrics + +| Metric | Description | +|--------|-------------| +| Yield | Annual dividend / price | +| Payout Ratio | Dividend / EPS | +| 5Y Growth | CAGR of dividend | +| Consecutive Years | Years of increases | +| Safety Score | 0-100 composite | +| Income Rating | Excellent → Poor | + +## 🔥 Hot Scanner + +Find what's trending RIGHT NOW across stocks & crypto. + +### Data Sources + +| Source | What it finds | +|--------|---------------| +| **CoinGecko Trending** | Top 15 trending coins | +| **CoinGecko Movers** | Biggest gainers/losers (>3%) | +| **Google News** | Breaking finance & crypto news | +| **Yahoo Finance** | Top gainers, losers, most active | +| **Twitter/X** | Social sentiment (requires auth) | + +### Output + +``` +📊 TOP TRENDING (by buzz): + 1. BTC (6 pts) [CoinGecko, Google News] 📉 bearish (-2.5%) + 2. ETH (5 pts) [CoinGecko, Twitter] 📉 bearish (-7.2%) + 3. NVDA (3 pts) [Google News, Yahoo] 📰 Earnings beat... + +🪙 CRYPTO HIGHLIGHTS: + 🚀 RIVER River +14.0% + 📉 BTC Bitcoin -2.5% + +📈 STOCK MOVERS: + 🟢 NVDA (gainers) + 🔴 TSLA (losers) + +📰 BREAKING NEWS: + [BTC, ETH] Crypto crash: $2.5B liquidated... +``` + +### Twitter/X Setup (Optional) + +1. Install bird CLI: `npm install -g @steipete/bird` +2. Login to x.com in Safari/Chrome +3. 
Create `.env` file: +``` +AUTH_TOKEN=your_auth_token +CT0=your_ct0_token +``` + +Get tokens from browser DevTools → Application → Cookies → x.com + +### Automation + +Set up a daily cron job for morning reports: +```bash +# Run at 8 AM daily +0 8 * * * python3 /path/to/hot_scanner.py --no-social >> /var/log/hot_scanner.log +``` + +## Risk Detection + +- ⚠️ Pre-earnings warning (< 14 days) +- ⚠️ Post-earnings spike (> 15% in 5 days) +- ⚠️ Overbought (RSI > 70 + near 52w high) +- ⚠️ Risk-off mode (GLD/TLT/UUP rising) +- ⚠️ Geopolitical keywords (Taiwan, China, etc.) +- ⚠️ Breaking news alerts + +## Performance Options + +| Flag | Speed | Description | +|------|-------|-------------| +| (default) | 5-10s | Full analysis | +| `--no-insider` | 3-5s | Skip SEC EDGAR | +| `--fast` | 2-3s | Skip insider + news | + +## Data Sources + +- [Yahoo Finance](https://finance.yahoo.com) — Prices, fundamentals, movers +- [CoinGecko](https://coingecko.com) — Crypto trending, market data +- [CNN Fear & Greed](https://money.cnn.com/data/fear-and-greed/) — Sentiment +- [SEC EDGAR](https://www.sec.gov/edgar) — Insider trading +- [Google News RSS](https://news.google.com) — Breaking news +- [Twitter/X](https://x.com) — Social sentiment (via bird CLI) + +## Storage + +| Data | Location | +|------|----------| +| Portfolios | `~/.clawdbot/skills/stock-analysis/portfolios.json` | +| Watchlist | `~/.clawdbot/skills/stock-analysis/watchlist.json` | + +## Testing + +```bash +uv run pytest scripts/test_stock_analysis.py -v +``` + +## Limitations + +- Yahoo Finance may lag 15-20 minutes +- Short interest lags ~2 weeks (FINRA) +- US markets only + +## Disclaimer + +⚠️ **NOT FINANCIAL ADVICE.** For informational purposes only. Consult a licensed financial advisor before making investment decisions. 
+ +--- + +Built for [OpenClaw](https://openclaw.ai) 🦞 | [ClawHub](https://clawhub.ai) diff --git a/SKILL.md b/SKILL.md new file mode 100644 index 0000000..f9609f9 --- /dev/null +++ b/SKILL.md @@ -0,0 +1,248 @@ +--- +name: stock-analysis +description: "使用Yahoo Finance数据进行股票和加密货币分析。" +version: 6.2.0 +homepage: https://finance.yahoo.com +commands: + - /stock - Analyze a stock or crypto (e.g., /stock AAPL) + - /stock_compare - Compare multiple tickers + - /stock_dividend - Analyze dividend metrics + - /stock_watch - Add/remove from watchlist + - /stock_alerts - Check triggered alerts + - /stock_hot - Find trending stocks & crypto (Hot Scanner) + - /stock_rumors - Find early signals, M&A rumors, insider activity (Rumor Scanner) + - /portfolio - Show portfolio summary + - /portfolio_add - Add asset to portfolio +metadata: {"clawdbot":{"emoji":"📈","requires":{"bins":["uv"],"env":[]},"install":[{"id":"uv-brew","kind":"brew","formula":"uv","bins":["uv"],"label":"Install uv (brew)"}]}} +--- + +# Stock Analysis v6.2 + +Analyze US stocks and cryptocurrencies with 8-dimension analysis, portfolio management, watchlists, alerts, dividend analysis, and **viral trend detection**. + +## What's New in v6.2 + +- 🔮 **Rumor Scanner** — Early signals before mainstream news + - M&A rumors and takeover bids + - Insider buying/selling activity + - Analyst upgrades/downgrades + - Twitter/X "hearing that...", "sources say..." 
detection +- 🎯 **Impact Scoring** — Rumors ranked by potential market impact + +## What's in v6.1 + +- 🔥 **Hot Scanner** — Find viral stocks & crypto across multiple sources +- 🐦 **Twitter/X Integration** — Social sentiment via bird CLI +- 📰 **Multi-Source Aggregation** — CoinGecko, Google News, Yahoo Finance +- ⏰ **Cron Support** — Daily trend reports + +## What's in v6.0 + +- 🆕 **Watchlist + Alerts** — Price targets, stop losses, signal changes +- 🆕 **Dividend Analysis** — Yield, payout ratio, growth, safety score +- 🆕 **Fast Mode** — `--fast` skips slow analyses (insider, news) +- 🆕 **Improved Performance** — `--no-insider` for faster runs + +## Quick Commands + +### Stock Analysis +```bash +# Basic analysis +uv run {baseDir}/scripts/analyze_stock.py AAPL + +# Fast mode (skips insider trading & breaking news) +uv run {baseDir}/scripts/analyze_stock.py AAPL --fast + +# Compare multiple +uv run {baseDir}/scripts/analyze_stock.py AAPL MSFT GOOGL + +# Crypto +uv run {baseDir}/scripts/analyze_stock.py BTC-USD ETH-USD +``` + +### Dividend Analysis (NEW v6.0) +```bash +# Analyze dividends +uv run {baseDir}/scripts/dividends.py JNJ + +# Compare dividend stocks +uv run {baseDir}/scripts/dividends.py JNJ PG KO MCD --output json +``` + +**Dividend Metrics:** +- Dividend Yield & Annual Payout +- Payout Ratio (safe/moderate/high/unsustainable) +- 5-Year Dividend Growth (CAGR) +- Consecutive Years of Increases +- Safety Score (0-100) +- Income Rating (excellent/good/moderate/poor) + +### Watchlist + Alerts (NEW v6.0) +```bash +# Add to watchlist +uv run {baseDir}/scripts/watchlist.py add AAPL + +# With price target alert +uv run {baseDir}/scripts/watchlist.py add AAPL --target 200 + +# With stop loss alert +uv run {baseDir}/scripts/watchlist.py add AAPL --stop 150 + +# Alert on signal change (BUY→SELL) +uv run {baseDir}/scripts/watchlist.py add AAPL --alert-on signal + +# View watchlist +uv run {baseDir}/scripts/watchlist.py list + +# Check for triggered alerts +uv run 
{baseDir}/scripts/watchlist.py check +uv run {baseDir}/scripts/watchlist.py check --notify # Telegram format + +# Remove from watchlist +uv run {baseDir}/scripts/watchlist.py remove AAPL +``` + +**Alert Types:** +- 🎯 **Target Hit** — Price >= target +- 🛑 **Stop Hit** — Price <= stop +- 📊 **Signal Change** — BUY/HOLD/SELL changed + +### Portfolio Management +```bash +# Create portfolio +uv run {baseDir}/scripts/portfolio.py create "Tech Portfolio" + +# Add assets +uv run {baseDir}/scripts/portfolio.py add AAPL --quantity 100 --cost 150 +uv run {baseDir}/scripts/portfolio.py add BTC-USD --quantity 0.5 --cost 40000 + +# View portfolio +uv run {baseDir}/scripts/portfolio.py show + +# Analyze with period returns +uv run {baseDir}/scripts/analyze_stock.py --portfolio "Tech Portfolio" --period weekly +``` + +### 🔥 Hot Scanner (NEW v6.1) +```bash +# Full scan - find what's trending NOW +python3 {baseDir}/scripts/hot_scanner.py + +# Fast scan (skip social media) +python3 {baseDir}/scripts/hot_scanner.py --no-social + +# JSON output for automation +python3 {baseDir}/scripts/hot_scanner.py --json +``` + +**Data Sources:** +- 📊 CoinGecko Trending — Top 15 trending coins +- 📈 CoinGecko Movers — Biggest gainers/losers +- 📰 Google News — Finance & crypto headlines +- 📉 Yahoo Finance — Gainers, losers, most active +- 🐦 Twitter/X — Social sentiment (requires auth) + +**Output:** +- Top trending by mention count +- Crypto highlights with 24h changes +- Stock movers by category +- Breaking news with tickers + +**Twitter Setup (Optional):** +1. Install bird: `npm install -g @steipete/bird` +2. Login to x.com in Safari/Chrome +3. 
Create `.env` with `AUTH_TOKEN` and `CT0` + +### 🔮 Rumor Scanner (NEW v6.2) +```bash +# Find early signals, M&A rumors, insider activity +python3 {baseDir}/scripts/rumor_scanner.py +``` + +**What it finds:** +- 🏢 **M&A Rumors** — Merger, acquisition, takeover bids +- 👔 **Insider Activity** — CEO/Director buying/selling +- 📊 **Analyst Actions** — Upgrades, downgrades, price target changes +- 🐦 **Twitter Whispers** — "hearing that...", "sources say...", "rumor" +- ⚖️ **SEC Activity** — Investigations, filings + +**Impact Scoring:** +- Each rumor is scored by potential market impact (1-10) +- M&A/Takeover: +5 points +- Insider buying: +4 points +- Upgrade/Downgrade: +3 points +- "Hearing"/"Sources say": +2 points +- High engagement: +2 bonus + +**Best Practice:** Run at 07:00 before US market open to catch pre-market signals. + +## Analysis Dimensions (8 for stocks, 3 for crypto) + +### Stocks +| Dimension | Weight | Description | +|-----------|--------|-------------| +| Earnings Surprise | 30% | EPS beat/miss | +| Fundamentals | 20% | P/E, margins, growth | +| Analyst Sentiment | 20% | Ratings, price targets | +| Historical | 10% | Past earnings reactions | +| Market Context | 10% | VIX, SPY/QQQ trends | +| Sector | 15% | Relative strength | +| Momentum | 15% | RSI, 52-week range | +| Sentiment | 10% | Fear/Greed, shorts, insiders | + +### Crypto +- Market Cap & Category +- BTC Correlation (30-day) +- Momentum (RSI, range) + +## Sentiment Sub-Indicators + +| Indicator | Source | Signal | +|-----------|--------|--------| +| Fear & Greed | CNN | Contrarian (fear=buy) | +| Short Interest | Yahoo | Squeeze potential | +| VIX Structure | Futures | Stress detection | +| Insider Trades | SEC EDGAR | Smart money | +| Put/Call Ratio | Options | Sentiment extreme | + +## Risk Detection + +- ⚠️ **Pre-Earnings** — Warns if < 14 days to earnings +- ⚠️ **Post-Spike** — Flags if up >15% in 5 days +- ⚠️ **Overbought** — RSI >70 + near 52w high +- ⚠️ **Risk-Off** — GLD/TLT/UUP rising 
together +- ⚠️ **Geopolitical** — Taiwan, China, Russia, Middle East keywords +- ⚠️ **Breaking News** — Crisis keywords in last 24h + +## Performance Options + +| Flag | Effect | Speed | +|------|--------|-------| +| (default) | Full analysis | 5-10s | +| `--no-insider` | Skip SEC EDGAR | 3-5s | +| `--fast` | Skip insider + news | 2-3s | + +## Supported Cryptos (Top 20) + +BTC, ETH, BNB, SOL, XRP, ADA, DOGE, AVAX, DOT, MATIC, LINK, ATOM, UNI, LTC, BCH, XLM, ALGO, VET, FIL, NEAR + +(Use `-USD` suffix: `BTC-USD`, `ETH-USD`) + +## Data Storage + +| File | Location | +|------|----------| +| Portfolios | `~/.clawdbot/skills/stock-analysis/portfolios.json` | +| Watchlist | `~/.clawdbot/skills/stock-analysis/watchlist.json` | + +## Limitations + +- Yahoo Finance may lag 15-20 minutes +- Short interest lags ~2 weeks (FINRA) +- Insider trades lag 2-3 days (SEC filing) +- US markets only (non-US incomplete) +- Breaking news: 1h cache, keyword-based + +## Disclaimer + +⚠️ **NOT FINANCIAL ADVICE.** For informational purposes only. Consult a licensed financial advisor before making investment decisions. 
diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..1722745 --- /dev/null +++ b/TODO.md @@ -0,0 +1,394 @@ +# Stock Analysis - Future Enhancements + +## Roadmap Overview + +### v4.0.0 - Geopolitical Risk & News Sentiment +✅ 8 analysis dimensions with Fear/Greed, short interest, VIX structure, put/call ratio +✅ Safe-haven indicators (GLD, TLT, UUP) with risk-off detection +✅ Breaking news alerts via Google News RSS +✅ Geopolitical risk mapping (Taiwan, China, Russia, Middle East, Banking) +✅ Sector-specific crisis flagging with confidence penalties +✅ 1h caching for shared indicators (Fear/Greed, VIX structure, breaking news) +✅ Async parallel sentiment fetching (5 indicators with 10s timeouts) + +### v5.0.0 (Current) - Portfolio & Crypto +✅ Portfolio management (create, add, remove, show assets) +✅ Cryptocurrency support (Top 20 by market cap) +✅ Portfolio analysis with --portfolio flag +✅ Periodic returns (--period daily/weekly/monthly/quarterly/yearly) +✅ Concentration warnings (>30% single asset) +✅ Crypto fundamentals (market cap, category, BTC correlation) + +### v4.1.0 - Performance & Completeness +✅ Full insider trading parsing via edgartools (Task #1) +✅ Market context caching with 1h TTL (Task #3b) +🔧 SEC EDGAR rate limit monitoring (Task #4 - low priority) + +### Future (v6.0+) +💡 Research phase: Social sentiment, fund flows, on-chain metrics + +--- + +## Sentiment Analysis Improvements + +### 1. 
Implement Full Insider Trading Parsing +**Status**: ✅ DONE +**Priority**: Medium +**Effort**: 2-3 hours + +**Current State**: +- ✅ `get_insider_activity()` fetches Form 4 filings via edgartools +- ✅ SEC identity configured (`stock-analysis@clawd.bot`) +- ✅ Aggregates buys/sells over 90-day window +- ✅ Scoring logic: strong buying (+0.8), moderate (+0.4), neutral (0), moderate selling (-0.4), strong (-0.8) + +**Tasks**: +- [ ] Research edgartools API for Form 4 parsing +- [ ] Implement transaction aggregation (90-day window) +- [ ] Calculate net shares bought/sold +- [ ] Calculate net value in millions USD +- [ ] Apply scoring logic: + - Strong buying (>100K shares or >$1M): +0.8 + - Moderate buying (>10K shares or >$0.1M): +0.4 + - Neutral: 0 + - Moderate selling: -0.4 + - Strong selling: -0.8 +- [ ] Add error handling for missing/incomplete filings +- [ ] Test with multiple tickers (BAC, TSLA, AAPL) +- [ ] Verify SEC rate limit compliance (10 req/s) + +**Expected Impact**: +- Insider activity detection for 4th sentiment indicator +- Increase from 3/5 to 4/5 indicators typically available + +--- + +### 2. 
Add Parallel Async Fetching +**Status**: ✅ DONE (sentiment indicators) +**Priority**: High +**Effort**: 4-6 hours + +**Current State**: +- ✅ Sentiment indicators fetched in parallel via `asyncio.gather()` +- ✅ 10s timeout per indicator +- Main data fetches (yfinance) still sequential (acceptable) + +**Tasks**: +- [ ] Convert sentiment helper functions to async + - [ ] `async def get_fear_greed_index()` + - [ ] `async def get_short_interest(data)` + - [ ] `async def get_vix_term_structure()` + - [ ] `async def get_insider_activity(ticker)` + - [ ] `async def get_put_call_ratio(data)` +- [ ] Update `analyze_sentiment()` to use `asyncio.gather()` +- [ ] Handle yfinance thread safety (may need locks) +- [ ] Add timeout per indicator (10s max) +- [ ] Test with multiple stocks in sequence +- [ ] Measure actual runtime improvement +- [ ] Update SKILL.md with new runtime (target: 3-4s) + +**Expected Impact**: +- Reduce runtime from 6-10s to 3-4s per stock +- Better user experience for multi-stock analysis + +--- + +### 3. 
Add Caching for Shared Indicators +**Status**: ✅ DONE (sentiment + breaking news) +**Priority**: Medium +**Effort**: 2-3 hours + +**Current State**: +- ✅ Fear & Greed Index cached (1h TTL) +- ✅ VIX term structure cached (1h TTL) +- ✅ Breaking news cached (1h TTL) +- ✅ Market context (VIX/SPY/QQQ/GLD/TLT/UUP) cached (1h TTL) + +**Tasks**: +- [ ] Design cache structure (simple dict or functools.lru_cache) +- [ ] Implement TTL (time-to-live): + - Fear & Greed: 1 hour + - VIX structure: 1 hour + - Short interest: No cache (per-stock) + - Insider activity: No cache (per-stock) + - Put/Call ratio: No cache (per-stock) +- [ ] Add cache invalidation logic +- [ ] Add verbose logging for cache hits/misses +- [ ] Test multi-stock analysis (e.g., `BAC TSLA AAPL`) +- [ ] Measure performance improvement +- [ ] Document caching behavior in SKILL.md + +**Expected Impact**: +- Multi-stock analysis faster (e.g., 3 stocks: 18-30s → 10-15s) +- Reduced API calls to Fear/Greed and VIX data sources +- Same-session analysis efficiency + +--- + +### 4. 
Monitor SEC EDGAR Rate Limits +**Status**: Not Started +**Priority**: Low (until insider trading implemented) +**Effort**: 1-2 hours + +**Current State**: +- SEC EDGAR API has 10 requests/second rate limit +- No rate limit tracking or logging +- edgartools may handle rate limiting internally + +**Tasks**: +- [ ] Research edgartools rate limit handling +- [ ] Add request counter/tracker if needed +- [ ] Implement exponential backoff on 429 errors +- [ ] Add logging for rate limit hits +- [ ] Test with high-volume scenarios (10+ stocks in quick succession) +- [ ] Document rate limit behavior +- [ ] Add error message if rate limited: "SEC API rate limited, try again in 1 minute" + +**Expected Impact**: +- Robust handling of SEC API limits in production +- Clear user feedback if limits hit +- Prevent API blocking/banning + +--- + +## Stock Analysis 4.0: Geopolitical Risk & News Sentiment + +### What's Currently Missing + +The current implementation captures: +- ✅ VIX (general market fear) +- ✅ SPY/QQQ trends (market direction) +- ✅ Sector performance + +What we **don't** have yet: +- ❌ Geopolitical risk indicators +- ❌ News sentiment analysis +- ❌ Sector-specific crisis flags + +--- + +### 7. 
Geopolitical Risk Index +**Status**: ✅ DONE (keyword-based) +**Priority**: High +**Effort**: 8-12 hours + +**Proposed Approach**: +Option A: Use GPRD (Geopolitical Risk Daily Index) from policyuncertainty.com +Option B: Scan news APIs (NewsAPI, GDELT) for geopolitical keywords + +**Tasks**: +- [ ] Research free geopolitical risk data sources + - [ ] Check policyuncertainty.com API availability + - [ ] Evaluate NewsAPI free tier limits + - [ ] Consider GDELT Project (free, comprehensive) +- [ ] Design risk scoring system (0-100 scale) +- [ ] Implement data fetching with caching (4-hour TTL) +- [ ] Map risk levels to sentiment scores: + - Low risk (0-30): +0.2 (bullish) + - Moderate risk (30-60): 0 (neutral) + - High risk (60-80): -0.3 (caution) + - Extreme risk (80-100): -0.5 (bearish) +- [ ] Add to sentiment analysis as 6th indicator +- [ ] Test with historical crisis periods +- [ ] Update SKILL.md with geopolitical indicator + +**Expected Impact**: +- Early warning for market-wide risk events +- Better context for earnings-season volatility +- Complement to VIX (VIX is reactive, geopolitical is predictive) + +**Example Output**: +``` +⚠️ GEOPOLITICAL RISK: HIGH (72/100) + Context: Elevated Taiwan tensions detected + Market Impact: Risk-off sentiment likely +``` + +--- + +### 8. 
Sector-Specific Crisis Mapping +**Status**: ✅ DONE +**Priority**: High +**Effort**: 6-8 hours + +**Current Gap**: +- No mapping between geopolitical events and affected sectors +- No automatic flagging of at-risk holdings + +**Proposed Risk Mapping**: + +| Geopolitical Event | Affected Sectors | Example Tickers | +|-------------------|------------------|-----------------| +| Taiwan conflict | Semiconductors | NVDA, AMD, TSM, INTC | +| Russia-Ukraine | Energy, Agriculture | XLE, MOS, CF, NTR | +| Middle East escalation | Oil, Defense | XOM, CVX, LMT, RTX | +| China tensions | Tech supply chain, Retail | AAPL, QCOM, NKE, SBUX | +| Banking crisis | Financials | JPM, BAC, WFC, C | + +**Tasks**: +- [ ] Build event → sector → ticker mapping database +- [ ] Implement keyword detection in news feeds: + - "Taiwan" + "military" → Semiconductors ⚠️ + - "Russia" + "sanctions" → Energy ⚠️ + - "Iran" + "attack" → Oil, Defense ⚠️ + - "China" + "tariffs" → Tech, Consumer ⚠️ +- [ ] Add sector exposure check to analysis +- [ ] Generate automatic warnings in output +- [ ] Apply confidence penalty for high-risk sectors +- [ ] Test with historical crisis events +- [ ] Document in SKILL.md + +**Expected Impact**: +- Automatic detection of sector-specific risks +- Clear warnings for exposed holdings +- Reduced false positives (only flag relevant sectors) + +**Example Output**: +``` +⚠️ SECTOR RISK ALERT: Semiconductors + Event: Taiwan military exercises (elevated tensions) + Impact: NVDA HIGH RISK - supply chain exposure + Recommendation: HOLD → downgraded from BUY +``` + +--- + +### 9. 
Breaking News Check +**Status**: ✅ DONE +**Priority**: Medium +**Effort**: 4-6 hours + +**Current Gap**: +- No real-time news scanning before analysis +- User might get stale recommendation during breaking events + +**Proposed Solution**: +- Scan Google News or Reuters RSS before analysis +- Flag high-impact keywords within last 24 hours + +**Tasks**: +- [ ] Choose news source (Google News RSS, Reuters API, or NewsAPI) +- [ ] Implement news fetching with 24-hour lookback +- [ ] Define crisis keywords: + - **War/Conflict**: "war", "invasion", "military strike", "attack" + - **Economic**: "recession", "crisis", "collapse", "default" + - **Regulatory**: "sanctions", "embargo", "ban", "investigation" + - **Natural disaster**: "earthquake", "hurricane", "pandemic" +- [ ] Add ticker-specific news check (company name + keywords) +- [ ] Generate automatic caveat in output +- [ ] Cache news check results (1 hour TTL) +- [ ] Add `--skip-news` flag for offline mode +- [ ] Test with historical crisis dates +- [ ] Document in SKILL.md + +**Expected Impact**: +- Real-time awareness of breaking events +- Automatic caveats during high volatility +- User protection from stale recommendations + +**Example Output**: +``` +⚠️ BREAKING NEWS ALERT (last 6 hours): + "Fed announces emergency rate hike" + Impact: Market-wide volatility expected + Caveat: Analysis may be outdated - rerun in 24h +``` + +--- + +### 10. 
Safe-Haven Indicators +**Status**: ✅ DONE +**Priority**: Medium +**Effort**: 3-4 hours + +**Current Gap**: +- No detection of "risk-off" market regime +- VIX alone is insufficient (measures implied volatility, not capital flows) + +**Proposed Indicators**: +- Gold (GLD) - Flight to safety +- US Treasuries (TLT) - Bond market fear +- USD Index (UUP) - Dollar strength during crisis + +**Risk-Off Detection Logic**: +``` +IF GLD +2% AND TLT +1% AND UUP +1% (all rising together) +THEN Market Regime = RISK-OFF +``` + +**Tasks**: +- [ ] Fetch GLD, TLT, UUP price data (5-day change) +- [ ] Implement risk-off detection algorithm +- [ ] Add to market context analysis +- [ ] Apply broad risk penalty: + - Risk-off detected → Reduce all BUY confidence by 30% + - Add caveat: "Market in risk-off mode - defensive positioning recommended" +- [ ] Test with historical crisis periods (2008, 2020, 2022) +- [ ] Add verbose output for safe-haven movements +- [ ] Document in SKILL.md + +**Expected Impact**: +- Detect market-wide flight to safety +- Automatic risk reduction during panics +- Complement geopolitical risk scoring + +**Example Output**: +``` +🛡️ SAFE-HAVEN ALERT: Risk-off mode detected + - Gold (GLD): +3.2% (5d) + - Treasuries (TLT): +2.1% (5d) + - USD Index: +1.8% (5d) + Recommendation: Reduce equity exposure, favor defensives +``` + +--- + +## General Improvements + +### 11. Add Social Sentiment (Future Phase) +**Status**: Deferred +**Priority**: Low +**Effort**: 8-12 hours + +**Notes**: +- Requires free API (Twitter/Reddit alternatives?) +- Most sentiment APIs are paid (StockTwits, etc.) +- Research needed for viable free sources + +### 12. 
Add Fund Flows (Future Phase) +**Status**: Deferred +**Priority**: Low +**Effort**: 6-8 hours + +**Notes**: +- Requires ETF flow data +- May need paid data source +- Research free alternatives + +--- + +## Implementation Priorities + +### v4.1.0 Complete +- ✅ Task #1 - Insider trading parsing via edgartools +- ✅ Task #3b - Market context caching (1h TTL) +- 🔧 Task #4 - SEC EDGAR rate limits (low priority, only if hitting limits) + +### Completed in v4.0.0 +- ✅ Task #2 - Async parallel fetching (sentiment) +- ✅ Task #3 - Caching for shared indicators (sentiment + news) +- ✅ Task #7 - Geopolitical risk (keyword-based) +- ✅ Task #8 - Sector-specific crisis mapping +- ✅ Task #9 - Breaking news check +- ✅ Task #10 - Safe-haven indicators + +--- + +## Version History + +- **v5.0.0** (2026-01-16): Portfolio management, cryptocurrency support (Top 20), periodic analysis +- **v4.1.0** (2026-01-16): Full insider trading parsing via edgartools, market context caching +- **v4.0.0** (2026-01-15): Geopolitical risk, breaking news, safe-haven detection, sector crisis mapping +- **v3.0.0** (2026-01-15): Sentiment analysis added with 5 indicators (3-4 typically working) +- **v2.0.0**: Market context, sector performance, earnings timing, momentum +- **v1.0.0**: Initial release with earnings, fundamentals, analysts, historical diff --git a/_meta.json b/_meta.json new file mode 100644 index 0000000..75ecbf2 --- /dev/null +++ b/_meta.json @@ -0,0 +1,6 @@ +{ + "ownerId": "kn77fv9851hjcqe52zqx0bhhbx7z680h", + "slug": "stock-analysis", + "version": "6.2.0", + "publishedAt": 1770041353575 +} \ No newline at end of file diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md new file mode 100644 index 0000000..42f3830 --- /dev/null +++ b/docs/ARCHITECTURE.md @@ -0,0 +1,408 @@ +# Technical Architecture + +How Stock Analysis v6.0 works under the hood. 
+ +## System Overview + +``` +┌─────────────────────────────────────────────────────────────────────┐ +│ Stock Analysis v6.0 │ +├─────────────────────────────────────────────────────────────────────┤ +│ │ +│ ┌──────────────────────────────────────────────────────────────┐ │ +│ │ CLI Interface │ │ +│ │ analyze_stock.py | dividends.py | watchlist.py | portfolio.py│ │ +│ └────────────────────────────┬─────────────────────────────────┘ │ +│ │ │ +│ ┌────────────────────────────▼─────────────────────────────────┐ │ +│ │ Analysis Engine │ │ +│ │ │ │ +│ │ ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐ │ │ +│ │ │Earnings │ │Fundmtls │ │Analysts │ │Historical│ │ │ +│ │ └────┬────┘ └────┬────┘ └────┬────┘ └────┬────┘ │ │ +│ │ │ │ │ │ │ │ +│ │ ┌────┴────┐ ┌────┴────┐ ┌────┴────┐ ┌────┴────┐ │ │ +│ │ │ Market │ │ Sector │ │Momentum │ │Sentiment│ │ │ +│ │ └────┬────┘ └────┬────┘ └────┬────┘ └────┬────┘ │ │ +│ │ │ │ │ │ │ │ +│ │ └───────────┴───────────┴───────────┘ │ │ +│ │ │ │ │ +│ │ [Synthesizer] │ │ +│ │ │ │ │ +│ │ [Signal Output] │ │ +│ └──────────────────────────────────────────────────────────────┘ │ +│ │ │ +│ ┌────────────────────────────▼─────────────────────────────────┐ │ +│ │ Data Sources │ │ +│ │ │ │ +│ │ ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐ │ │ +│ │ │ Yahoo │ │ CNN │ │ SEC │ │ Google │ │ │ +│ │ │ Finance │ │Fear/Grd │ │ EDGAR │ │ News │ │ │ +│ │ └─────────┘ └─────────┘ └─────────┘ └─────────┘ │ │ +│ └──────────────────────────────────────────────────────────────┘ │ +│ │ +└─────────────────────────────────────────────────────────────────────┘ +``` + +--- + +## Core Components + +### 1. 
Data Fetching (`fetch_stock_data`) + +```python +def fetch_stock_data(ticker: str, verbose: bool = False) -> StockData | None: + """Fetch stock data from Yahoo Finance with retry logic.""" +``` + +**Features:** +- 3 retries with exponential backoff +- Graceful handling of missing data +- Asset type detection (stock vs crypto) + +**Returns:** `StockData` dataclass with: +- `info`: Company fundamentals +- `earnings_history`: Past earnings +- `analyst_info`: Ratings and targets +- `price_history`: 1-year OHLCV + +### 2. Analysis Modules + +Each dimension has its own analyzer: + +| Module | Function | Returns | +|--------|----------|---------| +| Earnings | `analyze_earnings_surprise()` | `EarningsSurprise` | +| Fundamentals | `analyze_fundamentals()` | `Fundamentals` | +| Analysts | `analyze_analyst_sentiment()` | `AnalystSentiment` | +| Historical | `analyze_historical_patterns()` | `HistoricalPatterns` | +| Market | `analyze_market_context()` | `MarketContext` | +| Sector | `analyze_sector_performance()` | `SectorComparison` | +| Momentum | `analyze_momentum()` | `MomentumAnalysis` | +| Sentiment | `analyze_sentiment()` | `SentimentAnalysis` | + +### 3. Sentiment Sub-Analyzers + +Sentiment runs 5 parallel async tasks: + +```python +results = await asyncio.gather( + get_fear_greed_index(), # CNN Fear & Greed + get_short_interest(data), # Yahoo Finance + get_vix_term_structure(), # VIX Futures + get_insider_activity(), # SEC EDGAR + get_put_call_ratio(data), # Options Chain + return_exceptions=True +) +``` + +**Timeout:** 10 seconds per indicator +**Minimum:** 2 of 5 indicators required + +### 4. Signal Synthesis + +```python +def synthesize_signal( + ticker, company_name, + earnings, fundamentals, analysts, historical, + market_context, sector, earnings_timing, + momentum, sentiment, + breaking_news, geopolitical_risk_warning, geopolitical_risk_penalty +) -> Signal: +``` + +**Scoring:** +1. Collect available component scores +2. Apply normalized weights +3. 
Calculate weighted average → `final_score` +4. Apply adjustments (timing, overbought, risk-off) +5. Determine recommendation threshold + +**Thresholds:** +```python +if final_score > 0.33: + recommendation = "BUY" +elif final_score < -0.33: + recommendation = "SELL" +else: + recommendation = "HOLD" +``` + +--- + +## Caching Strategy + +### What's Cached + +| Data | TTL | Key | +|------|-----|-----| +| Market Context | 1 hour | `market_context` | +| Fear & Greed | 1 hour | `fear_greed` | +| VIX Structure | 1 hour | `vix_structure` | +| Breaking News | 1 hour | `breaking_news` | + +### Cache Implementation + +```python +_SENTIMENT_CACHE = {} +_CACHE_TTL_SECONDS = 3600 # 1 hour + +def _get_cached(key: str): + if key in _SENTIMENT_CACHE: + value, timestamp = _SENTIMENT_CACHE[key] + if time.time() - timestamp < _CACHE_TTL_SECONDS: + return value + return None + +def _set_cache(key: str, value): + _SENTIMENT_CACHE[key] = (value, time.time()) +``` + +### Why This Matters + +- First stock: ~8 seconds (full fetch) +- Second stock: ~4 seconds (reuses market data) +- Same stock again: ~4 seconds (no stock-level cache) + +--- + +## Data Flow + +### Single Stock Analysis + +``` +User Input: "AAPL" + │ + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ 1. FETCH DATA (yfinance) │ +│ - Stock info, earnings, price history │ +│ - ~2 seconds │ +└────────────────────────┬────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ 2. PARALLEL ANALYSIS │ +│ │ +│ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ +│ │ Earnings │ │Fundmtls │ │ Analysts │ ... 
(sync) │ +│ └──────────┘ └──────────┘ └──────────┘ │ +│ │ +│ ┌────────────────────────────────────┐ │ +│ │ Market Context (cached or fetch) │ ~1 second │ +│ └────────────────────────────────────┘ │ +│ │ +│ ┌────────────────────────────────────┐ │ +│ │ Sentiment (5 async tasks) │ ~3-5 seconds │ +│ │ - Fear/Greed (cached) │ │ +│ │ - Short Interest │ │ +│ │ - VIX Structure (cached) │ │ +│ │ - Insider Trading (slow!) │ │ +│ │ - Put/Call Ratio │ │ +│ └────────────────────────────────────┘ │ +└────────────────────────┬────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ 3. SYNTHESIZE SIGNAL │ +│ - Combine scores with weights │ +│ - Apply adjustments │ +│ - Generate caveats │ +│ - ~10 ms │ +└────────────────────────┬────────────────────────────────────┘ + │ + ▼ +┌─────────────────────────────────────────────────────────────┐ +│ 4. OUTPUT │ +│ - Text or JSON format │ +│ - Include disclaimer │ +└─────────────────────────────────────────────────────────────┘ +``` + +--- + +## Risk Detection + +### Geopolitical Risk + +```python +GEOPOLITICAL_RISK_MAP = { + "taiwan": { + "keywords": ["taiwan", "tsmc", "strait"], + "sectors": ["Technology", "Communication Services"], + "affected_tickers": ["NVDA", "AMD", "TSM", ...], + "impact": "Semiconductor supply chain disruption", + }, + # ... china, russia_ukraine, middle_east, banking_crisis +} +``` + +**Process:** +1. Check breaking news for keywords +2. If keyword found, check if ticker in affected list +3. 
Apply confidence penalty (30% direct, 15% sector) + +### Breaking News + +```python +def check_breaking_news(verbose: bool = False) -> list[str] | None: + """Scan Google News RSS for crisis keywords (last 24h).""" +``` + +**Crisis Keywords:** +```python +CRISIS_KEYWORDS = { + "war": ["war", "invasion", "military strike", ...], + "economic": ["recession", "crisis", "collapse", ...], + "regulatory": ["sanctions", "embargo", "ban", ...], + "disaster": ["earthquake", "hurricane", "pandemic", ...], + "financial": ["emergency rate", "bailout", ...], +} +``` + +--- + +## File Structure + +``` +stock-analysis/ +├── scripts/ +│ ├── analyze_stock.py # Main analysis engine (2500+ lines) +│ ├── portfolio.py # Portfolio management +│ ├── dividends.py # Dividend analysis +│ ├── watchlist.py # Watchlist + alerts +│ └── test_stock_analysis.py # Unit tests +├── docs/ +│ ├── CONCEPT.md # Philosophy & ideas +│ ├── USAGE.md # Practical guide +│ └── ARCHITECTURE.md # This file +├── SKILL.md # OpenClaw skill definition +├── README.md # Project overview +└── .clawdhub/ # ClawHub metadata +``` + +--- + +## Data Storage + +### Portfolio (`portfolios.json`) + +```json +{ + "portfolios": [ + { + "name": "Retirement", + "created_at": "2024-01-01T00:00:00Z", + "assets": [ + { + "ticker": "AAPL", + "quantity": 100, + "cost_basis": 150.00, + "type": "stock", + "added_at": "2024-01-01T00:00:00Z" + } + ] + } + ] +} +``` + +### Watchlist (`watchlist.json`) + +```json +[ + { + "ticker": "NVDA", + "added_at": "2024-01-15T10:30:00Z", + "price_at_add": 700.00, + "target_price": 800.00, + "stop_price": 600.00, + "alert_on_signal": true, + "last_signal": "BUY", + "last_check": "2024-01-20T08:00:00Z" + } +] +``` + +--- + +## Dependencies + +```python +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "yfinance>=0.2.40", # Stock data +# "pandas>=2.0.0", # Data manipulation +# "fear-and-greed>=0.4", # CNN Fear & Greed +# "edgartools>=2.0.0", # SEC EDGAR filings +# "feedparser>=6.0.0", # RSS 
parsing +# ] +# /// +``` + +**Why These:** +- `yfinance`: Most reliable free stock API +- `pandas`: Industry standard for financial data +- `fear-and-greed`: Simple CNN F&G wrapper +- `edgartools`: Clean SEC EDGAR access +- `feedparser`: Robust RSS parsing + +--- + +## Performance Optimization + +### Current + +| Operation | Time | +|-----------|------| +| yfinance fetch | ~2s | +| Market context | ~1s (cached after) | +| Insider trading | ~3-5s (slowest!) | +| Sentiment (parallel) | ~3-5s | +| Synthesis | ~10ms | +| **Total** | **5-10s** | + +### Fast Mode (`--fast`) + +Skips: +- Insider trading (SEC EDGAR) +- Breaking news scan + +**Result:** 2-3 seconds + +### Future Optimizations + +1. **Stock-level caching** — Cache fundamentals for 24h +2. **Batch API calls** — yfinance supports multiple tickers +3. **Background refresh** — Pre-fetch watchlist data +4. **Local SEC data** — Avoid EDGAR API calls + +--- + +## Error Handling + +### Retry Strategy + +```python +max_retries = 3 +for attempt in range(max_retries): + try: + # fetch data + except Exception as e: + wait_time = 2 ** attempt # Exponential backoff: 1, 2, 4 seconds + time.sleep(wait_time) +``` + +### Graceful Degradation + +- Missing earnings → Skip dimension, reweight +- Missing analysts → Skip dimension, reweight +- Missing sentiment → Skip dimension, reweight +- API failure → Return None, continue with partial data + +### Minimum Requirements + +- At least 2 of 8 dimensions required +- At least 2 of 5 sentiment indicators required +- Otherwise → HOLD with low confidence diff --git a/docs/CONCEPT.md b/docs/CONCEPT.md new file mode 100644 index 0000000..3e24552 --- /dev/null +++ b/docs/CONCEPT.md @@ -0,0 +1,233 @@ +# Concept & Philosophy + +## The Problem + +Making investment decisions is hard. There's too much data, too many opinions, and too much noise. Most retail investors either: + +1. **Over-simplify** — Buy based on headlines or tips +2. **Over-complicate** — Get lost in endless research +3. 
**Freeze** — Analysis paralysis, never act + +## The Solution + +Stock Analysis provides a **structured, multi-dimensional framework** that: + +- Aggregates data from multiple sources +- Weighs different factors objectively +- Produces a clear **BUY / HOLD / SELL** signal +- Explains the reasoning with bullet points +- Flags risks and caveats + +Think of it as a **second opinion** — not a replacement for your judgment, but a systematic check. + +--- + +## Core Philosophy + +### 1. Multiple Perspectives Beat Single Metrics + +No single metric tells the whole story: +- A low P/E might mean "cheap" or "dying business" +- High analyst ratings might mean "priced in" or "genuine upside" +- Strong momentum might mean "trend" or "overbought" + +By combining **8 dimensions**, we get a more complete picture. + +### 2. Contrarian Signals Matter + +Some of our best signals are **contrarian**: + +| Indicator | Crowd Says | We Interpret | +|-----------|------------|--------------| +| Extreme Fear (Fear & Greed < 25) | "Sell everything!" | Potential buy opportunity | +| Extreme Greed (> 75) | "Easy money!" | Caution, reduce exposure | +| High Short Interest + Days to Cover | "Stock is doomed" | Squeeze potential | +| Insider Buying | (often ignored) | Smart money signal | + +### 3. Timing Matters + +A good stock at the wrong time is a bad trade: + +- **Pre-earnings** — Even strong stocks can gap down 10%+ +- **Post-spike** — Buying after a 20% run often means buying the top +- **Overbought** — RSI > 70 + near 52-week high = high-risk entry + +We detect these timing issues and adjust recommendations accordingly. + +### 4. 
Context Changes Everything + +The same stock behaves differently in different market regimes: + +| Regime | Characteristics | Impact | +|--------|-----------------|--------| +| **Bull** | VIX < 20, SPY up | BUY signals more reliable | +| **Bear** | VIX > 30, SPY down | Even good stocks fall | +| **Risk-Off** | GLD/TLT/UUP rising | Flight to safety, reduce equity | +| **Geopolitical** | Crisis keywords | Sector-specific penalties | + +### 5. Dividends Are Different + +Income investors have different priorities than growth investors: + +| Growth Investor | Income Investor | +|-----------------|-----------------| +| Price appreciation | Dividend yield | +| Revenue growth | Payout sustainability | +| Market share | Dividend growth rate | +| P/E ratio | Safety of payment | + +That's why we have a **separate dividend analysis** module. + +--- + +## The 8 Dimensions + +### Why These 8? + +Each dimension captures a different aspect of investment quality: + +``` +┌─────────────────────────────────────────────────────────────┐ +│ FUNDAMENTAL VALUE │ +│ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Earnings │ │ Fundamentals │ │ +│ │ Surprise │ │ (P/E, etc.) 
│ │ +│ │ (30%) │ │ (20%) │ │ +│ └─────────────────┘ └─────────────────┘ │ +├─────────────────────────────────────────────────────────────┤ +│ EXTERNAL VALIDATION │ +│ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Analyst │ │ Historical │ │ +│ │ Sentiment │ │ Patterns │ │ +│ │ (20%) │ │ (10%) │ │ +│ └─────────────────┘ └─────────────────┘ │ +├─────────────────────────────────────────────────────────────┤ +│ MARKET ENVIRONMENT │ +│ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Market │ │ Sector │ │ +│ │ Context │ │ Performance │ │ +│ │ (10%) │ │ (15%) │ │ +│ └─────────────────┘ └─────────────────┘ │ +├─────────────────────────────────────────────────────────────┤ +│ TECHNICAL & SENTIMENT │ +│ ┌─────────────────┐ ┌─────────────────┐ │ +│ │ Momentum │ │ Sentiment │ │ +│ │ (RSI, range) │ │ (Fear, shorts) │ │ +│ │ (15%) │ │ (10%) │ │ +│ └─────────────────┘ └─────────────────┘ │ +└─────────────────────────────────────────────────────────────┘ +``` + +### Weight Rationale + +| Weight | Dimension | Rationale | +|--------|-----------|-----------| +| 30% | Earnings | Most direct measure of company performance | +| 20% | Fundamentals | Long-term value indicators | +| 20% | Analysts | Professional consensus (with skepticism) | +| 15% | Sector | Relative performance matters | +| 15% | Momentum | Trend is your friend (until it isn't) | +| 10% | Market | Rising tide lifts all boats | +| 10% | Sentiment | Contrarian edge | +| 10% | Historical | Past behavior predicts future reactions | + +**Note:** Weights auto-normalize when data is missing. + +--- + +## Risk Detection Philosophy + +### "Don't Lose Money" + +Warren Buffett's Rule #1. Our risk detection is designed to **prevent bad entries**: + +1. **Pre-Earnings Hold** — Don't buy right before a binary event +2. **Post-Spike Caution** — Don't chase a run-up +3. **Overbought Warning** — Technical exhaustion +4. **Risk-Off Mode** — When even good stocks fall +5. 
**Geopolitical Flags** — Sector-specific event risk + +### False Positive vs False Negative + +We err on the side of **caution**: + +- Missing a 10% gain is annoying +- Catching a 30% loss is devastating + +That's why our caveats are prominent, and we downgrade BUY → HOLD liberally. + +--- + +## Crypto Adaptation + +Crypto is fundamentally different from stocks: + +| Stocks | Crypto | +|--------|--------| +| Earnings | No earnings | +| P/E Ratio | Market cap tiers | +| Sector ETFs | BTC correlation | +| Dividends | Staking yields (not tracked) | +| SEC Filings | No filings | + +We adapted the framework: +- **3 dimensions** instead of 8 +- **BTC correlation** as a key metric +- **Category classification** (L1, DeFi, etc.) +- **No sentiment** (no insider data for crypto) + +--- + +## Why Not Just Use [X]? + +### vs. Stock Screeners (Finviz, etc.) +- Screeners show data, we provide **recommendations** +- We combine fundamental + technical + sentiment +- We flag timing and risk issues + +### vs. Analyst Reports +- Analysts have conflicts of interest +- Reports are often stale +- We aggregate multiple signals + +### vs. Trading Bots +- Bots execute, we advise +- We explain reasoning +- Human stays in control + +### vs. ChatGPT/AI Chat +- We have **structured scoring**, not just conversation +- Real-time data fetching +- Consistent methodology + +--- + +## Limitations We Acknowledge + +1. **Data Lag** — Yahoo Finance is 15-20 min delayed +2. **US Focus** — International stocks have incomplete data +3. **No Execution** — We advise, you decide and execute +4. **Past ≠ Future** — All models have limits +5. 
**Black Swans** — Can't predict unpredictable events + +**This is a tool, not a crystal ball.** + +--- + +## The Bottom Line + +Stock Analysis v6.0 is designed to be your **systematic second opinion**: + +- ✅ Multi-dimensional analysis +- ✅ Clear recommendations +- ✅ Risk detection +- ✅ Explained reasoning +- ✅ Fast and automated + +**NOT:** +- ❌ Financial advice +- ❌ Guaranteed returns +- ❌ Replacement for research +- ❌ Trading signals + +Use it wisely. 📈 diff --git a/docs/HOT_SCANNER.md b/docs/HOT_SCANNER.md new file mode 100644 index 0000000..efc8c20 --- /dev/null +++ b/docs/HOT_SCANNER.md @@ -0,0 +1,288 @@ +# 🔥 Hot Scanner + +Find viral stocks & crypto trends in real-time by aggregating multiple data sources. + +## Overview + +The Hot Scanner answers one question: **"What's hot right now?"** + +It aggregates data from: +- CoinGecko (trending coins, biggest movers) +- Google News (finance & crypto headlines) +- Yahoo Finance (gainers, losers, most active) +- Twitter/X (social sentiment, optional) + +## Quick Start + +```bash +# Full scan with all sources +python3 scripts/hot_scanner.py + +# Skip social media (faster) +python3 scripts/hot_scanner.py --no-social + +# JSON output for automation +python3 scripts/hot_scanner.py --json +``` + +## Output Format + +### Console Output + +``` +============================================================ +🔥 HOT SCANNER v2 - What's Trending Right Now? +📅 2026-02-02 10:45:30 UTC +============================================================ + +📊 TOP TRENDING (by buzz): + 1. BTC (6 pts) [CoinGecko, Google News] 📉 bearish (-2.5%) + 2. ETH (5 pts) [CoinGecko, Twitter] 📉 bearish (-7.2%) + 3. NVDA (3 pts) [Google News, Yahoo] 📰 Earnings beat... + +🪙 CRYPTO HIGHLIGHTS: + 🚀 RIVER River +14.0% + 📉 BTC Bitcoin -2.5% + 📉 ETH Ethereum -7.2% + +📈 STOCK MOVERS: + 🟢 NVDA (gainers) + 🔴 TSLA (losers) + 📊 AAPL (most active) + +🐦 SOCIAL BUZZ: + [twitter] Bitcoin to $100k prediction... + [reddit_wsb] GME yolo update... 
+ +📰 BREAKING NEWS: + [BTC, ETH] Crypto crash: $2.5B liquidated... + [NVDA] Nvidia beats earnings expectations... +``` + +### JSON Output + +```json +{ + "scan_time": "2026-02-02T10:45:30+00:00", + "top_trending": [ + { + "symbol": "BTC", + "mentions": 6, + "sources": ["CoinGecko Trending", "Google News"], + "signals": ["📉 bearish (-2.5%)"] + } + ], + "crypto_highlights": [...], + "stock_highlights": [...], + "social_buzz": [...], + "breaking_news": [...] +} +``` + +## Data Sources + +### CoinGecko (No Auth Required) + +| Endpoint | Data | +|----------|------| +| `/search/trending` | Top 15 trending coins | +| `/coins/markets` | Top 100 by market cap with 24h changes | + +**Scoring:** Trending coins get 2 points, movers with >3% change get 1 point. + +### Google News RSS (No Auth Required) + +| Feed | Content | +|------|---------| +| Business News | General finance headlines | +| Crypto Search | Bitcoin, Ethereum, crypto keywords | + +**Ticker Extraction:** Uses regex patterns and company name mappings. + +### Yahoo Finance (No Auth Required) + +| Page | Data | +|------|------| +| `/gainers` | Top gaining stocks | +| `/losers` | Top losing stocks | +| `/most-active` | Highest volume stocks | + +**Note:** Requires gzip decompression. + +### Twitter/X (Auth Required) + +Uses [bird CLI](https://github.com/steipete/bird) for Twitter search. + +**Searches:** +- `stock OR $SPY OR $QQQ OR earnings` +- `bitcoin OR ethereum OR crypto OR $BTC` + +## Twitter/X Setup + +### 1. Install bird CLI + +```bash +# macOS +brew install steipete/tap/bird + +# npm +npm install -g @steipete/bird +``` + +### 2. Get Auth Tokens + +**Option A: Browser cookies (macOS)** +1. Login to x.com in Safari/Chrome +2. Grant Terminal "Full Disk Access" in System Settings +3. Run `bird whoami` to verify + +**Option B: Manual extraction** +1. Open x.com in Chrome +2. DevTools (F12) → Application → Cookies → x.com +3. Copy `auth_token` and `ct0` values + +### 3. 
Configure + +Create `.env` file in the skill directory: + +```bash +# /path/to/stock-analysis/.env +AUTH_TOKEN=your_auth_token_here +CT0=your_ct0_token_here +``` + +Or export as environment variables: + +```bash +export AUTH_TOKEN="..." +export CT0="..." +``` + +### 4. Verify + +```bash +bird whoami +# Should show: 🙋 @YourUsername +``` + +## Scoring System + +Each mention from a source adds points: + +| Source | Points | +|--------|--------| +| CoinGecko Trending | 2 | +| CoinGecko Movers | 1 | +| Google News | 1 | +| Yahoo Finance | 1 | +| Twitter/X | 1 | +| Reddit (high score) | 2 | +| Reddit (normal) | 1 | + +Symbols are ranked by total points across all sources. + +## Ticker Extraction + +### Patterns + +```python +# Cashtag: $AAPL +r'\$([A-Z]{1,5})\b' + +# Parentheses: (AAPL) +r'\(([A-Z]{2,5})\)' + +# Stock mentions: AAPL stock, AAPL shares +r'\b([A-Z]{2,5})(?:\'s|:|\s+stock|\s+shares)' +``` + +### Company Mappings + +```python +{ + "Apple": "AAPL", + "Microsoft": "MSFT", + "Tesla": "TSLA", + "Nvidia": "NVDA", + "Bitcoin": "BTC", + "Ethereum": "ETH", + # ... etc +} +``` + +### Crypto Keywords + +```python +{ + "bitcoin": "BTC", + "ethereum": "ETH", + "solana": "SOL", + "dogecoin": "DOGE", + # ... etc +} +``` + +## Automation + +### Cron Job + +```bash +# Daily at 8 AM +0 8 * * * cd /path/to/stock-analysis && python3 scripts/hot_scanner.py --json > cache/daily_scan.json +``` + +### OpenClaw Integration + +```yaml +# Cron job config +name: "🔥 Daily Hot Scanner" +schedule: + kind: cron + expr: "0 8 * * *" + tz: "Europe/Berlin" +payload: + kind: agentTurn + message: "Run hot scanner and summarize results" + deliver: true +sessionTarget: isolated +``` + +## Caching + +Results are saved to: +- `cache/hot_scan_latest.json` — Most recent scan + +## Limitations + +- **Reddit:** Blocked without OAuth (403). Requires API application. +- **Twitter:** Requires auth tokens, may expire. +- **Yahoo:** Sometimes rate-limited. +- **Google News:** RSS URLs may change. 
+ +## Future Enhancements + +- [ ] Reddit API integration (PRAW) +- [ ] StockTwits integration +- [ ] Google Trends +- [ ] Historical trend tracking +- [ ] Alert thresholds (notify when score > X) + +## Troubleshooting + +### Twitter not working + +```bash +# Check auth +bird whoami + +# Should see your username +# If not, re-export tokens +``` + +### Yahoo 403 or gzip errors + +The scanner handles gzip automatically. If issues persist, Yahoo may be rate-limiting. + +### No tickers found + +Check that news headlines contain recognizable patterns. The scanner uses conservative extraction to avoid false positives. diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..44124b5 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,95 @@ +# Documentation + +## Stock Analysis v6.1 + +This folder contains detailed documentation for the Stock Analysis skill. + +## Contents + +| Document | Description | +|----------|-------------| +| [CONCEPT.md](./CONCEPT.md) | Philosophy, ideas, and design rationale | +| [USAGE.md](./USAGE.md) | Practical usage guide with examples | +| [ARCHITECTURE.md](./ARCHITECTURE.md) | Technical implementation details | +| [HOT_SCANNER.md](./HOT_SCANNER.md) | 🔥 Viral trend detection (NEW) | + +## Quick Links + +### For Users + +Start with **[USAGE.md](./USAGE.md)** — it has practical examples for: +- Basic stock analysis +- Comparing stocks +- Crypto analysis +- Dividend investing +- Portfolio management +- Watchlist & alerts + +### For Understanding + +Read **[CONCEPT.md](./CONCEPT.md)** to understand: +- Why 8 dimensions? 
+- How scoring works +- Contrarian signals +- Risk detection philosophy +- Limitations we acknowledge + +### For Developers + +Check **[ARCHITECTURE.md](./ARCHITECTURE.md)** for: +- System overview diagram +- Data flow +- Caching strategy +- File structure +- Performance optimization + +## Quick Start + +```bash +# Analyze a stock +uv run scripts/analyze_stock.py AAPL + +# Fast mode (2-3 seconds) +uv run scripts/analyze_stock.py AAPL --fast + +# Dividend analysis +uv run scripts/dividends.py JNJ + +# Watchlist +uv run scripts/watchlist.py add AAPL --target 200 +uv run scripts/watchlist.py check +``` + +## Key Concepts + +### The 8 Dimensions + +1. **Earnings Surprise** (30%) — Did they beat expectations? +2. **Fundamentals** (20%) — P/E, margins, growth, debt +3. **Analyst Sentiment** (20%) — Professional consensus +4. **Historical Patterns** (10%) — Past earnings reactions +5. **Market Context** (10%) — VIX, SPY/QQQ trends +6. **Sector Performance** (15%) — Relative strength +7. **Momentum** (15%) — RSI, 52-week range +8. **Sentiment** (10%) — Fear/Greed, shorts, insiders + +### Signal Thresholds + +| Score | Recommendation | +|-------|----------------| +| > +0.33 | **BUY** | +| -0.33 to +0.33 | **HOLD** | +| < -0.33 | **SELL** | + +### Risk Flags + +- ⚠️ Pre-earnings (< 14 days) +- ⚠️ Post-spike (> 15% in 5 days) +- ⚠️ Overbought (RSI > 70 + near 52w high) +- ⚠️ Risk-off mode (GLD/TLT/UUP rising) +- ⚠️ Geopolitical keywords +- ⚠️ Breaking news alerts + +## Disclaimer + +⚠️ **NOT FINANCIAL ADVICE.** For informational purposes only. Always do your own research and consult a licensed financial advisor. diff --git a/docs/USAGE.md b/docs/USAGE.md new file mode 100644 index 0000000..10ab7a7 --- /dev/null +++ b/docs/USAGE.md @@ -0,0 +1,465 @@ +# Usage Guide + +Practical examples for using Stock Analysis v6.0 in real scenarios. + +## Table of Contents + +1. [Basic Stock Analysis](#basic-stock-analysis) +2. [Comparing Stocks](#comparing-stocks) +3. 
[Crypto Analysis](#crypto-analysis) +4. [Dividend Investing](#dividend-investing) +5. [Portfolio Management](#portfolio-management) +6. [Watchlist & Alerts](#watchlist--alerts) +7. [Performance Tips](#performance-tips) +8. [Interpreting Results](#interpreting-results) + +--- + +## Basic Stock Analysis + +### Single Stock + +```bash +uv run scripts/analyze_stock.py AAPL +``` + +**Output:** +``` +=========================================================================== +STOCK ANALYSIS: AAPL (Apple Inc.) +Generated: 2024-02-01T10:30:00 +=========================================================================== + +RECOMMENDATION: BUY (Confidence: 72%) + +SUPPORTING POINTS: +• Beat by 8.2% - EPS $2.18 vs $2.01 expected +• Strong margin: 24.1% +• Analyst consensus: Buy with 12.3% upside (42 analysts) +• Momentum: RSI 58 (neutral) +• Sector: Technology uptrend (+5.2% 1m) + +CAVEATS: +• Earnings in 12 days - high volatility expected +• High market volatility (VIX 24) + +=========================================================================== +DISCLAIMER: NOT FINANCIAL ADVICE. +=========================================================================== +``` + +### JSON Output + +For programmatic use: + +```bash +uv run scripts/analyze_stock.py AAPL --output json | jq '.recommendation, .confidence' +``` + +### Verbose Mode + +See what's happening under the hood: + +```bash +uv run scripts/analyze_stock.py AAPL --verbose +``` + +--- + +## Comparing Stocks + +### Side-by-Side Analysis + +```bash +uv run scripts/analyze_stock.py AAPL MSFT GOOGL +``` + +Each stock gets a full analysis. Compare recommendations and confidence levels. 
+ +### Sector Comparison + +Compare stocks in the same sector: + +```bash +# Banks +uv run scripts/analyze_stock.py JPM BAC WFC GS + +# Tech +uv run scripts/analyze_stock.py AAPL MSFT GOOGL AMZN META +``` + +--- + +## Crypto Analysis + +### Basic Crypto + +```bash +uv run scripts/analyze_stock.py BTC-USD +``` + +**Crypto-Specific Output:** +- Market cap classification (large/mid/small) +- Category (Smart Contract L1, DeFi, etc.) +- BTC correlation (30-day) +- Momentum (RSI, price range) + +### Compare Cryptos + +```bash +uv run scripts/analyze_stock.py BTC-USD ETH-USD SOL-USD +``` + +### Supported Cryptos + +``` +BTC, ETH, BNB, SOL, XRP, ADA, DOGE, AVAX, DOT, MATIC, +LINK, ATOM, UNI, LTC, BCH, XLM, ALGO, VET, FIL, NEAR +``` + +Use `-USD` suffix: `BTC-USD`, `ETH-USD`, etc. + +--- + +## Dividend Investing + +### Analyze Dividend Stock + +```bash +uv run scripts/dividends.py JNJ +``` + +**Output:** +``` +============================================================ +DIVIDEND ANALYSIS: JNJ (Johnson & Johnson) +============================================================ + +Current Price: $160.50 +Annual Dividend: $4.76 +Dividend Yield: 2.97% +Payment Freq: quarterly +Ex-Dividend: 2024-02-15 + +Payout Ratio: 65.0% (moderate) +5Y Div Growth: +5.8% +Consecutive Yrs: 62 + +SAFETY SCORE: 78/100 +INCOME RATING: GOOD + +Safety Factors: + • Moderate payout ratio (65%) + • Good dividend growth (+5.8% CAGR) + • Dividend Aristocrat (62+ years) + +Dividend History: + 2023: $4.52 + 2022: $4.36 + 2021: $4.24 + 2020: $4.04 + 2019: $3.80 +============================================================ +``` + +### Compare Dividend Stocks + +```bash +uv run scripts/dividends.py JNJ PG KO MCD VZ T +``` + +### Dividend Aristocrats Screen + +Look for stocks with: +- Yield > 2% +- Payout < 60% +- Growth > 5% +- Consecutive years > 25 + +--- + +## Portfolio Management + +### Create Portfolio + +```bash +uv run scripts/portfolio.py create "Retirement" +``` + +### Add Holdings + +```bash +# Stocks 
+uv run scripts/portfolio.py add AAPL --quantity 100 --cost 150.00 + +# Crypto +uv run scripts/portfolio.py add BTC-USD --quantity 0.5 --cost 40000 +``` + +### View Portfolio + +```bash +uv run scripts/portfolio.py show +``` + +**Output:** +``` +Portfolio: Retirement +==================== + +Assets: + AAPL 100 shares @ $150.00 = $15,000.00 + Current: $185.00 = $18,500.00 (+23.3%) + + BTC-USD 0.5 @ $40,000 = $20,000.00 + Current: $45,000 = $22,500.00 (+12.5%) + +Total Cost: $35,000.00 +Current Value: $41,000.00 +Total P&L: +$6,000.00 (+17.1%) +``` + +### Analyze Portfolio + +```bash +# Full analysis of all holdings +uv run scripts/analyze_stock.py --portfolio "Retirement" + +# With period returns +uv run scripts/analyze_stock.py --portfolio "Retirement" --period monthly +``` + +### Rebalance Check + +The analysis flags concentration warnings: +``` +⚠️ CONCENTRATION WARNINGS: + • AAPL: 45.1% (>30% of portfolio) +``` + +--- + +## Watchlist & Alerts + +### Add to Watchlist + +```bash +# Basic watch +uv run scripts/watchlist.py add NVDA + +# With price target +uv run scripts/watchlist.py add NVDA --target 800 + +# With stop loss +uv run scripts/watchlist.py add NVDA --stop 600 + +# Alert on signal change +uv run scripts/watchlist.py add NVDA --alert-on signal + +# All options +uv run scripts/watchlist.py add NVDA --target 800 --stop 600 --alert-on signal +``` + +### View Watchlist + +```bash +uv run scripts/watchlist.py list +``` + +**Output:** +```json +{ + "success": true, + "items": [ + { + "ticker": "NVDA", + "current_price": 725.50, + "price_at_add": 700.00, + "change_pct": 3.64, + "target_price": 800.00, + "to_target_pct": 10.27, + "stop_price": 600.00, + "to_stop_pct": -17.30, + "alert_on_signal": true, + "last_signal": "BUY", + "added_at": "2024-01-15" + } + ], + "count": 1 +} +``` + +### Check Alerts + +```bash +# Check for triggered alerts +uv run scripts/watchlist.py check + +# Format for notification (Telegram) +uv run scripts/watchlist.py check --notify 
+``` + +**Alert Example:** +``` +📢 Stock Alerts + +🎯 NVDA hit target! $802.50 >= $800.00 +🛑 TSLA hit stop! $195.00 <= $200.00 +📊 AAPL signal changed: HOLD → BUY +``` + +### Remove from Watchlist + +```bash +uv run scripts/watchlist.py remove NVDA +``` + +--- + +## Performance Tips + +### Fast Mode + +Skip slow analyses for quick checks: + +```bash +# Skip insider trading + breaking news +uv run scripts/analyze_stock.py AAPL --fast +``` + +**Speed comparison:** +| Mode | Time | What's Skipped | +|------|------|----------------| +| Default | 5-10s | Nothing | +| `--no-insider` | 3-5s | SEC EDGAR | +| `--fast` | 2-3s | Insider + News | + +### Batch Analysis + +Analyze multiple stocks in one command: + +```bash +uv run scripts/analyze_stock.py AAPL MSFT GOOGL AMZN META +``` + +### Caching + +Market context is cached for 1 hour: +- VIX, SPY, QQQ trends +- Fear & Greed Index +- VIX term structure +- Breaking news + +Second analysis of different stock reuses cached data. + +--- + +## Interpreting Results + +### Recommendation Thresholds + +| Score | Recommendation | +|-------|----------------| +| > +0.33 | BUY | +| -0.33 to +0.33 | HOLD | +| < -0.33 | SELL | + +### Confidence Levels + +| Confidence | Meaning | +|------------|---------| +| > 80% | Strong conviction | +| 60-80% | Moderate conviction | +| 40-60% | Mixed signals | +| < 40% | Low conviction | + +### Reading Caveats + +**Always read the caveats!** They often contain critical information: + +``` +CAVEATS: +• Earnings in 5 days - high volatility expected ← Timing risk +• RSI 78 (overbought) + near 52w high ← Technical risk +• ⚠️ BREAKING NEWS: Fed emergency rate discussion ← External risk +• ⚠️ SECTOR RISK: China tensions affect tech ← Geopolitical +``` + +### When to Ignore the Signal + +- **Pre-earnings:** Even BUY → wait until after +- **Overbought:** Consider smaller position +- **Risk-off:** Reduce overall exposure +- **Low confidence:** Do more research + +### When to Trust the Signal + +- **High confidence 
+ no major caveats** +- **Multiple supporting points align** +- **Sector is strong** +- **Market regime is favorable** + +--- + +## Common Workflows + +### Morning Check + +```bash +# Check watchlist alerts +uv run scripts/watchlist.py check --notify + +# Quick portfolio update +uv run scripts/analyze_stock.py --portfolio "Main" --fast +``` + +### Research New Stock + +```bash +# Full analysis +uv run scripts/analyze_stock.py XYZ + +# If dividend stock +uv run scripts/dividends.py XYZ + +# Add to watchlist for monitoring +uv run scripts/watchlist.py add XYZ --alert-on signal +``` + +### Weekly Review + +```bash +# Full portfolio analysis +uv run scripts/analyze_stock.py --portfolio "Main" --period weekly + +# Check dividend holdings +uv run scripts/dividends.py JNJ PG KO +``` + +--- + +## Troubleshooting + +### "Invalid ticker" + +- Check spelling +- For crypto, use `-USD` suffix +- Non-US stocks may not work + +### "Insufficient data" + +- Stock might be too new +- ETFs have limited data +- OTC stocks often fail + +### Slow Performance + +- Use `--fast` for quick checks +- Insider trading is slowest +- Breaking news adds ~2s + +### Missing Data + +- Not all stocks have analyst coverage +- Some metrics require options chains +- Crypto has no sentiment data diff --git a/scripts/analyze_stock.py b/scripts/analyze_stock.py new file mode 100644 index 0000000..f8bc669 --- /dev/null +++ b/scripts/analyze_stock.py @@ -0,0 +1,2532 @@ +#!/usr/bin/env python3 +# /// script +# requires-python = ">=3.10" +# dependencies = [ +# "yfinance>=0.2.40", +# "pandas>=2.0.0", +# "fear-and-greed>=0.4", +# "edgartools>=2.0.0", +# "feedparser>=6.0.0", +# ] +# /// +""" +Stock analysis using Yahoo Finance data. + +Usage: + uv run analyze_stock.py TICKER [TICKER2 ...] 
def detect_asset_type(ticker: str) -> Literal["stock", "crypto"]:
    """Classify a ticker as "crypto" or "stock" based on its format.

    Crypto pairs follow the Yahoo Finance convention of an alphabetic base
    symbol plus a "-USD" suffix (e.g. "BTC-USD"); every other format is
    treated as a stock (so "BRK-B" stays a stock).
    """
    normalized = ticker.upper()
    base, sep, quote = normalized.rpartition("-")
    if sep and quote == "USD" and base.isalpha():
        return "crypto"
    return "stock"
L1", "DeFi", etc. + btc_correlation: float | None # 30-day correlation to BTC + score: float + explanation: str + + +@dataclass +class EarningsSurprise: + score: float + explanation: str + actual_eps: float | None = None + expected_eps: float | None = None + surprise_pct: float | None = None + + +@dataclass +class Fundamentals: + score: float + key_metrics: dict + explanation: str + + +@dataclass +class AnalystSentiment: + score: float | None + summary: str + consensus_rating: str | None = None + price_target: float | None = None + current_price: float | None = None + upside_pct: float | None = None + num_analysts: int | None = None + + +@dataclass +class HistoricalPatterns: + score: float + pattern_desc: str + beats_last_4q: int | None = None + avg_reaction_pct: float | None = None + + +@dataclass +class MarketContext: + vix_level: float + vix_status: str # "calm", "elevated", "fear" + spy_trend_10d: float + qqq_trend_10d: float + market_regime: str # "bull", "bear", "choppy" + score: float + explanation: str + # Safe-haven indicators (v4.0.0) + gld_change_5d: float | None = None # Gold ETF % change + tlt_change_5d: float | None = None # Treasury ETF % change + uup_change_5d: float | None = None # USD Index ETF % change + risk_off_detected: bool = False # True if flight to safety detected + + +@dataclass +class SectorComparison: + sector_name: str + industry_name: str + stock_return_1m: float + sector_return_1m: float + relative_strength: float + sector_trend: str # "strong uptrend", "downtrend", etc. 
+ score: float + explanation: str + + +@dataclass +class EarningsTiming: + days_until_earnings: int | None + days_since_earnings: int | None + next_earnings_date: str | None + last_earnings_date: str | None + timing_flag: str # "pre_earnings", "post_earnings", "safe" + price_change_5d: float | None + confidence_adjustment: float + caveats: list[str] + + +@dataclass +class MomentumAnalysis: + rsi_14d: float | None + rsi_status: str # "overbought", "oversold", "neutral" + price_vs_52w_low: float | None + price_vs_52w_high: float | None + near_52w_high: bool + near_52w_low: bool + volume_ratio: float | None + relative_strength_vs_sector: float | None + score: float + explanation: str + + +@dataclass +class SentimentAnalysis: + score: float # Overall -1.0 to 1.0 + explanation: str # Human-readable summary + + # Sub-indicator scores + fear_greed_score: float | None = None + short_interest_score: float | None = None + vix_structure_score: float | None = None + insider_activity_score: float | None = None + put_call_score: float | None = None + + # Raw data + fear_greed_value: int | None = None # 0-100 + fear_greed_status: str | None = None # "Extreme Fear", etc. 
+ short_interest_pct: float | None = None + days_to_cover: float | None = None + vix_structure: str | None = None # "contango", "backwardation", "flat" + vix_slope: float | None = None + insider_net_shares: int | None = None + insider_net_value: float | None = None # Millions USD + put_call_ratio: float | None = None + put_volume: int | None = None + call_volume: int | None = None + + # Metadata + indicators_available: int = 0 + data_freshness_warnings: list[str] | None = None + + +@dataclass +class Signal: + ticker: str + company_name: str + recommendation: Literal["BUY", "HOLD", "SELL"] + confidence: float + final_score: float + supporting_points: list[str] + caveats: list[str] + timestamp: str + components: dict + + +def fetch_stock_data(ticker: str, verbose: bool = False) -> StockData | None: + """Fetch stock data from Yahoo Finance with retry logic.""" + max_retries = 3 + for attempt in range(max_retries): + try: + if verbose: + print(f"Fetching data for {ticker}... (attempt {attempt + 1}/{max_retries})", file=sys.stderr) + + stock = yf.Ticker(ticker) + info = stock.info + + # Validate ticker + if not info or "regularMarketPrice" not in info: + return None + + # Fetch earnings history + try: + earnings_history = stock.earnings_dates + except Exception: + earnings_history = None + + # Fetch analyst info + try: + analyst_info = { + "recommendations": stock.recommendations, + "analyst_price_targets": stock.analyst_price_targets, + } + except Exception: + analyst_info = None + + # Fetch price history (1 year for historical patterns) + try: + price_history = stock.history(period="1y") + except Exception: + price_history = None + + return StockData( + ticker=ticker, + info=info, + earnings_history=earnings_history, + analyst_info=analyst_info, + price_history=price_history, + asset_type=detect_asset_type(ticker), + ) + + except Exception as e: + if attempt < max_retries - 1: + wait_time = 2 ** attempt # Exponential backoff + if verbose: + print(f"Error fetching 
def analyze_earnings_surprise(data: StockData) -> EarningsSurprise | None:
    """Analyze earnings surprise from the most recent reported quarter.

    Scans the newest earnings rows for the first one carrying both a
    reported and an estimated EPS, and scores the percentage surprise on a
    coarse -1.0 .. +1.0 scale. Returns None when no usable row exists or
    an unexpected error occurs.
    """
    history = data.earnings_history
    if history is None or history.empty:
        return None

    try:
        # Newest first; future quarters only carry estimates, so walk until
        # a row has both actual and expected EPS.
        candidates = history.sort_index(ascending=False).head(10)

        for _, row in candidates.iterrows():
            if not (pd.notna(row.get("Reported EPS")) and pd.notna(row.get("EPS Estimate"))):
                continue

            actual = float(row["Reported EPS"])
            expected = float(row["EPS Estimate"])
            if expected == 0:
                continue  # avoid division by zero; try an older quarter

            surprise_pct = (actual - expected) / abs(expected) * 100

            # Map surprise magnitude onto the tiered score used elsewhere.
            score = -1.0
            for cutoff, tier in ((10, 1.0), (5, 0.7), (0, 0.3), (-5, -0.3), (-10, -0.7)):
                if surprise_pct > cutoff:
                    score = tier
                    break

            verdict = "Beat" if surprise_pct > 0 else "Missed"
            return EarningsSurprise(
                score=score,
                explanation=f"{verdict} by {abs(surprise_pct):.1f}%",
                actual_eps=actual,
                expected_eps=expected,
                surprise_pct=surprise_pct,
            )

        return None

    except Exception:
        return None
def analyze_fundamentals(data: StockData) -> Fundamentals | None:
    """Score fundamental metrics (valuation, margins, growth, leverage).

    Each available metric contributes a sub-score in [-1, 1]; the final
    score is the clamped average of the sub-scores. Returns None when no
    metric is available or an unexpected error occurs.
    """
    info = data.info
    scores = []
    metrics = {}
    explanations = []

    try:
        # P/E Ratio (lower is better, but consider growth). Non-positive
        # P/E (loss-making) is intentionally skipped.
        pe_ratio = info.get("trailingPE") or info.get("forwardPE")
        if pe_ratio and pe_ratio > 0:
            metrics["pe_ratio"] = round(pe_ratio, 2)
            if pe_ratio < 15:
                scores.append(0.5)
                explanations.append(f"Attractive P/E: {pe_ratio:.1f}x")
            elif pe_ratio > 30:
                scores.append(-0.3)
                explanations.append(f"Elevated P/E: {pe_ratio:.1f}x")
            else:
                scores.append(0.1)

        # Operating margin. BUGFIX: compare against None explicitly — a
        # margin of exactly 0.0 is real data (break-even) and the previous
        # truthiness test silently skipped it.
        op_margin = info.get("operatingMargins")
        if op_margin is not None:
            metrics["operating_margin"] = round(op_margin, 3)
            if op_margin > 0.15:
                scores.append(0.5)
                explanations.append(f"Strong margin: {op_margin*100:.1f}%")
            elif op_margin < 0.05:
                scores.append(-0.5)
                explanations.append(f"Weak margin: {op_margin*100:.1f}%")

        # Revenue growth YoY — same None-vs-zero distinction as above.
        rev_growth = info.get("revenueGrowth")
        if rev_growth is not None:
            metrics["revenue_growth_yoy"] = round(rev_growth, 3)
            if rev_growth > 0.20:
                scores.append(0.5)
                explanations.append(f"Strong growth: {rev_growth*100:.1f}% YoY")
            elif rev_growth < 0.05:
                scores.append(-0.3)
                explanations.append(f"Slow growth: {rev_growth*100:.1f}% YoY")
            else:
                scores.append(0.2)

        # Debt to equity. Yahoo reports this as a percentage (150 == 1.5x).
        debt_equity = info.get("debtToEquity")
        if debt_equity is not None:
            metrics["debt_to_equity"] = round(debt_equity / 100, 2)
            if debt_equity < 50:
                scores.append(0.3)
            elif debt_equity > 200:
                scores.append(-0.5)
                explanations.append(f"High debt: D/E {debt_equity/100:.1f}x")

        if not scores:
            return None

        # Average the sub-scores and clamp to [-1, 1].
        avg_score = sum(scores) / len(scores)
        normalized_score = max(-1.0, min(1.0, avg_score))

        explanation = "; ".join(explanations) if explanations else "Mixed fundamentals"

        return Fundamentals(
            score=normalized_score,
            key_metrics=metrics,
            explanation=explanation,
        )

    except Exception:
        return None
def analyze_crypto_fundamentals(data: StockData, verbose: bool = False) -> CryptoFundamentals | None:
    """Analyze crypto-specific fundamentals (market cap, supply, category).

    Stocks are scored on P/E, margins, etc.; those do not exist for crypto,
    so this instead scores market-cap tier, liquidity (24h volume vs market
    cap) and diversification value (30-day correlation to BTC).

    Returns None for non-crypto assets, when market cap is unavailable, or
    on unexpected errors.
    """
    if data.asset_type != "crypto":
        return None

    info = data.info
    ticker = data.ticker.upper()

    try:
        # Market cap analysis
        market_cap = info.get("marketCap")
        if not market_cap:
            return None

        # Categorize by market cap
        if market_cap >= 10_000_000_000:  # $10B+
            market_cap_rank = "large"
            cap_score = 0.3  # Large caps are more stable
        elif market_cap >= 1_000_000_000:  # $1B-$10B
            market_cap_rank = "mid"
            cap_score = 0.1
        else:
            market_cap_rank = "small"
            cap_score = -0.2  # Small caps are riskier

        # Volume analysis (daily turnover relative to market cap)
        volume_24h = info.get("volume") or info.get("volume24Hr")
        volume_score = 0.0
        if volume_24h and market_cap:
            volume_to_cap = volume_24h / market_cap
            if volume_to_cap > 0.05:  # >5% daily turnover
                volume_score = 0.2  # High liquidity
            elif volume_to_cap < 0.01:
                volume_score = -0.2  # Low liquidity

        # Circulating supply
        circulating_supply = info.get("circulatingSupply")

        # Get crypto category
        category = CRYPTO_CATEGORIES.get(ticker, "Unknown")

        # Calculate BTC correlation (30 days); best-effort, None on failure
        btc_correlation = None
        try:
            if ticker != "BTC-USD" and data.price_history is not None:
                btc = yf.Ticker("BTC-USD")
                btc_hist = btc.history(period="1mo")
                if not btc_hist.empty and len(data.price_history) > 5:
                    # Align dates and calculate correlation
                    crypto_returns = data.price_history["Close"].pct_change().dropna()
                    btc_returns = btc_hist["Close"].pct_change().dropna()
                    # Simple correlation on overlapping dates
                    common_dates = crypto_returns.index.intersection(btc_returns.index)
                    if len(common_dates) > 10:
                        btc_correlation = crypto_returns.loc[common_dates].corr(btc_returns.loc[common_dates])
        except Exception:
            pass

        # BTC correlation scoring (high correlation = less diversification benefit)
        corr_score = 0.0
        if btc_correlation is not None:
            if btc_correlation > 0.8:
                corr_score = -0.1  # Very correlated to BTC
            elif btc_correlation < 0.3:
                corr_score = 0.1  # Good diversification

        # Total score
        total_score = cap_score + volume_score + corr_score

        # Build explanation
        explanations = [f"Market cap: ${market_cap/1e9:.1f}B ({market_cap_rank})"]
        if category != "Unknown":
            explanations.append(f"Category: {category}")
        if btc_correlation is not None:
            explanations.append(f"BTC corr: {btc_correlation:.2f}")

        return CryptoFundamentals(
            market_cap=market_cap,
            market_cap_rank=market_cap_rank,
            volume_24h=volume_24h,
            circulating_supply=circulating_supply,
            category=category,
            # BUGFIX: explicit None check — a correlation of exactly 0.0 is
            # valid data and was previously dropped by the falsy test.
            btc_correlation=round(btc_correlation, 2) if btc_correlation is not None else None,
            score=max(-1.0, min(1.0, total_score)),
            explanation="; ".join(explanations),
        )

    except Exception as e:
        if verbose:
            print(f"Error analyzing crypto fundamentals: {e}", file=sys.stderr)
        return None
def analyze_analyst_sentiment(data: StockData) -> AnalystSentiment | None:
    """Score professional analyst consensus and price-target upside.

    The base score comes from the Yahoo consensus rating key and is nudged
    up or down by the gap between the mean price target and the current
    price. Returns None when no current price is available; returns a
    score-less AnalystSentiment when there is no analyst coverage.
    """
    info = data.info

    try:
        price_now = info.get("regularMarketPrice") or info.get("currentPrice")
        if not price_now:
            return None

        target = info.get("targetMeanPrice")
        analyst_count = info.get("numberOfAnalystOpinions")
        consensus = info.get("recommendationKey")

        if not target or not consensus:
            return AnalystSentiment(
                score=None,
                summary="No analyst coverage available",
            )

        upside = (target - price_now) / price_now * 100

        # Consensus rating key -> base score. NOTE(review): Yahoo can also
        # emit "underperform", which falls through to the 0.0 default here
        # — confirm whether that is intended.
        base = {
            "strong_buy": 1.0,
            "buy": 0.7,
            "hold": 0.0,
            "sell": -0.7,
            "strong_sell": -1.0,
        }.get(consensus, 0.0)

        # Nudge the base score by the size of the target-price gap.
        if upside > 20:
            final = min(1.0, base + 0.3)
        elif upside > 10:
            final = min(1.0, base + 0.15)
        elif upside < -10:
            final = max(-1.0, base - 0.3)
        else:
            final = base

        label = consensus.replace("_", " ").title()
        direction = "upside" if upside > 0 else "downside"
        summary = f"{label} with {abs(upside):.1f}% {direction}"
        if analyst_count:
            summary += f" ({analyst_count} analysts)"

        return AnalystSentiment(
            score=final,
            summary=summary,
            consensus_rating=label,
            price_target=target,
            current_price=price_now,
            upside_pct=upside,
            num_analysts=analyst_count,
        )

    except Exception:
        return AnalystSentiment(
            score=None,
            summary="Error analyzing analyst sentiment",
        )
def analyze_historical_patterns(data: StockData) -> HistoricalPatterns | None:
    """Summarise how the last (up to) four earnings reports went.

    Counts beats versus the analyst estimate and, where price data allows,
    the average same-day open-to-close price reaction. Returns None when
    the required earnings or price history is missing.
    """
    if data.earnings_history is None or data.price_history is None:
        return None
    if data.earnings_history.empty or data.price_history.empty:
        return None

    try:
        last_quarters = data.earnings_history.sort_index(ascending=False).head(4)

        beats = 0
        day_moves: list[float] = []

        for report_date, report in last_quarters.iterrows():
            has_both = pd.notna(report.get("Reported EPS")) and pd.notna(report.get("EPS Estimate"))
            if not has_both:
                continue

            if float(report["Reported EPS"]) > float(report["EPS Estimate"]):
                beats += 1

            # Same-day open->close reaction, when that trading day exists
            # in the price history; best-effort, skip on any error.
            try:
                session = data.price_history[
                    data.price_history.index.date == pd.Timestamp(report_date).date()
                ]
                if not session.empty:
                    open_px = session["Open"].iloc[0]
                    close_px = session["Close"].iloc[0]
                    day_moves.append((close_px - open_px) / open_px * 100)
            except Exception:
                continue

        total_quarters = len(last_quarters)
        if total_quarters == 0:
            return None

        # Beat rate -> coarse score. Note the denominator is the number of
        # quarters fetched, not only those with complete EPS data.
        beat_rate = beats / total_quarters
        if beat_rate == 1.0:
            score = 0.8
        elif beat_rate >= 0.75:
            score = 0.5
        elif beat_rate >= 0.5:
            score = 0.0
        elif beat_rate >= 0.25:
            score = -0.5
        else:
            score = -0.8

        pattern_desc = f"{beats}/{total_quarters} quarters beat expectations"
        avg_reaction = sum(day_moves) / len(day_moves) if day_moves else None
        if avg_reaction is not None:
            pattern_desc += f", avg reaction {avg_reaction:+.1f}%"

        return HistoricalPatterns(
            score=score,
            pattern_desc=pattern_desc,
            beats_last_4q=beats,
            avg_reaction_pct=avg_reaction,
        )

    except Exception:
        return None
pattern_desc=pattern_desc, + beats_last_4q=beats, + avg_reaction_pct=avg_reaction, + ) + + except Exception: + return None + + +def analyze_market_context(verbose: bool = False) -> MarketContext | None: + """Analyze overall market conditions using VIX, SPY, QQQ, and safe-havens with 1h cache.""" + # Check cache first + cached = _get_cached("market_context") + if cached is not None: + if verbose: + print("Using cached market context (< 1h old)", file=sys.stderr) + return cached + + try: + if verbose: + print("Fetching market indicators (VIX, SPY, QQQ)...", file=sys.stderr) + + # Fetch market indicators + vix = yf.Ticker("^VIX") + spy = yf.Ticker("SPY") + qqq = yf.Ticker("QQQ") + + # Get current VIX level + vix_info = vix.info + vix_level = vix_info.get("regularMarketPrice") or vix_info.get("currentPrice") + + if not vix_level: + return None + + # Determine VIX status + if vix_level < 20: + vix_status = "calm" + vix_score = 0.2 + elif vix_level < 30: + vix_status = "elevated" + vix_score = 0.0 + else: + vix_status = "fear" + vix_score = -0.5 + + # Get SPY and QQQ 10-day trends + spy_hist = spy.history(period="1mo") + qqq_hist = qqq.history(period="1mo") + + if spy_hist.empty or qqq_hist.empty: + return None + + # Calculate 10-day price changes + spy_10d_ago = spy_hist["Close"].iloc[-min(10, len(spy_hist))] + spy_current = spy_hist["Close"].iloc[-1] + spy_trend_10d = ((spy_current - spy_10d_ago) / spy_10d_ago) * 100 + + qqq_10d_ago = qqq_hist["Close"].iloc[-min(10, len(qqq_hist))] + qqq_current = qqq_hist["Close"].iloc[-1] + qqq_trend_10d = ((qqq_current - qqq_10d_ago) / qqq_10d_ago) * 100 + + # Determine market regime + avg_trend = (spy_trend_10d + qqq_trend_10d) / 2 + + if avg_trend > 3: + market_regime = "bull" + regime_score = 0.3 + elif avg_trend < -3: + market_regime = "bear" + regime_score = -0.4 + else: + market_regime = "choppy" + regime_score = -0.1 + + # Calculate overall score + overall_score = (vix_score + regime_score) / 2 + + # NEW v4.0.0: Fetch 
safe-haven indicators (GLD, TLT, UUP) + gld_change_5d = None + tlt_change_5d = None + uup_change_5d = None + risk_off_detected = False + + try: + if verbose: + print("Fetching safe-haven indicators (GLD, TLT, UUP)...", file=sys.stderr) + + # Fetch safe-haven ETFs + gld = yf.Ticker("GLD") # Gold + tlt = yf.Ticker("TLT") # 20+ Year Treasury + uup = yf.Ticker("UUP") # USD Index + + gld_hist = gld.history(period="10d") + tlt_hist = tlt.history(period="10d") + uup_hist = uup.history(period="10d") + + # Calculate 5-day changes + if not gld_hist.empty and len(gld_hist) >= 5: + gld_5d_ago = gld_hist["Close"].iloc[-min(5, len(gld_hist))] + gld_current = gld_hist["Close"].iloc[-1] + gld_change_5d = ((gld_current - gld_5d_ago) / gld_5d_ago) * 100 + + if not tlt_hist.empty and len(tlt_hist) >= 5: + tlt_5d_ago = tlt_hist["Close"].iloc[-min(5, len(tlt_hist))] + tlt_current = tlt_hist["Close"].iloc[-1] + tlt_change_5d = ((tlt_current - tlt_5d_ago) / tlt_5d_ago) * 100 + + if not uup_hist.empty and len(uup_hist) >= 5: + uup_5d_ago = uup_hist["Close"].iloc[-min(5, len(uup_hist))] + uup_current = uup_hist["Close"].iloc[-1] + uup_change_5d = ((uup_current - uup_5d_ago) / uup_5d_ago) * 100 + + # Risk-off detection: All three safe-havens rising together + if (gld_change_5d is not None and gld_change_5d >= 2.0 and + tlt_change_5d is not None and tlt_change_5d >= 1.0 and + uup_change_5d is not None and uup_change_5d >= 1.0): + risk_off_detected = True + overall_score -= 0.5 # Reduce score significantly + if verbose: + print(f" 🛡️ RISK-OFF DETECTED: GLD {gld_change_5d:+.1f}%, TLT {tlt_change_5d:+.1f}%, UUP {uup_change_5d:+.1f}%", file=sys.stderr) + + except Exception as e: + if verbose: + print(f" Safe-haven indicators unavailable: {e}", file=sys.stderr) + + # Build explanation + explanation = f"VIX {vix_level:.1f} ({vix_status}), Market {market_regime} (SPY {spy_trend_10d:+.1f}%, QQQ {qqq_trend_10d:+.1f}% 10d)" + if risk_off_detected: + explanation += " ⚠️ RISK-OFF MODE" + + result = 
MarketContext( + vix_level=vix_level, + vix_status=vix_status, + spy_trend_10d=spy_trend_10d, + qqq_trend_10d=qqq_trend_10d, + market_regime=market_regime, + score=overall_score, + explanation=explanation, + gld_change_5d=gld_change_5d, + tlt_change_5d=tlt_change_5d, + uup_change_5d=uup_change_5d, + risk_off_detected=risk_off_detected, + ) + + # Cache the result for 1 hour + _set_cache("market_context", result) + return result + + except Exception as e: + if verbose: + print(f"Error analyzing market context: {e}", file=sys.stderr) + return None + + +def get_sector_etf_ticker(sector: str) -> str | None: + """Map sector name to corresponding sector ETF ticker.""" + sector_map = { + "Financial Services": "XLF", + "Financials": "XLF", + "Technology": "XLK", + "Healthcare": "XLV", + "Consumer Cyclical": "XLY", + "Consumer Defensive": "XLP", + "Utilities": "XLU", + "Basic Materials": "XLB", + "Real Estate": "XLRE", + "Communication Services": "XLC", + "Industrials": "XLI", + "Energy": "XLE", + } + + return sector_map.get(sector) + + +# ============================================================================ +# Breaking News Check (v4.0.0) +# ============================================================================ + +# Crisis keywords by category +CRISIS_KEYWORDS = { + "war": ["war", "invasion", "military strike", "attack", "conflict", "combat"], + "economic": ["recession", "crisis", "collapse", "default", "bankruptcy", "crash"], + "regulatory": ["sanctions", "embargo", "ban", "investigation", "fraud", "probe"], + "disaster": ["earthquake", "hurricane", "pandemic", "outbreak", "disaster", "catastrophe"], + "financial": ["emergency rate", "fed emergency", "bailout", "circuit breaker", "trading halt"], +} + +# Geopolitical event → sector mapping (v4.0.0) +GEOPOLITICAL_RISK_MAP = { + "taiwan": { + "keywords": ["taiwan", "tsmc", "strait"], + "sectors": ["Technology", "Communication Services"], + "sector_etfs": ["XLK", "XLC"], + "impact": "Semiconductor supply chain 
disruption", + "affected_tickers": ["NVDA", "AMD", "TSM", "INTC", "QCOM", "AVGO", "MU"], + }, + "china": { + "keywords": ["china", "beijing", "tariff", "trade war"], + "sectors": ["Technology", "Consumer Cyclical", "Consumer Defensive"], + "sector_etfs": ["XLK", "XLY", "XLP"], + "impact": "Tech supply chain and consumer market exposure", + "affected_tickers": ["AAPL", "QCOM", "NKE", "SBUX", "MCD", "YUM", "TGT", "WMT"], + }, + "russia_ukraine": { + "keywords": ["russia", "ukraine", "putin", "kyiv", "moscow"], + "sectors": ["Energy", "Materials"], + "sector_etfs": ["XLE", "XLB"], + "impact": "Energy and commodity price volatility", + "affected_tickers": ["XOM", "CVX", "COP", "SLB", "MOS", "CF", "NTR", "ADM"], + }, + "middle_east": { + "keywords": ["iran", "israel", "gaza", "saudi", "middle east", "gulf"], + "sectors": ["Energy", "Industrials"], + "sector_etfs": ["XLE", "XLI"], + "impact": "Oil price volatility and defense spending", + "affected_tickers": ["XOM", "CVX", "COP", "LMT", "RTX", "NOC", "GD", "BA"], + }, + "banking_crisis": { + "keywords": ["bank failure", "credit crisis", "liquidity crisis", "bank run"], + "sectors": ["Financials"], + "sector_etfs": ["XLF"], + "impact": "Financial sector contagion risk", + "affected_tickers": ["JPM", "BAC", "WFC", "C", "GS", "MS", "USB", "PNC"], + }, +} + + +def check_breaking_news(verbose: bool = False) -> list[str] | None: + """ + Check Google News RSS for breaking market/economic crisis events (last 24h). + Returns list of alert strings or None. + Uses 1h cache to avoid excessive API calls. 
+ """ + # Check cache first + cached = _get_cached("breaking_news") + if cached is not None: + return cached + + alerts = [] + + try: + import feedparser + from datetime import datetime, timezone, timedelta + + if verbose: + print("Checking breaking news (Google News RSS)...", file=sys.stderr) + + # Google News RSS feeds for finance/business + rss_urls = [ + "https://news.google.com/rss/search?q=stock+market+when:24h&hl=en-US&gl=US&ceid=US:en", + "https://news.google.com/rss/search?q=economy+crisis+when:24h&hl=en-US&gl=US&ceid=US:en", + ] + + now = datetime.now(timezone.utc) + cutoff_time = now - timedelta(hours=24) + + for url in rss_urls: + try: + feed = feedparser.parse(url) + + for entry in feed.entries[:20]: # Check top 20 headlines + # Parse publication date + pub_date = None + if hasattr(entry, "published_parsed") and entry.published_parsed: + pub_date = datetime(*entry.published_parsed[:6], tzinfo=timezone.utc) + + # Skip if older than 24h + if pub_date and pub_date < cutoff_time: + continue + + title = entry.get("title", "").lower() + summary = entry.get("summary", "").lower() + text = f"{title} {summary}" + + # Check for crisis keywords + for category, keywords in CRISIS_KEYWORDS.items(): + for keyword in keywords: + if keyword in text: + alert_text = entry.get("title", "Unknown alert") + hours_ago = int((now - pub_date).total_seconds() / 3600) if pub_date else None + time_str = f"{hours_ago}h ago" if hours_ago is not None else "recent" + + alert = f"{alert_text} ({time_str})" + if alert not in alerts: # Deduplicate + alerts.append(alert) + if verbose: + print(f" ⚠️ Alert: {alert}", file=sys.stderr) + break + if len(alerts) >= 3: # Limit to 3 alerts + break + + if len(alerts) >= 3: + break + + except Exception as e: + if verbose: + print(f" Failed to fetch {url}: {e}", file=sys.stderr) + continue + + # Cache results (even if empty) for 1 hour + result = alerts if alerts else None + _set_cache("breaking_news", result) + return result + + except Exception 
as e: + if verbose: + print(f" Breaking news check failed: {e}", file=sys.stderr) + return None + + +def check_sector_geopolitical_risk( + ticker: str, + sector: str | None, + breaking_news: list[str] | None, + verbose: bool = False +) -> tuple[str | None, float]: + """ + Check if ticker is exposed to geopolitical risks based on breaking news. + Returns (warning_message, confidence_penalty). + + Args: + ticker: Stock ticker symbol + sector: Stock sector (from yfinance) + breaking_news: List of breaking news alerts + verbose: Print debug info + + Returns: + (warning_message, confidence_penalty) where: + - warning_message: None or string like "⚠️ SECTOR RISK: Taiwan tensions affect semiconductors" + - confidence_penalty: 0.0 (no risk) to 0.5 (high risk) + """ + if not breaking_news: + return None, 0.0 + + # Combine all breaking news into single text for keyword matching + news_text = " ".join(breaking_news).lower() + + # Check each geopolitical event + for event_name, event_data in GEOPOLITICAL_RISK_MAP.items(): + # Check if any keywords from this event appear in breaking news + keywords_found = [] + for keyword in event_data["keywords"]: + if keyword in news_text: + keywords_found.append(keyword) + + if not keywords_found: + continue + + # Check if ticker is in affected list + if ticker in event_data["affected_tickers"]: + # Direct ticker exposure + warning = f"⚠️ SECTOR RISK: {event_data['impact']} (detected: {', '.join(keywords_found)})" + penalty = 0.3 # Reduce BUY confidence by 30% + + if verbose: + print(f" Geopolitical risk detected: {event_name} affects {ticker}", file=sys.stderr) + + return warning, penalty + + # Check if sector is affected (even if ticker not in list) + if sector and sector in event_data["sectors"]: + # Sector exposure (weaker signal) + warning = f"⚠️ SECTOR RISK: {sector} sector exposed to {event_data['impact']}" + penalty = 0.15 # Reduce BUY confidence by 15% + + if verbose: + print(f" Sector risk detected: {event_name} affects {sector} 
sector", file=sys.stderr) + + return warning, penalty + + return None, 0.0 + + +def analyze_sector_performance(data: StockData, verbose: bool = False) -> SectorComparison | None: + """Compare stock performance to its sector.""" + try: + sector = data.info.get("sector") + industry = data.info.get("industry") + + if not sector: + return None + + sector_etf_ticker = get_sector_etf_ticker(sector) + + if not sector_etf_ticker: + if verbose: + print(f"No sector ETF mapping for {sector}", file=sys.stderr) + return None + + if verbose: + print(f"Comparing to sector ETF: {sector_etf_ticker}", file=sys.stderr) + + # Fetch sector ETF data + sector_etf = yf.Ticker(sector_etf_ticker) + sector_hist = sector_etf.history(period="3mo") + + if sector_hist.empty or data.price_history is None or data.price_history.empty: + return None + + # Calculate 1-month returns + stock_1m_ago = data.price_history["Close"].iloc[-min(22, len(data.price_history))] + stock_current = data.price_history["Close"].iloc[-1] + stock_return_1m = ((stock_current - stock_1m_ago) / stock_1m_ago) * 100 + + sector_1m_ago = sector_hist["Close"].iloc[-min(22, len(sector_hist))] + sector_current = sector_hist["Close"].iloc[-1] + sector_return_1m = ((sector_current - sector_1m_ago) / sector_1m_ago) * 100 + + # Calculate relative strength + relative_strength = stock_return_1m / sector_return_1m if sector_return_1m != 0 else 1.0 + + # Sector 10-day trend + sector_10d_ago = sector_hist["Close"].iloc[-min(10, len(sector_hist))] + sector_trend_10d = ((sector_current - sector_10d_ago) / sector_10d_ago) * 100 + + if sector_trend_10d > 5: + sector_trend = "strong uptrend" + elif sector_trend_10d > 2: + sector_trend = "uptrend" + elif sector_trend_10d < -5: + sector_trend = "downtrend" + elif sector_trend_10d < -2: + sector_trend = "weak" + else: + sector_trend = "neutral" + + # Calculate score + score = 0.0 + + # Relative performance score + if relative_strength > 1.05: # Outperforming by >5% + score += 0.3 + elif 
relative_strength < 0.95: # Underperforming by >5% + score -= 0.3 + + # Sector trend score + if sector_trend_10d > 5: + score += 0.2 + elif sector_trend_10d < -5: + score -= 0.2 + + explanation = f"{sector} sector {sector_trend} ({sector_return_1m:+.1f}% 1m), stock {stock_return_1m:+.1f}% vs sector" + + return SectorComparison( + sector_name=sector, + industry_name=industry or "Unknown", + stock_return_1m=stock_return_1m, + sector_return_1m=sector_return_1m, + relative_strength=relative_strength, + sector_trend=sector_trend, + score=score, + explanation=explanation, + ) + + except Exception as e: + if verbose: + print(f"Error analyzing sector performance: {e}", file=sys.stderr) + return None + + +def analyze_earnings_timing(data: StockData) -> EarningsTiming | None: + """Check earnings timing and flag pre/post-earnings periods.""" + try: + from datetime import datetime, timedelta + + if data.earnings_history is None or data.earnings_history.empty: + return None + + current_date = datetime.now() + earnings_dates = data.earnings_history.sort_index(ascending=False) + + # Find next and last earnings dates + next_earnings_date = None + last_earnings_date = None + + for earnings_date in earnings_dates.index: + earnings_dt = pd.Timestamp(earnings_date).to_pydatetime() + + if earnings_dt > current_date and next_earnings_date is None: + next_earnings_date = earnings_dt + elif earnings_dt <= current_date and last_earnings_date is None: + last_earnings_date = earnings_dt + break + + # Calculate days until/since earnings + days_until_earnings = None + days_since_earnings = None + + if next_earnings_date: + days_until_earnings = (next_earnings_date - current_date).days + + if last_earnings_date: + days_since_earnings = (current_date - last_earnings_date).days + + # Determine timing flag + timing_flag = "safe" + confidence_adjustment = 0.0 + caveats = [] + + # Pre-earnings check (< 14 days) + if days_until_earnings is not None and days_until_earnings <= 14: + timing_flag = 
"pre_earnings" + confidence_adjustment = -0.3 + caveats.append(f"Earnings in {days_until_earnings} days - high volatility expected") + + # Post-earnings check (< 5 days) + price_change_5d = None + if days_since_earnings is not None and days_since_earnings <= 5: + # Calculate 5-day price change + if data.price_history is not None and len(data.price_history) >= 5: + price_5d_ago = data.price_history["Close"].iloc[-5] + price_current = data.price_history["Close"].iloc[-1] + price_change_5d = ((price_current - price_5d_ago) / price_5d_ago) * 100 + + if price_change_5d > 15: + timing_flag = "post_earnings" + confidence_adjustment = -0.2 + caveats.append(f"Up {price_change_5d:.1f}% in 5 days - gains may be priced in") + + return EarningsTiming( + days_until_earnings=days_until_earnings, + days_since_earnings=days_since_earnings, + next_earnings_date=next_earnings_date.strftime("%Y-%m-%d") if next_earnings_date else None, + last_earnings_date=last_earnings_date.strftime("%Y-%m-%d") if last_earnings_date else None, + timing_flag=timing_flag, + price_change_5d=price_change_5d, + confidence_adjustment=confidence_adjustment, + caveats=caveats, + ) + + except Exception: + return None + + +def calculate_rsi(prices: pd.Series, period: int = 14) -> float | None: + """Calculate RSI (Relative Strength Index).""" + try: + if len(prices) < period + 1: + return None + + # Calculate price changes + delta = prices.diff() + + # Separate gains and losses + gains = delta.where(delta > 0, 0) + losses = -delta.where(delta < 0, 0) + + # Calculate average gains and losses + avg_gain = gains.rolling(window=period).mean() + avg_loss = losses.rolling(window=period).mean() + + # Calculate RS + rs = avg_gain / avg_loss + + # Calculate RSI + rsi = 100 - (100 / (1 + rs)) + + return float(rsi.iloc[-1]) + + except Exception: + return None + + +def analyze_momentum(data: StockData) -> MomentumAnalysis | None: + """Analyze momentum indicators (RSI, 52w range, volume, relative strength).""" + try: + if 
data.price_history is None or data.price_history.empty: + return None + + # Calculate RSI + rsi_14d = calculate_rsi(data.price_history["Close"], period=14) + + if rsi_14d: + if rsi_14d > 70: + rsi_status = "overbought" + elif rsi_14d < 30: + rsi_status = "oversold" + else: + rsi_status = "neutral" + else: + rsi_status = "unknown" + + # Get 52-week high/low + high_52w = data.info.get("fiftyTwoWeekHigh") + low_52w = data.info.get("fiftyTwoWeekLow") + current_price = data.info.get("regularMarketPrice") or data.info.get("currentPrice") + + price_vs_52w_low = None + price_vs_52w_high = None + near_52w_high = False + near_52w_low = False + + if high_52w and low_52w and current_price: + price_range = high_52w - low_52w + if price_range > 0: + price_vs_52w_low = ((current_price - low_52w) / price_range) * 100 + price_vs_52w_high = ((high_52w - current_price) / price_range) * 100 + + near_52w_high = price_vs_52w_low > 90 + near_52w_low = price_vs_52w_low < 10 + + # Volume analysis + volume_ratio = None + if "Volume" in data.price_history.columns and len(data.price_history) >= 60: + recent_vol = data.price_history["Volume"].iloc[-5:].mean() + avg_vol = data.price_history["Volume"].iloc[-60:].mean() + volume_ratio = recent_vol / avg_vol if avg_vol > 0 else None + + # Calculate score + score = 0.0 + explanations = [] + + if rsi_14d: + if rsi_14d > 70: + score -= 0.5 + explanations.append(f"RSI {rsi_14d:.0f} (overbought)") + elif rsi_14d < 30: + score += 0.5 + explanations.append(f"RSI {rsi_14d:.0f} (oversold)") + + if near_52w_high: + score -= 0.3 + explanations.append("Near 52w high") + elif near_52w_low: + score += 0.3 + explanations.append("Near 52w low") + + if volume_ratio and volume_ratio > 1.5: + explanations.append(f"Volume {volume_ratio:.1f}x average") + + explanation = "; ".join(explanations) if explanations else "Momentum indicators neutral" + + return MomentumAnalysis( + rsi_14d=rsi_14d, + rsi_status=rsi_status, + price_vs_52w_low=price_vs_52w_low, + 
price_vs_52w_high=price_vs_52w_high, + near_52w_high=near_52w_high, + near_52w_low=near_52w_low, + volume_ratio=volume_ratio, + relative_strength_vs_sector=None, # Could be enhanced with sector comparison + score=score, + explanation=explanation, + ) + + except Exception: + return None + + +# ============================================================================ +# Sentiment Analysis Helper Functions +# ============================================================================ + +# Simple cache for shared indicators (Fear & Greed, VIX) +# Format: {key: (value, timestamp)} +_SENTIMENT_CACHE = {} +_CACHE_TTL_SECONDS = 3600 # 1 hour + + +def _get_cached(key: str): + """Get cached value if still valid (within TTL).""" + if key in _SENTIMENT_CACHE: + value, timestamp = _SENTIMENT_CACHE[key] + if time.time() - timestamp < _CACHE_TTL_SECONDS: + return value + return None + + +def _set_cache(key: str, value): + """Set cached value with current timestamp.""" + _SENTIMENT_CACHE[key] = (value, time.time()) + + +async def get_fear_greed_index() -> tuple[float, int | None, str | None] | None: + """ + Fetch CNN Fear & Greed Index (contrarian indicator) with 1h cache. + Returns: (score, value, status) or None on failure. + """ + # Check cache first + cached = _get_cached("fear_greed") + if cached is not None: + return cached + + def _fetch(): + try: + from fear_and_greed import get as get_fear_greed + result = get_fear_greed() + return result + except Exception: + return None + + try: + result = await asyncio.to_thread(_fetch) + if result is None: + return None + + value = result.value # 0-100 + status = result.description # "Extreme Fear", "Fear", etc. 
+ + # Contrarian scoring + if value <= 25: + score = 0.5 # Extreme fear = buy opportunity + elif value <= 45: + score = 0.2 # Fear = mild buy signal + elif value <= 55: + score = 0.0 # Neutral + elif value <= 75: + score = -0.2 # Greed = caution + else: + score = -0.5 # Extreme greed = warning + + result_tuple = (score, value, status) + _set_cache("fear_greed", result_tuple) + return result_tuple + except Exception: + return None + + +async def get_short_interest(data: StockData) -> tuple[float, float | None, float | None] | None: + """ + Analyze short interest (from yfinance). + Returns: (score, short_interest_pct, days_to_cover) or None. + """ + # This is already synchronous data access (no API call), but make it async for consistency + try: + short_pct = data.info.get("shortPercentOfFloat") + if short_pct is None: + return None + + short_pct_float = float(short_pct) * 100 # Convert to percentage + + # Estimate days to cover (simplified - actual calculation needs volume data) + short_ratio = data.info.get("shortRatio") # Days to cover + days_to_cover = float(short_ratio) if short_ratio else None + + # Scoring logic + if short_pct_float > 20: + if days_to_cover and days_to_cover > 10: + score = 0.4 # High short interest + high days to cover = squeeze potential + else: + score = -0.3 # High short interest but justified + elif short_pct_float < 5: + score = 0.2 # Low short interest = bullish sentiment + else: + score = 0.0 # Normal range + + return (score, short_pct_float, days_to_cover) + except Exception: + return None + + +async def get_vix_term_structure() -> tuple[float, str | None, float | None] | None: + """ + Analyze VIX futures term structure (contango vs backwardation) with 1h cache. + Returns: (score, structure, slope) or None. 
+ """ + # Check cache first + cached = _get_cached("vix_structure") + if cached is not None: + return cached + + def _fetch(): + try: + import yfinance as yf + vix = yf.Ticker("^VIX") + vix_data = vix.history(period="5d") + if vix_data.empty: + return None + return vix_data["Close"].iloc[-1] + except Exception: + return None + + try: + vix_spot = await asyncio.to_thread(_fetch) + if vix_spot is None: + return None + + # Simplified: assume normal contango when VIX < 20, backwardation when VIX > 30 + if vix_spot < 15: + structure = "contango" + slope = 10.0 # Steep contango + score = 0.3 # Complacency/bullish + elif vix_spot < 20: + structure = "contango" + slope = 5.0 + score = 0.1 + elif vix_spot > 30: + structure = "backwardation" + slope = -5.0 + score = -0.3 # Stress/bearish + else: + structure = "flat" + slope = 0.0 + score = 0.0 + + result_tuple = (score, structure, slope) + _set_cache("vix_structure", result_tuple) + return result_tuple + except Exception: + return None + + +async def get_insider_activity(ticker: str, period_days: int = 90) -> tuple[float, int | None, float | None] | None: + """ + Analyze insider trading from SEC Form 4 filings using edgartools. + Returns: (score, net_shares, net_value_millions) or None. + + Scoring logic: + - Strong buying (>100K shares or >$1M): +0.8 + - Moderate buying (>10K shares or >$0.1M): +0.4 + - Neutral: 0 + - Moderate selling: -0.4 + - Strong selling: -0.8 + + Note: SEC EDGAR API requires User-Agent with email. 
+ """ + def _fetch(): + try: + from edgar import Company, set_identity + from datetime import datetime, timedelta + + # Set SEC-required identity + set_identity("stock-analysis@clawd.bot") + + # Get company and Form 4 filings + company = Company(ticker) + filings = company.get_filings(form="4") + + if filings is None or len(filings) == 0: + return None + + # Calculate cutoff date + cutoff_date = datetime.now() - timedelta(days=period_days) + + # Aggregate transactions + total_bought_shares = 0 + total_sold_shares = 0 + total_bought_value = 0.0 + total_sold_value = 0.0 + + # Process recent filings (iterate, don't slice due to pyarrow compatibility) + count = 0 + for filing in filings: + if count >= 50: + break + count += 1 + + try: + # Check filing date + filing_date = filing.filing_date + if hasattr(filing_date, 'to_pydatetime'): + filing_date = filing_date.to_pydatetime() + elif isinstance(filing_date, str): + filing_date = datetime.strptime(filing_date, "%Y-%m-%d") + + # Convert date object to datetime for comparison + if hasattr(filing_date, 'year') and not hasattr(filing_date, 'hour'): + filing_date = datetime.combine(filing_date, datetime.min.time()) + + if filing_date < cutoff_date: + continue + + # Get Form 4 object + form4 = filing.obj() + if form4 is None: + continue + + # Process purchases (edgartools returns DataFrames) + if hasattr(form4, 'common_stock_purchases'): + purchases = form4.common_stock_purchases + if isinstance(purchases, pd.DataFrame) and not purchases.empty: + if 'Shares' in purchases.columns: + total_bought_shares += int(purchases['Shares'].sum()) + if 'Price' in purchases.columns and 'Shares' in purchases.columns: + total_bought_value += float((purchases['Shares'] * purchases['Price']).sum()) + + # Process sales + if hasattr(form4, 'common_stock_sales'): + sales = form4.common_stock_sales + if isinstance(sales, pd.DataFrame) and not sales.empty: + if 'Shares' in sales.columns: + total_sold_shares += int(sales['Shares'].sum()) + if 
'Price' in sales.columns and 'Shares' in sales.columns: + total_sold_value += float((sales['Shares'] * sales['Price']).sum()) + + except Exception: + continue + + # Calculate net values + net_shares = total_bought_shares - total_sold_shares + net_value = (total_bought_value - total_sold_value) / 1_000_000 # Millions + + # Apply scoring logic + if net_shares > 100_000 or net_value > 1.0: + score = 0.8 # Strong buying + elif net_shares > 10_000 or net_value > 0.1: + score = 0.4 # Moderate buying + elif net_shares < -100_000 or net_value < -1.0: + score = -0.8 # Strong selling + elif net_shares < -10_000 or net_value < -0.1: + score = -0.4 # Moderate selling + else: + score = 0.0 # Neutral + + return (score, net_shares, net_value) + + except ImportError: + # edgartools not installed + return None + except Exception: + return None + + try: + result = await asyncio.to_thread(_fetch) + return result + except Exception: + return None + + +async def get_put_call_ratio(data: StockData) -> tuple[float, float | None, int | None, int | None] | None: + """ + Calculate put/call ratio from options chain (contrarian indicator). + Returns: (score, ratio, put_volume, call_volume) or None. 
+ """ + def _fetch(): + try: + if data.ticker_obj is None: + return None + + # Get options chain for nearest expiration + expirations = data.ticker_obj.options + if not expirations or len(expirations) == 0: + return None + + nearest_exp = expirations[0] + opt_chain = data.ticker_obj.option_chain(nearest_exp) + + # Calculate total put and call volume + put_volume = opt_chain.puts["volume"].sum() if "volume" in opt_chain.puts.columns else 0 + call_volume = opt_chain.calls["volume"].sum() if "volume" in opt_chain.calls.columns else 0 + + if call_volume == 0 or put_volume == 0: + return None + + ratio = put_volume / call_volume + return (ratio, int(put_volume), int(call_volume)) + except Exception: + return None + + try: + result = await asyncio.to_thread(_fetch) + if result is None: + return None + + ratio, put_volume, call_volume = result + + # Contrarian scoring + if ratio > 1.5: + score = 0.3 # Excessive fear = bullish + elif ratio > 1.0: + score = 0.1 # Mild fear + elif ratio > 0.7: + score = -0.1 # Normal + else: + score = -0.3 # Complacency = bearish + + return (score, ratio, put_volume, call_volume) + except Exception: + return None + + +async def analyze_sentiment(data: StockData, verbose: bool = False, skip_insider: bool = False) -> SentimentAnalysis | None: + """ + Analyze market sentiment using 5 sub-indicators in parallel. + Requires at least 2 of 5 indicators for valid sentiment. + Returns overall sentiment score (-1.0 to +1.0) with sub-metrics. 
+ """ + scores = [] + explanations = [] + warnings = [] + + # Initialize all raw data fields + fear_greed_score = None + fear_greed_value = None + fear_greed_status = None + + short_interest_score = None + short_interest_pct = None + days_to_cover = None + + vix_structure_score = None + vix_structure = None + vix_slope = None + + insider_activity_score = None + insider_net_shares = None + insider_net_value = None + + put_call_score = None + put_call_ratio = None + put_volume = None + call_volume = None + + # Fetch all 5 indicators in parallel with 10s timeout per indicator + # (or 4 if skip_insider=True for faster analysis) + try: + tasks = [ + asyncio.wait_for(get_fear_greed_index(), timeout=10), + asyncio.wait_for(get_short_interest(data), timeout=10), + asyncio.wait_for(get_vix_term_structure(), timeout=10), + ] + + if skip_insider: + tasks.append(asyncio.sleep(0)) # Placeholder - returns None + if verbose: + print(" Skipping insider trading analysis (--no-insider)", file=sys.stderr) + else: + tasks.append(asyncio.wait_for(get_insider_activity(data.ticker, period_days=90), timeout=10)) + + tasks.append(asyncio.wait_for(get_put_call_ratio(data), timeout=10)) + + results = await asyncio.gather(*tasks, return_exceptions=True) + + # Process Fear & Greed Index + fear_greed_result = results[0] + if isinstance(fear_greed_result, tuple) and fear_greed_result is not None: + fear_greed_score, fear_greed_value, fear_greed_status = fear_greed_result + scores.append(fear_greed_score) + explanations.append(f"{fear_greed_status} ({fear_greed_value})") + if verbose: + print(f" Fear & Greed: {fear_greed_status} ({fear_greed_value}) → score {fear_greed_score:+.2f}", file=sys.stderr) + elif verbose and isinstance(fear_greed_result, Exception): + print(f" Fear & Greed: Failed ({fear_greed_result})", file=sys.stderr) + + # Process Short Interest + short_interest_result = results[1] + if isinstance(short_interest_result, tuple) and short_interest_result is not None: + 
short_interest_score, short_interest_pct, days_to_cover = short_interest_result + scores.append(short_interest_score) + if days_to_cover: + explanations.append(f"Short interest {short_interest_pct:.1f}% (days to cover: {days_to_cover:.1f})") + else: + explanations.append(f"Short interest {short_interest_pct:.1f}%") + warnings.append("Short interest data typically ~2 weeks old (FINRA lag)") + if verbose: + print(f" Short Interest: {short_interest_pct:.1f}% → score {short_interest_score:+.2f}", file=sys.stderr) + elif verbose and isinstance(short_interest_result, Exception): + print(f" Short Interest: Failed ({short_interest_result})", file=sys.stderr) + + # Process VIX Term Structure + vix_result = results[2] + if isinstance(vix_result, tuple) and vix_result is not None: + vix_structure_score, vix_structure, vix_slope = vix_result + scores.append(vix_structure_score) + explanations.append(f"VIX {vix_structure}") + if verbose: + print(f" VIX Structure: {vix_structure} (slope {vix_slope:.1f}%) → score {vix_structure_score:+.2f}", file=sys.stderr) + elif verbose and isinstance(vix_result, Exception): + print(f" VIX Structure: Failed ({vix_result})", file=sys.stderr) + + # Process Insider Activity + insider_result = results[3] + if isinstance(insider_result, tuple) and insider_result is not None: + insider_activity_score, insider_net_shares, insider_net_value = insider_result + scores.append(insider_activity_score) + if insider_net_value: + explanations.append(f"Insider net: ${insider_net_value:.1f}M") + warnings.append("Insider trades may lag filing by 2-3 days") + if verbose: + print(f" Insider Activity: Net ${insider_net_value:.1f}M → score {insider_activity_score:+.2f}", file=sys.stderr) + elif verbose and isinstance(insider_result, Exception): + print(f" Insider Activity: Failed ({insider_result})", file=sys.stderr) + + # Process Put/Call Ratio + put_call_result = results[4] + if isinstance(put_call_result, tuple) and put_call_result is not None: + put_call_score, 
put_call_ratio, put_volume, call_volume = put_call_result + scores.append(put_call_score) + explanations.append(f"Put/call ratio {put_call_ratio:.2f}") + if verbose: + print(f" Put/Call Ratio: {put_call_ratio:.2f} → score {put_call_score:+.2f}", file=sys.stderr) + elif verbose and isinstance(put_call_result, Exception): + print(f" Put/Call Ratio: Failed ({put_call_result})", file=sys.stderr) + + except Exception as e: + if verbose: + print(f" Sentiment analysis error: {e}", file=sys.stderr) + return None + + # Require at least 2 of 5 indicators for valid sentiment + indicators_available = len(scores) + if indicators_available < 2: + if verbose: + print(f" Sentiment: Insufficient data ({indicators_available}/5 indicators)", file=sys.stderr) + return None + + # Calculate overall score as simple average + overall_score = sum(scores) / len(scores) + explanation = "; ".join(explanations) + + return SentimentAnalysis( + score=overall_score, + explanation=explanation, + fear_greed_score=fear_greed_score, + short_interest_score=short_interest_score, + vix_structure_score=vix_structure_score, + insider_activity_score=insider_activity_score, + put_call_score=put_call_score, + fear_greed_value=fear_greed_value, + fear_greed_status=fear_greed_status, + short_interest_pct=short_interest_pct, + days_to_cover=days_to_cover, + vix_structure=vix_structure, + vix_slope=vix_slope, + insider_net_shares=insider_net_shares, + insider_net_value=insider_net_value, + put_call_ratio=put_call_ratio, + put_volume=put_volume, + call_volume=call_volume, + indicators_available=indicators_available, + data_freshness_warnings=warnings if warnings else None, + ) + + +def synthesize_signal( + ticker: str, + company_name: str, + earnings: EarningsSurprise | None, + fundamentals: Fundamentals | None, + analysts: AnalystSentiment | None, + historical: HistoricalPatterns | None, + market_context: MarketContext | None, + sector: SectorComparison | None, + earnings_timing: EarningsTiming | None, + 
momentum: MomentumAnalysis | None, + sentiment: SentimentAnalysis | None, + breaking_news: list[str] | None = None, # NEW v4.0.0 + geopolitical_risk_warning: str | None = None, # NEW v4.0.0 + geopolitical_risk_penalty: float = 0.0, # NEW v4.0.0 +) -> Signal: + """Synthesize all components into a final signal.""" + + # Collect available components with weights + components = [] + weights = [] + + if earnings: + components.append(("earnings", earnings.score)) + weights.append(0.30) # reduced from 0.35 + + if fundamentals: + components.append(("fundamentals", fundamentals.score)) + weights.append(0.20) # reduced from 0.25 + + if analysts and analysts.score is not None: + components.append(("analysts", analysts.score)) + weights.append(0.20) # reduced from 0.25 + + if historical: + components.append(("historical", historical.score)) + weights.append(0.10) # reduced from 0.15 + + # NEW COMPONENTS + if market_context: + components.append(("market", market_context.score)) + weights.append(0.10) + + if sector: + components.append(("sector", sector.score)) + weights.append(0.15) + + if momentum: + components.append(("momentum", momentum.score)) + weights.append(0.15) + + if sentiment: + components.append(("sentiment", sentiment.score)) + weights.append(0.10) + + # Require at least 2 components + if len(components) < 2: + return Signal( + ticker=ticker, + company_name=company_name, + recommendation="HOLD", + confidence=0.0, + final_score=0.0, + supporting_points=["Insufficient data for analysis"], + caveats=["Limited data available"], + timestamp=datetime.now().isoformat(), + components={}, + ) + + # Normalize weights + total_weight = sum(weights) + normalized_weights = [w / total_weight for w in weights] + + # Calculate weighted score + final_score = sum(score * weight for (_, score), weight in zip(components, normalized_weights)) + + # Determine recommendation + if final_score > 0.33: + recommendation = "BUY" + elif final_score < -0.33: + recommendation = "SELL" + else: + 
recommendation = "HOLD" + + confidence = abs(final_score) + + # Apply earnings timing adjustments and overrides + if earnings_timing: + confidence *= (1.0 + earnings_timing.confidence_adjustment) + + # Override recommendation if needed + if earnings_timing.timing_flag == "pre_earnings": + if recommendation == "BUY": + recommendation = "HOLD" + + elif earnings_timing.timing_flag == "post_earnings": + if earnings_timing.price_change_5d and earnings_timing.price_change_5d > 15: + if recommendation == "BUY": + recommendation = "HOLD" + + # Check overbought + near 52w high + if momentum and momentum.rsi_14d and momentum.rsi_14d > 70 and momentum.near_52w_high: + if recommendation == "BUY": + recommendation = "HOLD" + confidence *= 0.7 + + # NEW v4.0.0: Risk-off confidence penalty + if market_context and market_context.risk_off_detected: + if recommendation == "BUY": + confidence *= 0.7 # Reduce BUY confidence by 30% + + # NEW v4.0.0: Geopolitical sector risk penalty + if geopolitical_risk_penalty > 0: + if recommendation == "BUY": + confidence *= (1.0 - geopolitical_risk_penalty) # Apply penalty + + # Generate supporting points + supporting_points = [] + + if earnings and earnings.actual_eps is not None: + supporting_points.append( + f"{earnings.explanation} - EPS ${earnings.actual_eps:.2f} vs ${earnings.expected_eps:.2f} expected" + ) + + if fundamentals and fundamentals.explanation: + supporting_points.append(fundamentals.explanation) + + if analysts and analysts.summary: + supporting_points.append(f"Analyst consensus: {analysts.summary}") + + if historical and historical.pattern_desc: + supporting_points.append(f"Historical pattern: {historical.pattern_desc}") + + if market_context and market_context.explanation: + supporting_points.append(f"Market: {market_context.explanation}") + + if sector and sector.explanation: + supporting_points.append(f"Sector: {sector.explanation}") + + if momentum and momentum.explanation: + supporting_points.append(f"Momentum: 
{momentum.explanation}") + + if sentiment and sentiment.explanation: + supporting_points.append(f"Sentiment: {sentiment.explanation}") + + # Generate caveats + caveats = [] + + # Add earnings timing caveats first (most important) + if earnings_timing and earnings_timing.caveats: + caveats.extend(earnings_timing.caveats) + + # Add sentiment warnings + if sentiment and sentiment.data_freshness_warnings: + caveats.extend(sentiment.data_freshness_warnings) + + # Add momentum warnings + if momentum and momentum.rsi_14d: + if momentum.rsi_14d > 70 and momentum.near_52w_high: + caveats.append("Overbought conditions - high risk entry") + + # Add sector warnings + if sector and sector.score < -0.2: + caveats.append(f"Sector {sector.sector_name} is weak despite stock fundamentals") + + # Add market warnings + if market_context and market_context.vix_status == "fear": + caveats.append(f"High market volatility (VIX {market_context.vix_level:.0f})") + + # NEW v4.0.0: Risk-off warnings + if market_context and market_context.risk_off_detected: + caveats.append(f"🛡️ RISK-OFF MODE: Flight to safety detected (GLD {market_context.gld_change_5d:+.1f}%, TLT {market_context.tlt_change_5d:+.1f}%, UUP {market_context.uup_change_5d:+.1f}%)") + + # NEW v4.0.0: Breaking news alerts + if breaking_news: + for alert in breaking_news[:2]: # Limit to 2 alerts to avoid overwhelming + caveats.append(f"⚠️ BREAKING NEWS: {alert}") + + # NEW v4.0.0: Geopolitical sector risk warnings + if geopolitical_risk_warning: + caveats.append(geopolitical_risk_warning) + + # Original caveats + if not analysts or analysts.score is None: + caveats.append("Limited or no analyst coverage") + + if not earnings: + caveats.append("No recent earnings data available") + + if len(components) < 4: + caveats.append("Analysis based on limited data components") + + if not caveats: + caveats.append("Market conditions can change rapidly") + + # Limit to 5 caveats + caveats = caveats[:5] + + # Build components dict for output + 
components_dict = {} + if earnings: + components_dict["earnings_surprise"] = { + "score": earnings.score, + "actual_eps": earnings.actual_eps, + "expected_eps": earnings.expected_eps, + "surprise_pct": earnings.surprise_pct, + "explanation": earnings.explanation, + } + + if fundamentals: + components_dict["fundamentals"] = { + "score": fundamentals.score, + **fundamentals.key_metrics, + } + + if analysts: + components_dict["analyst_sentiment"] = { + "score": analysts.score, + "consensus_rating": analysts.consensus_rating, + "price_target": analysts.price_target, + "current_price": analysts.current_price, + "upside_pct": analysts.upside_pct, + "num_analysts": analysts.num_analysts, + } + + if historical: + components_dict["historical_patterns"] = { + "score": historical.score, + "beats_last_4q": historical.beats_last_4q, + "avg_reaction_pct": historical.avg_reaction_pct, + } + + if market_context: + components_dict["market_context"] = { + "score": market_context.score, + "vix_level": market_context.vix_level, + "vix_status": market_context.vix_status, + "spy_trend_10d": market_context.spy_trend_10d, + "qqq_trend_10d": market_context.qqq_trend_10d, + "market_regime": market_context.market_regime, + "gld_change_5d": market_context.gld_change_5d, + "tlt_change_5d": market_context.tlt_change_5d, + "uup_change_5d": market_context.uup_change_5d, + "risk_off_detected": market_context.risk_off_detected, + } + + if sector: + components_dict["sector_performance"] = { + "score": sector.score, + "sector_name": sector.sector_name, + "stock_return_1m": sector.stock_return_1m, + "sector_return_1m": sector.sector_return_1m, + "relative_strength": sector.relative_strength, + "sector_trend": sector.sector_trend, + } + + if earnings_timing: + components_dict["earnings_timing"] = { + "days_until_earnings": earnings_timing.days_until_earnings, + "days_since_earnings": earnings_timing.days_since_earnings, + "timing_flag": earnings_timing.timing_flag, + "price_change_5d": 
            earnings_timing.price_change_5d,
            "confidence_adjustment": earnings_timing.confidence_adjustment,
        }

    if momentum:
        components_dict["momentum"] = {
            "score": momentum.score,
            "rsi_14d": momentum.rsi_14d,
            "rsi_status": momentum.rsi_status,
            "near_52w_high": momentum.near_52w_high,
            "near_52w_low": momentum.near_52w_low,
            "volume_ratio": momentum.volume_ratio,
        }

    if sentiment:
        components_dict["sentiment_analysis"] = {
            "score": sentiment.score,
            "indicators_available": sentiment.indicators_available,
            "fear_greed_value": sentiment.fear_greed_value,
            "fear_greed_status": sentiment.fear_greed_status,
            "short_interest_pct": sentiment.short_interest_pct,
            "days_to_cover": sentiment.days_to_cover,
            "vix_structure": sentiment.vix_structure,
            "vix_slope": sentiment.vix_slope,
            "insider_net_value": sentiment.insider_net_value,
            "put_call_ratio": sentiment.put_call_ratio,
            "data_freshness_warnings": sentiment.data_freshness_warnings,
        }

    # Final signal: confidence was scaled by timing/momentum/risk penalties above,
    # so it is abs(final_score) only as a starting point and may exceed 1.0 after
    # a positive earnings_timing.confidence_adjustment.
    return Signal(
        ticker=ticker,
        company_name=company_name,
        recommendation=recommendation,
        confidence=confidence,
        final_score=final_score,
        supporting_points=supporting_points[:5],  # Limit to 5
        caveats=caveats,  # Already limited to 5 earlier
        timestamp=datetime.now().isoformat(),
        components=components_dict,
    )


def format_output_text(signal: Signal) -> str:
    """Format signal as a human-readable text report.

    Renders the recommendation, up to five supporting points, up to five
    caveats, and a fixed disclaimer footer, separated by 77-char rules.
    """
    lines = [
        "=" * 77,
        f"STOCK ANALYSIS: {signal.ticker} ({signal.company_name})",
        f"Generated: {signal.timestamp}",
        "=" * 77,
        "",
        # NOTE(review): confidence can exceed 1.0 after upstream adjustments,
        # so this can print values above 100% — confirm whether a clamp is wanted.
        f"RECOMMENDATION: {signal.recommendation} (Confidence: {signal.confidence*100:.0f}%)",
        "",
        "SUPPORTING POINTS:",
    ]

    for point in signal.supporting_points:
        lines.append(f"• {point}")

    lines.extend([
        "",
        "CAVEATS:",
    ])

    for caveat in signal.caveats:
        lines.append(f"• {caveat}")

    lines.extend([
        "",
        "=" * 77,
        "DISCLAIMER: This analysis is for informational purposes only and does NOT",
        "constitute financial advice. Consult a licensed financial advisor before",
        "making investment decisions. Data provided by Yahoo Finance.",
        "=" * 77,
    ])

    return "\n".join(lines)


def format_output_json(signal: Signal) -> str:
    """Format signal as a JSON document (dataclass fields plus a disclaimer)."""
    output = {
        **asdict(signal),
        "disclaimer": "NOT FINANCIAL ADVICE. For informational purposes only.",
    }
    return json.dumps(output, indent=2)


def main():
    """CLI entry point: parse args, analyze each ticker (or a portfolio), print results."""
    parser = argparse.ArgumentParser(
        description="Analyze stocks using Yahoo Finance data"
    )
    parser.add_argument(
        "tickers",
        nargs="*",
        help="Stock/crypto ticker(s) to analyze"
    )
    parser.add_argument(
        "--output",
        choices=["text", "json"],
        default="text",
        help="Output format (default: text)"
    )
    parser.add_argument(
        "--verbose",
        action="store_true",
        help="Verbose output to stderr"
    )
    parser.add_argument(
        "--portfolio", "-p",
        type=str,
        help="Analyze all assets in a portfolio"
    )
    parser.add_argument(
        "--period",
        choices=["daily", "weekly", "monthly", "quarterly", "yearly"],
        help="Period for portfolio performance analysis"
    )
    parser.add_argument(
        "--no-insider",
        action="store_true",
        help="Skip insider trading analysis (faster, SEC EDGAR is slow)"
    )
    parser.add_argument(
        "--fast",
        action="store_true",
        help="Fast mode: skip slow analyses (insider, breaking news)"
    )

    args = parser.parse_args()

    # Fast mode shortcuts
    # --fast implies --no-insider (breaking news is skipped separately below).
    if args.fast:
        args.no_insider = True

    # Handle portfolio mode
    portfolio_assets = []
    portfolio_name = None
    if args.portfolio:
        try:
            # Lazy import so the script still runs when portfolio.py is absent.
            from portfolio import PortfolioStore
            store = PortfolioStore()
            portfolio = store.get_portfolio(args.portfolio)
            if not portfolio:
                # Try to find default portfolio if name not found
                default_name = store.get_default_portfolio_name()
                if default_name and args.portfolio.lower() == "default":
                    portfolio = store.get_portfolio(default_name)
                    portfolio_name = default_name
                else:
                    print(f"Error: Portfolio '{args.portfolio}' not found",
                          file=sys.stderr)
                    sys.exit(1)
            else:
                portfolio_name = portfolio.name

            if not portfolio.assets:
                print(f"Portfolio '{portfolio_name}' has no assets", file=sys.stderr)
                sys.exit(1)

            # Flatten portfolio into (ticker, quantity, cost_basis, type) tuples and
            # reuse the positional `tickers` argument for the analysis loop below.
            portfolio_assets = [(a.ticker, a.quantity, a.cost_basis, a.type) for a in portfolio.assets]
            args.tickers = [a.ticker for a in portfolio.assets]

            if args.verbose:
                print(f"Analyzing portfolio: {portfolio_name} ({len(portfolio_assets)} assets)", file=sys.stderr)

        except ImportError:
            print("Error: portfolio.py not found", file=sys.stderr)
            sys.exit(1)
        except Exception as e:
            print(f"Error loading portfolio: {e}", file=sys.stderr)
            sys.exit(1)

    if not args.tickers:
        parser.print_help()
        sys.exit(1)

    # NEW v4.0.0: Check for breaking news (market-wide, check once before analyzing tickers)
    # Check breaking news (skip in fast mode)
    breaking_news = None
    if not args.fast:
        if args.verbose:
            print(f"Checking breaking news (last 24h)...", file=sys.stderr)
        breaking_news = check_breaking_news(verbose=args.verbose)
    elif args.verbose:
        print(f"Skipping breaking news check (--fast mode)", file=sys.stderr)
    if breaking_news and args.verbose:
        print(f"  Found {len(breaking_news)} breaking news alert(s)\n", file=sys.stderr)

    results = []

    for ticker in args.tickers:
        ticker = ticker.upper()

        if args.verbose:
            print(f"\n=== Analyzing {ticker} ===\n", file=sys.stderr)

        # Fetch data
        data = fetch_stock_data(ticker, verbose=args.verbose)

        if data is None:
            # Exit code 2 distinguishes bad-ticker/data failure from usage errors (1).
            # NOTE(review): this aborts the whole batch on the first bad ticker —
            # confirm that skipping and continuing isn't the desired behavior.
            print(f"Error: Invalid ticker '{ticker}' or data unavailable", file=sys.stderr)
            sys.exit(2)

        # Get company name
        company_name = data.info.get("longName") or data.info.get("shortName") or ticker

        # Detect asset type (crypto vs stock)
        is_crypto = data.asset_type == "crypto"

        if args.verbose and is_crypto:
            print(f"  Asset type: CRYPTO (using crypto-specific analysis)", file=sys.stderr)

        # Analyze components (different for crypto vs stock)
        if is_crypto:
            # Crypto: Skip stock-specific analyses
            earnings = None
            fundamentals = None
            analysts = None
            historical = None
            earnings_timing = None
            sector = None

            # Crypto fundamentals (market cap, category, BTC correlation)
            if args.verbose:
                print(f"Analyzing crypto fundamentals...", file=sys.stderr)
            crypto_fundamentals = analyze_crypto_fundamentals(data, verbose=args.verbose)

            # Convert crypto fundamentals to regular Fundamentals for synthesize_signal
            if crypto_fundamentals:
                fundamentals = Fundamentals(
                    score=crypto_fundamentals.score,
                    key_metrics={
                        "market_cap": crypto_fundamentals.market_cap,
                        "market_cap_rank": crypto_fundamentals.market_cap_rank,
                        "category": crypto_fundamentals.category,
                        "btc_correlation": crypto_fundamentals.btc_correlation,
                    },
                    explanation=crypto_fundamentals.explanation,
                )
        else:
            # Stock: Full analysis
            earnings = analyze_earnings_surprise(data)
            fundamentals = analyze_fundamentals(data)
            analysts = analyze_analyst_sentiment(data)
            historical = analyze_historical_patterns(data)

            # Analyze earnings timing (stocks only)
            if args.verbose:
                print(f"Checking earnings timing...", file=sys.stderr)
            earnings_timing = analyze_earnings_timing(data)

            # Analyze sector performance (stocks only)
            if args.verbose:
                print(f"Analyzing sector performance...", file=sys.stderr)
            sector = analyze_sector_performance(data, verbose=args.verbose)

        # Market context (both crypto and stock)
        # NOTE(review): looks market-wide (takes no ticker data) yet is re-run for
        # every ticker in the loop — consider hoisting; confirm it is cache-backed.
        if args.verbose:
            print(f"Analyzing market context...", file=sys.stderr)
        market_context = analyze_market_context(verbose=args.verbose)

        # Momentum (both crypto and stock)
        if args.verbose:
            print(f"Analyzing momentum...", file=sys.stderr)
        momentum = analyze_momentum(data)

        # Sentiment (stocks get full sentiment, crypto gets limited)
        if args.verbose:
            print(f"Analyzing market sentiment...", file=sys.stderr)
        if is_crypto:
            # Skip insider trading and put/call for crypto
            sentiment = None
        else:
            # analyze_sentiment is async; run it to completion synchronously here.
            sentiment = asyncio.run(analyze_sentiment(data, verbose=args.verbose, skip_insider=args.no_insider))

        # Geopolitical risks (stocks only)
        if is_crypto:
            geopolitical_risk_warning = None
            geopolitical_risk_penalty = 0.0
        else:
            sector_name = data.info.get("sector")
            geopolitical_risk_warning, geopolitical_risk_penalty = check_sector_geopolitical_risk(
                ticker=ticker,
                sector=sector_name,
                breaking_news=breaking_news,
                verbose=args.verbose
            )

        if args.verbose:
            print(f"Components analyzed:", file=sys.stderr)
            if is_crypto:
                print(f"  Crypto Fundamentals: {'✓' if fundamentals else '✗'}", file=sys.stderr)
                print(f"  Market Context: {'✓' if market_context else '✗'}", file=sys.stderr)
                print(f"  Momentum: {'✓' if momentum else '✗'}", file=sys.stderr)
                print(f"  (Earnings, Sector, Sentiment: N/A for crypto)\n", file=sys.stderr)
            else:
                print(f"  Earnings: {'✓' if earnings else '✗'}", file=sys.stderr)
                print(f"  Fundamentals: {'✓' if fundamentals else '✗'}", file=sys.stderr)
                print(f"  Analysts: {'✓' if analysts and analysts.score else '✗'}", file=sys.stderr)
                print(f"  Historical: {'✓' if historical else '✗'}", file=sys.stderr)
                print(f"  Market Context: {'✓' if market_context else '✗'}", file=sys.stderr)
                print(f"  Sector: {'✓' if sector else '✗'}", file=sys.stderr)
                print(f"  Earnings Timing: {'✓' if earnings_timing else '✗'}", file=sys.stderr)
                print(f"  Momentum: {'✓' if momentum else '✗'}", file=sys.stderr)
                print(f"  Sentiment: {'✓' if sentiment else '✗'}\n", file=sys.stderr)

        # Synthesize signal
        signal = synthesize_signal(
            ticker=ticker,
            company_name=company_name,
            earnings=earnings,
            fundamentals=fundamentals,
            analysts=analysts,
            historical=historical,
            market_context=market_context,  # NEW
            sector=sector,  # NEW
            earnings_timing=earnings_timing,  # NEW
            momentum=momentum,  # NEW
            sentiment=sentiment,  # NEW
            breaking_news=breaking_news,  # NEW v4.0.0
            geopolitical_risk_warning=geopolitical_risk_warning,  # NEW v4.0.0
            geopolitical_risk_penalty=geopolitical_risk_penalty,  # NEW v4.0.0
        )

        results.append(signal)

    # Output results
    if args.output == "json":
        if len(results) == 1:
            print(format_output_json(results[0]))
        else:
            output_data = [asdict(r) for r in results]
            # Add portfolio summary if in portfolio mode
            if portfolio_assets:
                portfolio_summary = generate_portfolio_summary(
                    results, portfolio_assets, portfolio_name, args.period
                )
                output_data = {
                    "portfolio": portfolio_name,
                    "assets": output_data,
                    "summary": portfolio_summary,
                }
            print(json.dumps(output_data, indent=2))
    else:
        for i, signal in enumerate(results):
            if i > 0:
                print("\n")
            print(format_output_text(signal))

        # Print portfolio summary if in portfolio mode
        if portfolio_assets:
            print_portfolio_summary(results, portfolio_assets, portfolio_name, args.period)


def generate_portfolio_summary(
    results: list,
    portfolio_assets: list[tuple[str, float, float, str]],
    portfolio_name: str,
    period: str | None = None,
) -> dict:
    """Generate portfolio summary data.

    Re-fetches a live price per asset via yfinance (network call per ticker),
    then computes total cost, value, P&L, optional period return, and flags
    assets above 30% concentration.
    """
    # Map results by ticker
    result_map = {r.ticker: r for r in results}

    # Calculate portfolio metrics
    total_cost = 0.0
    total_value = 0.0
    asset_values = []

    for ticker, quantity, cost_basis, asset_type in portfolio_assets:
        cost_total = quantity * cost_basis
        total_cost += cost_total

        # Get current price from yfinance
        try:
            stock = yf.Ticker(ticker)
            current_price = stock.info.get("regularMarketPrice", 0) or 0
            current_value = quantity * current_price
            total_value += current_value
            asset_values.append((ticker, current_value, cost_total, asset_type))
        except Exception:
            # NOTE(review): a failed fetch records the asset at value 0, which
            # silently understates total_value and overstates losses — confirm
            # whether failed assets should instead be reported/excluded.
            asset_values.append((ticker, 0, cost_total, asset_type))

    # Calculate period returns if requested
    period_return = None
    if period and total_value > 0:
        # Calendar-day approximations for each named period.
        period_days = {
            "daily": 1,
            "weekly": 7,
            "monthly": 30,
            "quarterly": 90,
            "yearly": 365,
        }.get(period, 30)

        period_return = calculate_portfolio_period_return(portfolio_assets, period_days)

    # Concentration analysis
    concentrations = []
    if total_value > 0:
        for ticker, value, _, asset_type in asset_values:
            if value > 0:
                pct = value / total_value * 100
                if pct > 30:
                    concentrations.append(f"{ticker}: {pct:.1f}%")

    # Build summary
    total_pnl = total_value - total_cost
    total_pnl_pct = (total_pnl / total_cost * 100) if total_cost > 0 else 0

    summary = {
        "portfolio_name": portfolio_name,
        "total_cost": total_cost,
        "total_value": total_value,
        "total_pnl": total_pnl,
        "total_pnl_pct": total_pnl_pct,
        "asset_count": len(portfolio_assets),
        "concentration_warnings": concentrations if concentrations else None,
    }

    if period_return is not None:
        summary["period"] = period
        summary["period_return_pct"] = period_return

    return summary


def calculate_portfolio_period_return(
    portfolio_assets: list[tuple[str, float, float, str]],
    period_days: int,
) -> float | None:
    """Calculate portfolio return over a period using historical prices.

    Returns the percentage change of total portfolio value over roughly
    ``period_days`` days, or None if no usable history was retrieved.
    """
    try:
        total_start_value = 0.0
        total_current_value = 0.0

        for ticker, quantity, _, _ in portfolio_assets:
            stock = yf.Ticker(ticker)
            # +5 days of padding to cover non-trading days in the window.
            hist = stock.history(period=f"{period_days + 5}d")

            if hist.empty or len(hist) < 2:
                # Assets with no history are silently excluded from both sides
                # of the ratio, so the "portfolio" return may cover a subset.
                continue

            # Get price at period start and now
            # NOTE(review): iloc[0] is the first row of the PADDED window, so the
            # "start" price can predate the intended period by up to ~5 trading
            # days — confirm whether indexing by date would be more accurate.
            current_price = hist["Close"].iloc[-1]
            start_price = hist["Close"].iloc[0]

            total_current_value += quantity * current_price
            total_start_value += quantity * start_price

        if total_start_value > 0:
            return (total_current_value - total_start_value) / total_start_value * 100

    except Exception:
        # Best-effort metric: any failure degrades to "no period return" rather
        # than aborting the summary.
        pass

    return None


def print_portfolio_summary(
    results: list,
    portfolio_assets: list[tuple[str, float, float, str]],
    portfolio_name: str,
    period: str | None = None,
) -> None:
    """Print portfolio summary in text format."""
    summary = generate_portfolio_summary(results, portfolio_assets, portfolio_name, period)

    print("\n" + "=" * 77)
    print(f"PORTFOLIO SUMMARY: {portfolio_name}")
    print("=" * 77)

    # Value overview
    total_cost = summary["total_cost"]
    total_value = summary["total_value"]
    total_pnl = summary["total_pnl"]
    total_pnl_pct = summary["total_pnl_pct"]

    print(f"\nTotal Cost: ${total_cost:,.2f}")
    print(f"Current Value: ${total_value:,.2f}")
    # Negative values already render their own '-' sign; only prepend '+'.
    pnl_sign = "+" if total_pnl >= 0 else ""
    print(f"Total P&L: {pnl_sign}${total_pnl:,.2f} ({pnl_sign}{total_pnl_pct:.1f}%)")

    # Period return
    if "period_return_pct" in summary:
        period_return = summary["period_return_pct"]
        period_sign = "+" if period_return >= 0 else ""
        print(f"{summary['period'].capitalize()} Return: {period_sign}{period_return:.1f}%")

    # Concentration warnings
    if summary.get("concentration_warnings"):
        print("\n⚠️ CONCENTRATION WARNINGS:")
        for warning in summary["concentration_warnings"]:
            print(f"  • {warning} (>30% of portfolio)")

    # Recommendation summary
    recommendations = {"BUY": 0, "HOLD": 0, "SELL": 0}
    for r in results:
        recommendations[r.recommendation] = recommendations.get(r.recommendation, 0) + 1

    print(f"\nRECOMMENDATIONS: {recommendations['BUY']} BUY | {recommendations['HOLD']} HOLD | {recommendations['SELL']} SELL")
    print("=" * 77)


if __name__ == "__main__":
    main()
diff --git a/scripts/dividends.py b/scripts/dividends.py
new file mode 100644
index 0000000..901f34f
--- /dev/null
+++ b/scripts/dividends.py
@@ -0,0 +1,365 @@
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.10"
# dependencies = [
#     "yfinance>=0.2.40",
#     "pandas>=2.0.0",
# ]
# ///
"""
Dividend Analysis Module.

Analyzes dividend metrics for income investors:
- Dividend Yield
- Payout Ratio
- Dividend Growth Rate (5Y CAGR)
- Dividend Safety Score
- Ex-Dividend Date

Usage:
    uv run dividends.py AAPL
    uv run dividends.py JNJ PG KO --output json
"""

import argparse
import json
import sys
from dataclasses import dataclass, asdict
from datetime import datetime

import pandas as pd
import yfinance as yf


@dataclass
class DividendAnalysis:
    # Result record for one ticker; None fields mean "not available".
    ticker: str
    company_name: str

    # Basic metrics
    dividend_yield: float | None  # Annual yield %
    annual_dividend: float | None  # Annual dividend per share
    current_price: float | None

    # Payout analysis
    payout_ratio: float | None  # Dividend / EPS
    payout_status: str  # "safe", "moderate", "high", "unsustainable"

    # Growth
    dividend_growth_5y: float | None  # 5-year CAGR %
    consecutive_years: int | None  # Years of consecutive increases
    dividend_history: list[dict] | None  # Last 5 years

    # Timing
    ex_dividend_date: str | None
    payment_frequency: str | None  # "quarterly", "monthly", "annual"

    # Safety score (0-100)
    safety_score: int
    safety_factors: list[str]

    # Verdict
    income_rating: str  # "excellent", "good", "moderate", "poor", "no_dividend"
    summary: str


def analyze_dividends(ticker: str, verbose: bool = False) -> DividendAnalysis | None:
    """Analyze dividend metrics for a stock.

    Fetches info and dividend history from yfinance; returns a populated
    DividendAnalysis, a "no_dividend" record for non-payers, or None on error.
    """
    try:
        stock = yf.Ticker(ticker)
        info = stock.info

        company_name = info.get("longName") or info.get("shortName") or ticker
        current_price = info.get("regularMarketPrice") or info.get("currentPrice")

        # Basic dividend info
        dividend_yield = info.get("dividendYield")
        if dividend_yield:
            # NOTE(review): assumes yfinance returns dividendYield as a fraction;
            # some yfinance releases return it already as a percentage — verify
            # against the pinned yfinance>=0.2.40 before trusting the ×100.
            dividend_yield = dividend_yield * 100  # Convert to percentage

        annual_dividend = info.get("dividendRate")

        # No dividend
        if not annual_dividend or annual_dividend == 0:
            return DividendAnalysis(
                ticker=ticker,
                company_name=company_name,
                dividend_yield=None,
                annual_dividend=None,
                current_price=current_price,
                payout_ratio=None,
                payout_status="no_dividend",
                dividend_growth_5y=None,
                consecutive_years=None,
                dividend_history=None,
                ex_dividend_date=None,
                payment_frequency=None,
                safety_score=0,
                safety_factors=["No dividend paid"],
                income_rating="no_dividend",
                summary=f"{ticker} does not pay a dividend.",
            )

        # Payout ratio
        trailing_eps = info.get("trailingEps")
        payout_ratio = None
        payout_status = "unknown"

        if trailing_eps and trailing_eps > 0 and annual_dividend:
            payout_ratio = (annual_dividend / trailing_eps) * 100

            if payout_ratio < 40:
                payout_status = "safe"
            elif payout_ratio < 60:
                payout_status = "moderate"
            elif payout_ratio < 80:
                payout_status = "high"
            else:
                payout_status = "unsustainable"

        # Dividend history (for growth calculation)
        dividends = stock.dividends
        dividend_history = None
        dividend_growth_5y = None
        consecutive_years = None

        if dividends is not None and len(dividends) > 0:
            # Group by year
            dividends_df = dividends.reset_index()
            dividends_df["Year"] = pd.to_datetime(dividends_df["Date"]).dt.year
            # Descending: index 0 is the most recent calendar year.
            yearly = dividends_df.groupby("Year")["Dividends"].sum().sort_index(ascending=False)

            # Last 5 years history
            dividend_history = []
            for year in yearly.head(5).index:
                dividend_history.append({
                    "year": int(year),
                    "total": round(float(yearly[year]), 4),
                })

            # Calculate 5-year CAGR
            # NOTE(review): yearly.iloc[0] is the current calendar year, which may
            # be only partially elapsed — the CAGR can be understated mid-year.
            if len(yearly) >= 5:
                current_div = yearly.iloc[0]
                div_5y_ago = yearly.iloc[4]

                if div_5y_ago > 0 and current_div > 0:
                    dividend_growth_5y = ((current_div / div_5y_ago) ** (1/5) - 1) * 100

            # Count consecutive years of increases
            # NOTE(review): BUG — `yearly` is sorted DESCENDING, so `prev_div` is the
            # NEWER year. `div >= prev_div` tests older >= newer, i.e. it counts a
            # streak of NON-INCREASING dividends, the opposite of "consecutive
            # increases". The comparison should be `prev_div >= div`. Also note the
            # partial current year at iloc[0] can spuriously break a real streak.
            consecutive_years = 0
            prev_div = None
            for div in yearly.values:
                if prev_div is not None:
                    if div >= prev_div:
                        consecutive_years += 1
                    else:
                        break
                prev_div = div

        # Ex-dividend date
        ex_dividend_date = info.get("exDividendDate")
        if ex_dividend_date:
            # Epoch seconds -> local-date string. NOTE(review): fromtimestamp uses
            # the local timezone; confirm UTC isn't expected here.
            ex_dividend_date = datetime.fromtimestamp(ex_dividend_date).strftime("%Y-%m-%d")

        # Payment frequency
        payment_frequency = None
        if dividends is not None and len(dividends) >= 4:
            # Count dividends in last year
            one_year_ago = pd.Timestamp.now() - pd.DateOffset(years=1)
            recent_divs = dividends[dividends.index > one_year_ago]
            count = len(recent_divs)

            if count >= 10:
                payment_frequency = "monthly"
            elif count >= 3:
                payment_frequency = "quarterly"
            elif count >= 1:
                payment_frequency = "annual"

        # Safety score calculation (0-100)
        safety_score = 50  # Base score
        safety_factors = []

        # Payout ratio factor (+/- 20)
        if payout_ratio:
            if payout_ratio < 40:
                safety_score += 20
                safety_factors.append(f"Low payout ratio ({payout_ratio:.0f}%)")
            elif payout_ratio < 60:
                safety_score += 10
                safety_factors.append(f"Moderate payout ratio ({payout_ratio:.0f}%)")
            elif payout_ratio < 80:
                safety_score -= 10
                safety_factors.append(f"High payout ratio ({payout_ratio:.0f}%)")
            else:
                safety_score -= 20
                safety_factors.append(f"Unsustainable payout ratio ({payout_ratio:.0f}%)")

        # Growth factor (+/- 15)
        if dividend_growth_5y:
            if dividend_growth_5y > 10:
                safety_score += 15
                safety_factors.append(f"Strong dividend growth ({dividend_growth_5y:.1f}% CAGR)")
            elif dividend_growth_5y > 5:
                safety_score += 10
                safety_factors.append(f"Good dividend growth ({dividend_growth_5y:.1f}% CAGR)")
            elif dividend_growth_5y > 0:
                safety_score += 5
                safety_factors.append(f"Positive dividend growth ({dividend_growth_5y:.1f}% CAGR)")
            else:
                safety_score -= 15
                safety_factors.append(f"Dividend declining ({dividend_growth_5y:.1f}% CAGR)")

        # Consecutive years factor (+/- 15)
        # (Depends on the consecutive_years count flagged as inverted above.)
        if consecutive_years:
            if consecutive_years >= 25:
                safety_score += 15
                safety_factors.append(f"Dividend Aristocrat ({consecutive_years}+ years)")
            elif consecutive_years >= 10:
                safety_score += 10
                safety_factors.append(f"Long dividend history ({consecutive_years} years)")
            elif consecutive_years >= 5:
                safety_score += 5
                safety_factors.append(f"Consistent dividend ({consecutive_years} years)")

        # Yield factor (high yield can be risky)
        if dividend_yield:
            if dividend_yield > 8:
                safety_score -= 10
                safety_factors.append(f"Very high yield ({dividend_yield:.1f}%) - verify sustainability")
            elif dividend_yield < 1:
                # Informational only: no score change for low yield.
                safety_factors.append(f"Low yield ({dividend_yield:.2f}%)")

        # Clamp score
        safety_score = max(0, min(100, safety_score))

        # Income rating
        if safety_score >= 80:
            income_rating = "excellent"
        elif safety_score >= 60:
            income_rating = "good"
        elif safety_score >= 40:
            income_rating = "moderate"
        else:
            income_rating = "poor"

        # Summary
        summary_parts = []
        if dividend_yield:
            summary_parts.append(f"{dividend_yield:.2f}% yield")
        if payout_ratio:
            summary_parts.append(f"{payout_ratio:.0f}% payout")
        if dividend_growth_5y:
            summary_parts.append(f"{dividend_growth_5y:+.1f}% 5Y growth")
        if consecutive_years and consecutive_years >= 5:
            summary_parts.append(f"{consecutive_years}Y streak")

        summary = f"{ticker}: {', '.join(summary_parts)}. Rating: {income_rating.upper()}"

        return DividendAnalysis(
            ticker=ticker,
            company_name=company_name,
            dividend_yield=round(dividend_yield, 2) if dividend_yield else None,
            annual_dividend=round(annual_dividend, 4) if annual_dividend else None,
            current_price=current_price,
            payout_ratio=round(payout_ratio, 1) if payout_ratio else None,
            payout_status=payout_status,
            dividend_growth_5y=round(dividend_growth_5y, 2) if dividend_growth_5y else None,
            consecutive_years=consecutive_years,
            dividend_history=dividend_history,
            ex_dividend_date=ex_dividend_date,
            payment_frequency=payment_frequency,
            safety_score=safety_score,
            safety_factors=safety_factors,
            income_rating=income_rating,
            summary=summary,
        )

    except Exception as e:
        # Best-effort: any fetch/parse failure yields None; caller reports it.
        if verbose:
            print(f"Error analyzing {ticker}: {e}", file=sys.stderr)
        return None


def format_text(analysis: DividendAnalysis) -> str:
    """Format dividend analysis as text."""
    lines = [
        "=" * 60,
        f"DIVIDEND ANALYSIS: {analysis.ticker} ({analysis.company_name})",
        "=" * 60,
        "",
    ]

    if analysis.income_rating == "no_dividend":
        lines.append("This stock does not pay a dividend.")
        lines.append("=" * 60)
        return "\n".join(lines)

    # Yield & Price
    # NOTE(review): current_price and payout_ratio can be None for a dividend
    # payer (missing price/EPS in info); the :.2f / :.1f formats below would
    # raise TypeError — confirm whether N/A fallbacks are needed here too.
    lines.append(f"Current Price:   ${analysis.current_price:.2f}")
    lines.append(f"Annual Dividend: ${analysis.annual_dividend:.2f}")
    lines.append(f"Dividend Yield:  {analysis.dividend_yield:.2f}%")
    lines.append(f"Payment Freq:    {analysis.payment_frequency or 'Unknown'}")
    if analysis.ex_dividend_date:
        lines.append(f"Ex-Dividend:     {analysis.ex_dividend_date}")

    lines.append("")

    # Payout & Safety
    lines.append(f"Payout Ratio:    {analysis.payout_ratio:.1f}% ({analysis.payout_status})")
    lines.append(f"5Y Div Growth:   {analysis.dividend_growth_5y:+.1f}%" if analysis.dividend_growth_5y else "5Y Div Growth:   N/A")
    if analysis.consecutive_years:
        lines.append(f"Consecutive Yrs: {analysis.consecutive_years}")

    lines.append("")
    lines.append(f"SAFETY SCORE: {analysis.safety_score}/100")
    lines.append(f"INCOME RATING: {analysis.income_rating.upper()}")

    lines.append("")
    lines.append("Safety Factors:")
    for factor in analysis.safety_factors:
        lines.append(f"  • {factor}")

    # History
    if analysis.dividend_history:
        lines.append("")
        lines.append("Dividend History:")
        for h in analysis.dividend_history[:5]:
            lines.append(f"  {h['year']}: ${h['total']:.2f}")

    lines.append("")
    lines.append("=" * 60)

    return "\n".join(lines)


def main():
    """CLI entry point: analyze each ticker and print text or JSON output."""
    parser = argparse.ArgumentParser(description="Dividend Analysis")
    parser.add_argument("tickers", nargs="+", help="Stock ticker(s)")
    parser.add_argument("--output", choices=["text", "json"], default="text")
    parser.add_argument("--verbose", "-v", action="store_true")

    args = parser.parse_args()

    results = []
    for ticker in args.tickers:
        analysis = analyze_dividends(ticker.upper(), verbose=args.verbose)
        if analysis:
            results.append(analysis)
        else:
            # Failures are reported but do not abort the remaining tickers.
            print(f"Error: Could not analyze {ticker}", file=sys.stderr)

    if args.output == "json":
        # Single ticker emits a bare object; multiple emit a JSON array.
        if len(results) == 1:
            print(json.dumps(asdict(results[0]), indent=2))
        else:
            print(json.dumps([asdict(r) for r in results], indent=2))
    else:
        for i, analysis in enumerate(results):
            if i > 0:
                print("\n")
            print(format_text(analysis))


if __name__ == "__main__":
    main()
diff --git a/scripts/hot_scanner.py b/scripts/hot_scanner.py
new file mode 100644
index 0000000..fadf25f
--- /dev/null
+++ b/scripts/hot_scanner.py
@@ -0,0 +1,582 @@
#!/usr/bin/env python3
"""
🔥 HOT SCANNER v2 - Find viral stocks & crypto trends
Now with Twitter/X, Reddit, and improved Yahoo Finance
"""

import json
import urllib.request
import urllib.error
import xml.etree.ElementTree as ET
import gzip
import io
import subprocess
import os
from datetime import datetime, timezone
from pathlib import Path
import re
import ssl
from collections import defaultdict
from concurrent.futures import 
ThreadPoolExecutor, as_completed + +# Load .env file if exists +ENV_FILE = Path(__file__).parent.parent / ".env" +if ENV_FILE.exists(): + with open(ENV_FILE) as f: + for line in f: + line = line.strip() + if line and not line.startswith("#") and "=" in line: + key, value = line.split("=", 1) + os.environ[key] = value + +# Cache directory +CACHE_DIR = Path(__file__).parent.parent / "cache" +CACHE_DIR.mkdir(exist_ok=True) + +# SSL context +SSL_CONTEXT = ssl.create_default_context() + +class HotScanner: + def __init__(self, include_social=True): + self.include_social = include_social + self.results = { + "timestamp": datetime.now(timezone.utc).isoformat(), + "crypto": [], + "stocks": [], + "news": [], + "movers": [], + "social": [] + } + self.mentions = defaultdict(lambda: {"count": 0, "sources": [], "sentiment_hints": []}) + self.headers = { + "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36", + "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", + "Accept-Language": "en-US,en;q=0.5", + "Accept-Encoding": "gzip, deflate", + } + + def _fetch(self, url, timeout=15): + """Fetch URL with gzip support.""" + req = urllib.request.Request(url, headers=self.headers) + with urllib.request.urlopen(req, timeout=timeout, context=SSL_CONTEXT) as resp: + data = resp.read() + # Handle gzip + if resp.info().get('Content-Encoding') == 'gzip' or data[:2] == b'\x1f\x8b': + data = gzip.decompress(data) + return data.decode('utf-8', errors='replace') + + def _fetch_json(self, url, timeout=15): + """Fetch and parse JSON.""" + return json.loads(self._fetch(url, timeout)) + + def scan_all(self): + """Run all scans in parallel.""" + print("🔍 Scanning for hot trends...\n") + + tasks = [ + ("CoinGecko Trending", self.scan_coingecko_trending), + ("CoinGecko Movers", self.scan_coingecko_gainers_losers), + ("Google News Finance", self.scan_google_news_finance), + ("Google News Crypto", 
self.scan_google_news_crypto), + ("Yahoo Movers", self.scan_yahoo_movers), + ] + + if self.include_social: + tasks.extend([ + ("Reddit WSB", self.scan_reddit_wsb), + ("Reddit Crypto", self.scan_reddit_crypto), + ("Twitter/X", self.scan_twitter), + ]) + + with ThreadPoolExecutor(max_workers=8) as executor: + futures = {executor.submit(task[1]): task[0] for task in tasks} + for future in as_completed(futures): + name = futures[future] + try: + future.result() + except Exception as e: + print(f" ❌ {name}: {str(e)[:50]}") + + return self.results + + def scan_coingecko_trending(self): + """Get trending crypto from CoinGecko.""" + print(" 📊 CoinGecko Trending...") + try: + url = "https://api.coingecko.com/api/v3/search/trending" + data = self._fetch_json(url) + + for item in data.get("coins", [])[:10]: + coin = item.get("item", {}) + price_data = coin.get("data", {}) + price_change = price_data.get("price_change_percentage_24h", {}).get("usd", 0) + + entry = { + "symbol": coin.get("symbol", "").upper(), + "name": coin.get("name", ""), + "rank": coin.get("market_cap_rank"), + "price_change_24h": round(price_change, 2) if price_change else None, + "source": "coingecko_trending" + } + self.results["crypto"].append(entry) + + sym = entry["symbol"] + self.mentions[sym]["count"] += 2 # Trending gets extra weight + self.mentions[sym]["sources"].append("CoinGecko Trending") + if price_change: + direction = "🚀 bullish" if price_change > 0 else "📉 bearish" + self.mentions[sym]["sentiment_hints"].append(f"{direction} ({price_change:+.1f}%)") + + print(f" ✅ {len(data.get('coins', []))} trending coins") + except Exception as e: + print(f" ❌ CoinGecko trending: {e}") + + def scan_coingecko_gainers_losers(self): + """Get top gainers/losers.""" + print(" 📈 CoinGecko Movers...") + try: + url = "https://api.coingecko.com/api/v3/coins/markets?vs_currency=usd&order=market_cap_desc&per_page=100&page=1&price_change_percentage=24h" + data = self._fetch_json(url) + + sorted_data = sorted(data, 
key=lambda x: abs(x.get("price_change_percentage_24h") or 0), reverse=True) + + count = 0 + for coin in sorted_data[:20]: + change = coin.get("price_change_percentage_24h", 0) + if abs(change or 0) > 3: + entry = { + "symbol": coin.get("symbol", "").upper(), + "name": coin.get("name", ""), + "price": coin.get("current_price"), + "change_24h": round(change, 2) if change else None, + "volume": coin.get("total_volume"), + "source": "coingecko_movers" + } + self.results["movers"].append(entry) + count += 1 + + sym = entry["symbol"] + self.mentions[sym]["count"] += 1 + self.mentions[sym]["sources"].append("CoinGecko Movers") + direction = "🚀 pumping" if change > 0 else "📉 dumping" + self.mentions[sym]["sentiment_hints"].append(f"{direction} ({change:+.1f}%)") + + print(f" ✅ {count} significant movers") + except Exception as e: + print(f" ❌ CoinGecko movers: {e}") + + def scan_google_news_finance(self): + """Get finance news from Google News RSS.""" + print(" 📰 Google News Finance...") + try: + # Business news topic + url = "https://news.google.com/rss/topics/CAAqJggKIiBDQkFTRWdvSUwyMHZNRGx6TVdZU0FtVnVHZ0pWVXlnQVAB?hl=en-US&gl=US&ceid=US:en" + text = self._fetch(url) + root = ET.fromstring(text) + items = root.findall(".//item") + + for item in items[:15]: + title_elem = item.find("title") + title = title_elem.text if title_elem is not None else "" + tickers = self._extract_tickers(title) + + news_entry = { + "title": title, + "tickers_mentioned": tickers, + "source": "google_news_finance" + } + self.results["news"].append(news_entry) + + for ticker in tickers: + self.mentions[ticker]["count"] += 1 + self.mentions[ticker]["sources"].append("Google News") + self.mentions[ticker]["sentiment_hints"].append(f"📰 {title[:40]}...") + + print(f" ✅ {len(items)} news items") + except Exception as e: + print(f" ❌ Google News Finance: {e}") + + def scan_google_news_crypto(self): + """Search for crypto news.""" + print(" 📰 Google News Crypto...") + try: + url = 
"https://news.google.com/rss/search?q=bitcoin+OR+ethereum+OR+crypto+crash+OR+crypto+pump&hl=en-US&gl=US&ceid=US:en" + text = self._fetch(url) + root = ET.fromstring(text) + items = root.findall(".//item") + + crypto_keywords = { + "bitcoin": "BTC", "btc": "BTC", "ethereum": "ETH", "eth": "ETH", + "solana": "SOL", "xrp": "XRP", "ripple": "XRP", "dogecoin": "DOGE", + "cardano": "ADA", "polkadot": "DOT", "avalanche": "AVAX", + } + + for item in items[:12]: + title_elem = item.find("title") + title = title_elem.text if title_elem is not None else "" + tickers = self._extract_tickers(title) + + for word, ticker in crypto_keywords.items(): + if word in title.lower(): + tickers.append(ticker) + tickers = list(set(tickers)) + + if tickers: + news_entry = { + "title": title, + "tickers_mentioned": tickers, + "source": "google_news_crypto" + } + self.results["news"].append(news_entry) + + for ticker in tickers: + self.mentions[ticker]["count"] += 1 + self.mentions[ticker]["sources"].append("Google News Crypto") + + print(f" ✅ Processed crypto news") + except Exception as e: + print(f" ❌ Google News Crypto: {e}") + + def scan_yahoo_movers(self): + """Scrape Yahoo Finance movers with gzip support.""" + print(" 📈 Yahoo Finance Movers...") + categories = [ + ("gainers", "https://finance.yahoo.com/gainers/"), + ("losers", "https://finance.yahoo.com/losers/"), + ("most_active", "https://finance.yahoo.com/most-active/") + ] + + for category, url in categories: + try: + text = self._fetch(url, timeout=12) + + # Multiple patterns for ticker extraction + tickers = [] + # Pattern 1: data-symbol attribute + tickers.extend(re.findall(r'data-symbol="([A-Z]{1,5})"', text)) + # Pattern 2: ticker in URL + tickers.extend(re.findall(r'/quote/([A-Z]{1,5})[/"\?]', text)) + # Pattern 3: fin-streamer + tickers.extend(re.findall(r'fin-streamer[^>]*symbol="([A-Z]{1,5})"', text)) + + unique_tickers = list(dict.fromkeys(tickers))[:15] + + for ticker in unique_tickers: + # Skip common false positives + 
if ticker in ['USA', 'CEO', 'IPO', 'ETF', 'SEC', 'FDA', 'NYSE', 'API']: + continue + self.results["stocks"].append({ + "symbol": ticker, + "category": category, + "source": f"yahoo_{category}" + }) + self.mentions[ticker]["count"] += 1 + self.mentions[ticker]["sources"].append(f"Yahoo {category.replace('_', ' ').title()}") + + if unique_tickers: + print(f" ✅ Yahoo {category}: {len(unique_tickers)} tickers") + except Exception as e: + print(f" ⚠️ Yahoo {category}: {str(e)[:30]}") + + def scan_reddit_wsb(self): + """Scrape r/wallstreetbets for hot stocks.""" + print(" 🦍 Reddit r/wallstreetbets...") + try: + # Use old.reddit.com (more scrape-friendly) + url = "https://old.reddit.com/r/wallstreetbets/hot/.json" + headers = {**self.headers, "Accept": "application/json"} + req = urllib.request.Request(url, headers=headers) + + with urllib.request.urlopen(req, timeout=15, context=SSL_CONTEXT) as resp: + data = resp.read() + if data[:2] == b'\x1f\x8b': + data = gzip.decompress(data) + posts = json.loads(data.decode('utf-8')) + + tickers_found = [] + for post in posts.get("data", {}).get("children", [])[:25]: + title = post.get("data", {}).get("title", "") + score = post.get("data", {}).get("score", 0) + + # Extract tickers + tickers = self._extract_tickers(title) + for ticker in tickers: + if ticker not in ['USA', 'CEO', 'IPO', 'DD', 'WSB', 'YOLO', 'FD']: + weight = 2 if score > 1000 else 1 + self.mentions[ticker]["count"] += weight + self.mentions[ticker]["sources"].append("Reddit WSB") + self.mentions[ticker]["sentiment_hints"].append(f"🦍 WSB: {title[:35]}...") + tickers_found.append(ticker) + + self.results["social"].append({ + "platform": "reddit_wsb", + "title": title[:100], + "score": score, + "tickers": tickers + }) + + print(f" ✅ WSB: {len(set(tickers_found))} tickers mentioned") + except Exception as e: + print(f" ❌ Reddit WSB: {str(e)[:40]}") + + def scan_reddit_crypto(self): + """Scrape r/cryptocurrency for hot coins.""" + print(" 💎 Reddit r/cryptocurrency...") 
+ try: + url = "https://old.reddit.com/r/cryptocurrency/hot/.json" + headers = {**self.headers, "Accept": "application/json"} + req = urllib.request.Request(url, headers=headers) + + with urllib.request.urlopen(req, timeout=15, context=SSL_CONTEXT) as resp: + data = resp.read() + if data[:2] == b'\x1f\x8b': + data = gzip.decompress(data) + posts = json.loads(data.decode('utf-8')) + + crypto_keywords = { + "bitcoin": "BTC", "btc": "BTC", "ethereum": "ETH", "eth": "ETH", + "solana": "SOL", "sol": "SOL", "xrp": "XRP", "cardano": "ADA", + "dogecoin": "DOGE", "doge": "DOGE", "shiba": "SHIB", "pepe": "PEPE", + "avalanche": "AVAX", "polkadot": "DOT", "chainlink": "LINK", + } + + tickers_found = [] + for post in posts.get("data", {}).get("children", [])[:20]: + title = post.get("data", {}).get("title", "").lower() + score = post.get("data", {}).get("score", 0) + + for word, ticker in crypto_keywords.items(): + if word in title: + weight = 2 if score > 500 else 1 + self.mentions[ticker]["count"] += weight + self.mentions[ticker]["sources"].append("Reddit Crypto") + tickers_found.append(ticker) + + print(f" ✅ r/crypto: {len(set(tickers_found))} coins mentioned") + except Exception as e: + print(f" ❌ Reddit Crypto: {str(e)[:40]}") + + def scan_twitter(self): + """Use bird CLI to get trending finance/crypto tweets.""" + print(" 🐦 Twitter/X...") + try: + # Find bird binary + bird_paths = [ + "/home/clawdbot/.nvm/versions/node/v24.12.0/bin/bird", + "/usr/local/bin/bird", + "bird" + ] + bird_bin = None + for p in bird_paths: + if Path(p).exists() or p == "bird": + bird_bin = p + break + + if not bird_bin: + print(" ⚠️ Twitter: bird not found") + return + + # Search for finance tweets + searches = [ + ("stocks", "stock OR $SPY OR $QQQ OR earnings"), + ("crypto", "bitcoin OR ethereum OR crypto OR $BTC"), + ] + + for category, query in searches: + try: + env = os.environ.copy() + result = subprocess.run( + [bird_bin, "search", query, "-n", "15", "--json"], + capture_output=True, 
text=True, timeout=30, env=env + ) + + if result.returncode == 0 and result.stdout.strip(): + tweets = json.loads(result.stdout) + for tweet in tweets[:10]: + text = tweet.get("text", "") + tickers = self._extract_tickers(text) + + # Add crypto keywords + crypto_map = {"bitcoin": "BTC", "ethereum": "ETH", "solana": "SOL"} + for word, ticker in crypto_map.items(): + if word in text.lower(): + tickers.append(ticker) + + for ticker in set(tickers): + self.mentions[ticker]["count"] += 1 + self.mentions[ticker]["sources"].append("Twitter/X") + self.mentions[ticker]["sentiment_hints"].append(f"🐦 {text[:35]}...") + + self.results["social"].append({ + "platform": "twitter", + "text": text[:100], + "tickers": list(set(tickers)) + }) + + print(f" ✅ Twitter {category}: processed") + except subprocess.TimeoutExpired: + print(f" ⚠️ Twitter {category}: timeout") + except json.JSONDecodeError: + print(f" ⚠️ Twitter {category}: no auth?") + except FileNotFoundError: + print(" ⚠️ Twitter: bird CLI not found") + except Exception as e: + print(f" ❌ Twitter: {str(e)[:40]}") + + def _extract_tickers(self, text): + """Extract stock/crypto tickers from text.""" + patterns = [ + r'\$([A-Z]{1,5})\b', # $AAPL + r'\(([A-Z]{2,5})\)', # (AAPL) + r'(?:^|\s)([A-Z]{2,4})(?:\s|$|[,.])', # Standalone caps + ] + + tickers = [] + for pattern in patterns: + matches = re.findall(pattern, text) + tickers.extend(matches) + + # Company mappings + companies = { + "Apple": "AAPL", "Microsoft": "MSFT", "Google": "GOOGL", "Alphabet": "GOOGL", + "Amazon": "AMZN", "Tesla": "TSLA", "Nvidia": "NVDA", "Meta": "META", + "Netflix": "NFLX", "GameStop": "GME", "AMD": "AMD", "Intel": "INTC", + "Palantir": "PLTR", "Coinbase": "COIN", "MicroStrategy": "MSTR", + } + + for company, ticker in companies.items(): + if company.lower() in text.lower(): + tickers.append(ticker) + + # Filter out common words + skip = {'USA', 'CEO', 'IPO', 'ETF', 'SEC', 'FDA', 'NYSE', 'API', 'USD', 'EU', + 'UK', 'US', 'AI', 'IT', 'AT', 'TO', 'IN', 
'ON', 'IS', 'IF', 'OR', 'AN', + 'DD', 'WSB', 'YOLO', 'FD', 'OP', 'PM', 'AM'} + + return list(set(t for t in tickers if t not in skip and len(t) >= 2)) + + def get_hot_summary(self): + """Generate summary.""" + sorted_mentions = sorted( + self.mentions.items(), + key=lambda x: x[1]["count"], + reverse=True + ) + + summary = { + "scan_time": self.results["timestamp"], + "top_trending": [], + "crypto_highlights": [], + "stock_highlights": [], + "social_buzz": [], + "breaking_news": [] + } + + for symbol, data in sorted_mentions[:20]: + summary["top_trending"].append({ + "symbol": symbol, + "mentions": data["count"], + "sources": list(set(data["sources"])), + "signals": data["sentiment_hints"][:3] + }) + + # Crypto + seen = set() + for coin in self.results["crypto"] + self.results["movers"]: + if coin["symbol"] not in seen: + summary["crypto_highlights"].append(coin) + seen.add(coin["symbol"]) + + # Stocks + seen = set() + for stock in self.results["stocks"]: + if stock["symbol"] not in seen: + summary["stock_highlights"].append(stock) + seen.add(stock["symbol"]) + + # Social + for item in self.results["social"][:15]: + summary["social_buzz"].append(item) + + # News + for news in self.results["news"][:10]: + if news.get("tickers_mentioned"): + summary["breaking_news"].append({ + "title": news["title"], + "tickers": news["tickers_mentioned"] + }) + + return summary + + +def main(): + import argparse + parser = argparse.ArgumentParser(description="🔥 Hot Scanner - Find trending stocks & crypto") + parser.add_argument("--no-social", action="store_true", help="Skip social media scans") + parser.add_argument("--json", action="store_true", help="Output only JSON") + args = parser.parse_args() + + scanner = HotScanner(include_social=not args.no_social) + + if not args.json: + print("=" * 60) + print("🔥 HOT SCANNER v2 - What's Trending Right Now?") + print(f"📅 {datetime.now().strftime('%Y-%m-%d %H:%M:%S')} UTC") + print("=" * 60) + print() + + scanner.scan_all() + summary = 
scanner.get_hot_summary() + + # Save + output_file = CACHE_DIR / "hot_scan_latest.json" + with open(output_file, "w") as f: + json.dump(summary, f, indent=2, default=str) + + if args.json: + print(json.dumps(summary, indent=2, default=str)) + return + + print() + print("=" * 60) + print("🔥 RESULTS") + print("=" * 60) + + print("\n📊 TOP TRENDING (by buzz):\n") + for i, item in enumerate(summary["top_trending"][:12], 1): + sources = ", ".join(item["sources"][:2]) + signal = item["signals"][0][:30] if item["signals"] else "" + print(f" {i:2}. {item['symbol']:8} ({item['mentions']:2} pts) [{sources}] {signal}") + + print("\n🪙 CRYPTO:\n") + for coin in summary["crypto_highlights"][:8]: + change = coin.get("change_24h") or coin.get("price_change_24h") + change_str = f"{change:+.1f}%" if change else "🔥" + emoji = "🚀" if (change or 0) > 0 else "📉" if (change or 0) < 0 else "🔥" + print(f" {emoji} {coin.get('symbol', '?'):8} {coin.get('name', '')[:16]:16} {change_str:>8}") + + print("\n📈 STOCKS:\n") + cat_emoji = {"gainers": "🟢", "losers": "🔴", "most_active": "📊"} + for stock in summary["stock_highlights"][:10]: + emoji = cat_emoji.get(stock.get("category"), "•") + print(f" {emoji} {stock['symbol']:6} ({stock.get('category', 'N/A').replace('_', ' ')})") + + if summary["social_buzz"]: + print("\n🐦 SOCIAL BUZZ:\n") + for item in summary["social_buzz"][:5]: + platform = item.get("platform", "?") + text = item.get("title") or item.get("text", "") + text = text[:55] + "..." if len(text) > 55 else text + print(f" [{platform}] {text}") + + print("\n📰 NEWS:\n") + for news in summary["breaking_news"][:5]: + tickers = ", ".join(news["tickers"][:3]) + title = news["title"][:55] + "..." 
if len(news["title"]) > 55 else news["title"] + print(f" [{tickers}] {title}") + + print(f"\n💾 Saved: {output_file}\n") + + +if __name__ == "__main__": + main() diff --git a/scripts/portfolio.py b/scripts/portfolio.py new file mode 100644 index 0000000..5011acb --- /dev/null +++ b/scripts/portfolio.py @@ -0,0 +1,548 @@ +#!/usr/bin/env python3 +# /// script +# requires-python = ">=3.10" +# dependencies = ["yfinance>=0.2.40"] +# /// +""" +Portfolio management for stock-analysis skill. + +Usage: + uv run portfolio.py create "Portfolio Name" + uv run portfolio.py list + uv run portfolio.py show [--portfolio NAME] + uv run portfolio.py delete "Portfolio Name" + uv run portfolio.py rename "Old Name" "New Name" + + uv run portfolio.py add TICKER --quantity 100 --cost 150.00 [--portfolio NAME] + uv run portfolio.py update TICKER --quantity 150 [--portfolio NAME] + uv run portfolio.py remove TICKER [--portfolio NAME] +""" + +import argparse +import json +import os +import sys +from dataclasses import dataclass, asdict +from datetime import datetime +from pathlib import Path +from typing import Literal + +import yfinance as yf + + +# Top 20 supported cryptocurrencies +SUPPORTED_CRYPTOS = { + "BTC-USD", "ETH-USD", "BNB-USD", "SOL-USD", "XRP-USD", + "ADA-USD", "DOGE-USD", "AVAX-USD", "DOT-USD", "MATIC-USD", + "LINK-USD", "ATOM-USD", "UNI-USD", "LTC-USD", "BCH-USD", + "XLM-USD", "ALGO-USD", "VET-USD", "FIL-USD", "NEAR-USD", +} + + +def get_storage_path() -> Path: + """Get the portfolio storage path.""" + # Use ~/.clawdbot/skills/stock-analysis/portfolios.json + state_dir = os.environ.get("CLAWDBOT_STATE_DIR", os.path.expanduser("~/.clawdbot")) + portfolio_dir = Path(state_dir) / "skills" / "stock-analysis" + portfolio_dir.mkdir(parents=True, exist_ok=True) + return portfolio_dir / "portfolios.json" + + +def detect_asset_type(ticker: str) -> Literal["stock", "crypto"]: + """Detect asset type from ticker format.""" + ticker_upper = ticker.upper() + if 
ticker_upper.endswith("-USD"): + base = ticker_upper[:-4] + if base.isalpha() and f"{base}-USD" in SUPPORTED_CRYPTOS: + return "crypto" + # Allow any *-USD ticker as crypto (flexible) + if base.isalpha(): + return "crypto" + return "stock" + + +@dataclass +class Asset: + ticker: str + type: Literal["stock", "crypto"] + quantity: float + cost_basis: float + added_at: str + + +@dataclass +class Portfolio: + name: str + created_at: str + updated_at: str + assets: list[Asset] + + +class PortfolioStore: + """Manages portfolio storage with atomic writes.""" + + def __init__(self, path: Path | None = None): + self.path = path or get_storage_path() + self._data: dict | None = None + + def _load(self) -> dict: + """Load portfolios from disk.""" + if self._data is not None: + return self._data + + if not self.path.exists(): + self._data = {"version": 1, "portfolios": {}} + return self._data + + try: + with open(self.path, "r", encoding="utf-8") as f: + self._data = json.load(f) + return self._data + except (json.JSONDecodeError, IOError): + self._data = {"version": 1, "portfolios": {}} + return self._data + + def _save(self) -> None: + """Save portfolios to disk with atomic write.""" + if self._data is None: + return + + # Ensure directory exists + self.path.parent.mkdir(parents=True, exist_ok=True) + + # Atomic write: write to temp file, then rename + tmp_path = self.path.with_suffix(".tmp") + try: + with open(tmp_path, "w", encoding="utf-8") as f: + json.dump(self._data, f, indent=2) + tmp_path.replace(self.path) + except Exception: + if tmp_path.exists(): + tmp_path.unlink() + raise + + def _get_portfolio_key(self, name: str) -> str: + """Convert portfolio name to storage key.""" + return name.lower().replace(" ", "-") + + def list_portfolios(self) -> list[str]: + """List all portfolio names.""" + data = self._load() + return [p["name"] for p in data["portfolios"].values()] + + def get_portfolio(self, name: str) -> Portfolio | None: + """Get a portfolio by name.""" + data 
= self._load() + key = self._get_portfolio_key(name) + + if key not in data["portfolios"]: + # Try case-insensitive match + for k, v in data["portfolios"].items(): + if v["name"].lower() == name.lower(): + key = k + break + else: + return None + + p = data["portfolios"][key] + assets = [ + Asset( + ticker=a["ticker"], + type=a["type"], + quantity=a["quantity"], + cost_basis=a["cost_basis"], + added_at=a["added_at"], + ) + for a in p.get("assets", []) + ] + return Portfolio( + name=p["name"], + created_at=p["created_at"], + updated_at=p["updated_at"], + assets=assets, + ) + + def create_portfolio(self, name: str) -> Portfolio: + """Create a new portfolio.""" + data = self._load() + key = self._get_portfolio_key(name) + + if key in data["portfolios"]: + raise ValueError(f"Portfolio '{name}' already exists") + + now = datetime.now().isoformat() + portfolio = { + "name": name, + "created_at": now, + "updated_at": now, + "assets": [], + } + data["portfolios"][key] = portfolio + self._save() + + return Portfolio(name=name, created_at=now, updated_at=now, assets=[]) + + def delete_portfolio(self, name: str) -> bool: + """Delete a portfolio.""" + data = self._load() + key = self._get_portfolio_key(name) + + # Try case-insensitive match + if key not in data["portfolios"]: + for k, v in data["portfolios"].items(): + if v["name"].lower() == name.lower(): + key = k + break + else: + return False + + del data["portfolios"][key] + self._save() + return True + + def rename_portfolio(self, old_name: str, new_name: str) -> bool: + """Rename a portfolio.""" + data = self._load() + old_key = self._get_portfolio_key(old_name) + new_key = self._get_portfolio_key(new_name) + + # Find old portfolio + if old_key not in data["portfolios"]: + for k, v in data["portfolios"].items(): + if v["name"].lower() == old_name.lower(): + old_key = k + break + else: + return False + + if new_key in data["portfolios"] and new_key != old_key: + raise ValueError(f"Portfolio '{new_name}' already exists") + 
+ portfolio = data["portfolios"].pop(old_key) + portfolio["name"] = new_name + portfolio["updated_at"] = datetime.now().isoformat() + data["portfolios"][new_key] = portfolio + self._save() + return True + + def add_asset( + self, + portfolio_name: str, + ticker: str, + quantity: float, + cost_basis: float, + ) -> Asset: + """Add an asset to a portfolio.""" + data = self._load() + key = self._get_portfolio_key(portfolio_name) + + # Find portfolio + if key not in data["portfolios"]: + for k, v in data["portfolios"].items(): + if v["name"].lower() == portfolio_name.lower(): + key = k + break + else: + raise ValueError(f"Portfolio '{portfolio_name}' not found") + + portfolio = data["portfolios"][key] + ticker = ticker.upper() + + # Check if asset already exists + for asset in portfolio["assets"]: + if asset["ticker"] == ticker: + raise ValueError(f"Asset '{ticker}' already in portfolio. Use 'update' to modify.") + + # Validate ticker + asset_type = detect_asset_type(ticker) + try: + stock = yf.Ticker(ticker) + info = stock.info + if "regularMarketPrice" not in info: + raise ValueError(f"Invalid ticker: {ticker}") + except Exception as e: + raise ValueError(f"Could not validate ticker '{ticker}': {e}") + + now = datetime.now().isoformat() + asset = { + "ticker": ticker, + "type": asset_type, + "quantity": quantity, + "cost_basis": cost_basis, + "added_at": now, + } + portfolio["assets"].append(asset) + portfolio["updated_at"] = now + self._save() + + return Asset(**asset) + + def update_asset( + self, + portfolio_name: str, + ticker: str, + quantity: float | None = None, + cost_basis: float | None = None, + ) -> Asset | None: + """Update an asset in a portfolio.""" + data = self._load() + key = self._get_portfolio_key(portfolio_name) + + # Find portfolio + if key not in data["portfolios"]: + for k, v in data["portfolios"].items(): + if v["name"].lower() == portfolio_name.lower(): + key = k + break + else: + return None + + portfolio = data["portfolios"][key] + ticker = 
def format_currency(value: float) -> str:
    """Format a dollar amount, abbreviating thousands (K) and millions (M)."""
    magnitude = abs(value)
    if magnitude >= 1_000_000:
        return f"${value/1_000_000:.2f}M"
    if magnitude >= 1_000:
        return f"${value/1_000:.2f}K"
    return f"${value:.2f}"
Use 'add' to add assets.\n") + return + + total_cost = 0.0 + total_value = 0.0 + + print(f"{'Ticker':<12} {'Type':<8} {'Qty':>10} {'Cost':>12} {'Current':>12} {'Value':>14} {'P&L':>12}") + print("-" * 82) + + for asset in portfolio.assets: + try: + stock = yf.Ticker(asset.ticker) + current_price = stock.info.get("regularMarketPrice", 0) or 0 + except Exception: + current_price = 0 + + cost_total = asset.quantity * asset.cost_basis + current_value = asset.quantity * current_price + pnl = current_value - cost_total + pnl_pct = (pnl / cost_total * 100) if cost_total > 0 else 0 + + total_cost += cost_total + total_value += current_value + + pnl_str = f"{'+' if pnl >= 0 else ''}{format_currency(pnl)} ({pnl_pct:+.1f}%)" + + print(f"{asset.ticker:<12} {asset.type:<8} {asset.quantity:>10.4f} " + f"{format_currency(asset.cost_basis):>12} {format_currency(current_price):>12} " + f"{format_currency(current_value):>14} {pnl_str:>12}") + + print("-" * 82) + total_pnl = total_value - total_cost + total_pnl_pct = (total_pnl / total_cost * 100) if total_cost > 0 else 0 + print(f"{'TOTAL':<12} {'':<8} {'':<10} {format_currency(total_cost):>12} {'':<12} " + f"{format_currency(total_value):>14} {'+' if total_pnl >= 0 else ''}{format_currency(total_pnl)} ({total_pnl_pct:+.1f}%)") + print() + + +def main(): + parser = argparse.ArgumentParser(description="Portfolio management for stock-analysis") + subparsers = parser.add_subparsers(dest="command", help="Commands") + + # create + create_parser = subparsers.add_parser("create", help="Create a new portfolio") + create_parser.add_argument("name", help="Portfolio name") + + # list + subparsers.add_parser("list", help="List all portfolios") + + # show + show_parser = subparsers.add_parser("show", help="Show portfolio details") + show_parser.add_argument("--portfolio", "-p", help="Portfolio name (default: first portfolio)") + + # delete + delete_parser = subparsers.add_parser("delete", help="Delete a portfolio") + 
delete_parser.add_argument("name", help="Portfolio name") + + # rename + rename_parser = subparsers.add_parser("rename", help="Rename a portfolio") + rename_parser.add_argument("old_name", help="Current portfolio name") + rename_parser.add_argument("new_name", help="New portfolio name") + + # add + add_parser = subparsers.add_parser("add", help="Add an asset to portfolio") + add_parser.add_argument("ticker", help="Stock/crypto ticker (e.g., AAPL, BTC-USD)") + add_parser.add_argument("--quantity", "-q", type=float, required=True, help="Quantity") + add_parser.add_argument("--cost", "-c", type=float, required=True, help="Cost basis per unit") + add_parser.add_argument("--portfolio", "-p", help="Portfolio name (default: first portfolio)") + + # update + update_parser = subparsers.add_parser("update", help="Update an asset in portfolio") + update_parser.add_argument("ticker", help="Stock/crypto ticker") + update_parser.add_argument("--quantity", "-q", type=float, help="New quantity") + update_parser.add_argument("--cost", "-c", type=float, help="New cost basis per unit") + update_parser.add_argument("--portfolio", "-p", help="Portfolio name (default: first portfolio)") + + # remove + remove_parser = subparsers.add_parser("remove", help="Remove an asset from portfolio") + remove_parser.add_argument("ticker", help="Stock/crypto ticker") + remove_parser.add_argument("--portfolio", "-p", help="Portfolio name (default: first portfolio)") + + args = parser.parse_args() + + if not args.command: + parser.print_help() + sys.exit(1) + + store = PortfolioStore() + + try: + if args.command == "create": + portfolio = store.create_portfolio(args.name) + print(f"Created portfolio: {portfolio.name}") + + elif args.command == "list": + portfolios = store.list_portfolios() + if not portfolios: + print("No portfolios found. 
Use 'create' to create one.") + else: + print("\nPortfolios:") + for name in portfolios: + p = store.get_portfolio(name) + asset_count = len(p.assets) if p else 0 + print(f" - {name} ({asset_count} assets)") + print() + + elif args.command == "show": + portfolio_name = args.portfolio or store.get_default_portfolio_name() + if not portfolio_name: + print("No portfolios found. Use 'create' to create one.") + sys.exit(1) + + portfolio = store.get_portfolio(portfolio_name) + if not portfolio: + print(f"Portfolio '{portfolio_name}' not found.") + sys.exit(1) + + show_portfolio(portfolio) + + elif args.command == "delete": + if store.delete_portfolio(args.name): + print(f"Deleted portfolio: {args.name}") + else: + print(f"Portfolio '{args.name}' not found.") + sys.exit(1) + + elif args.command == "rename": + if store.rename_portfolio(args.old_name, args.new_name): + print(f"Renamed portfolio: {args.old_name} -> {args.new_name}") + else: + print(f"Portfolio '{args.old_name}' not found.") + sys.exit(1) + + elif args.command == "add": + portfolio_name = args.portfolio or store.get_default_portfolio_name() + if not portfolio_name: + print("No portfolios found. 
Use 'create' to create one first.") + sys.exit(1) + + asset = store.add_asset(portfolio_name, args.ticker, args.quantity, args.cost) + print(f"Added {asset.ticker} ({asset.type}) to {portfolio_name}: " + f"{asset.quantity} units @ {format_currency(asset.cost_basis)}") + + elif args.command == "update": + portfolio_name = args.portfolio or store.get_default_portfolio_name() + if not portfolio_name: + print("No portfolios found.") + sys.exit(1) + + if args.quantity is None and args.cost is None: + print("Must specify --quantity and/or --cost to update.") + sys.exit(1) + + asset = store.update_asset(portfolio_name, args.ticker, args.quantity, args.cost) + if asset: + print(f"Updated {asset.ticker} in {portfolio_name}: " + f"{asset.quantity} units @ {format_currency(asset.cost_basis)}") + else: + print(f"Asset '{args.ticker}' not found in portfolio '{portfolio_name}'.") + sys.exit(1) + + elif args.command == "remove": + portfolio_name = args.portfolio or store.get_default_portfolio_name() + if not portfolio_name: + print("No portfolios found.") + sys.exit(1) + + if store.remove_asset(portfolio_name, args.ticker): + print(f"Removed {args.ticker.upper()} from {portfolio_name}") + else: + print(f"Asset '{args.ticker}' not found in portfolio '{portfolio_name}'.") + sys.exit(1) + + except ValueError as e: + print(f"Error: {e}") + sys.exit(1) + except Exception as e: + print(f"Unexpected error: {e}") + sys.exit(1) + + +if __name__ == "__main__": + main() diff --git a/scripts/rumor_scanner.py b/scripts/rumor_scanner.py new file mode 100644 index 0000000..bebc191 --- /dev/null +++ b/scripts/rumor_scanner.py @@ -0,0 +1,342 @@ +#!/usr/bin/env python3 +""" +🔮 RUMOR & BUZZ SCANNER +Scans for early signals, rumors, and whispers before they become mainstream news. 
def load_env():
    """Load environment variables from the skill's .env file into os.environ.

    Skips comment lines (starting with '#') and lines without '='.
    Values are stripped of surrounding single or double quotes.
    """
    if BIRD_ENV.exists():
        for line in BIRD_ENV.read_text().splitlines():
            if '=' in line and not line.startswith('#'):
                key, value = line.split('=', 1)
                os.environ[key.strip()] = value.strip().strip('"').strip("'")

def fetch_url(url, timeout=15):
    """Fetch *url* with browser-like headers and return the decoded body.

    Transparently decompresses gzip responses; decoding errors are ignored
    so callers always receive a str. Returns None on any error -- this is
    a best-effort scraper, so callers must treat None as "unavailable".
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36',
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
        'Accept-Encoding': 'gzip, deflate',
        'Accept-Language': 'en-US,en;q=0.9',
    }
    req = Request(url, headers=headers)
    try:
        with urlopen(req, timeout=timeout) as resp:
            data = resp.read()
            if resp.info().get('Content-Encoding') == 'gzip':
                data = gzip.decompress(data)
            return data.decode('utf-8', errors='ignore')
    except Exception:
        return None

def search_twitter_rumors():
    """Search Twitter (via the bird CLI) for rumor and early-signal tweets.

    Returns a list of dicts (source/type/text/author/likes/retweets/query),
    deduplicated on the first 100 characters of the tweet text.
    """
    results = []

    # Rumor-focused search queries
    queries = [
        '"hearing that" stock OR $',
        '"sources say" stock OR company',
        '"rumor" merger OR acquisition',
        'insider buying stock',
        '"upgrade" OR "downgrade" stock tomorrow',
        '$AAPL OR $TSLA OR $NVDA rumor',
        '"breaking" stock market',
        'M&A rumor',
    ]

    # Keywords are matched against text.lower(), so every entry must be
    # lowercase. (Bug fix: the previous 'M&A' literal could never match.)
    signal_keywords = ['hearing', 'rumor', 'source', 'insider', 'upgrade',
                       'downgrade', 'breaking', 'm&a', 'merger', 'acquisition']

    load_env()

    for query in queries[:4]:  # Limit to avoid rate limits
        try:
            cmd = [BIRD_CLI, 'search', query, '-n', '10', '--json']
            env = os.environ.copy()

            result = subprocess.run(cmd, capture_output=True, text=True, timeout=30, env=env)

            if result.returncode == 0 and result.stdout:
                try:
                    tweets = json.loads(result.stdout)
                    for tweet in tweets:
                        text = tweet.get('text', '')
                        # Keep only tweets that actually look like rumors/signals
                        if any(kw in text.lower() for kw in signal_keywords):
                            results.append({
                                'source': 'twitter',
                                'type': 'rumor',
                                'text': text[:300],
                                'author': tweet.get('author', {}).get('username', 'unknown'),
                                'likes': tweet.get('likes', 0),
                                'retweets': tweet.get('retweets', 0),
                                'query': query
                            })
                except json.JSONDecodeError:
                    pass  # malformed CLI output for this query; skip it
        except Exception:
            pass  # CLI missing / timeout -- best-effort source

    # Dedupe by text similarity (first 100 chars as the key)
    seen = set()
    unique = []
    for r in results:
        key = r['text'][:100]
        if key not in seen:
            seen.add(key)
            unique.append(r)

    return unique

def search_twitter_buzz():
    """Search Twitter for general stock buzz - what are people talking about?

    Returns up to 20 items sorted by engagement (likes + 2 * retweets),
    each carrying the $SYMBOL cashtags extracted from the tweet text.
    """
    results = []

    queries = [
        '$SPY OR $QQQ',
        'stock to buy',
        'calls OR puts expiring',
        'earnings play',
        'short squeeze',
    ]

    load_env()

    for query in queries[:3]:
        try:
            cmd = [BIRD_CLI, 'search', query, '-n', '15', '--json']
            env = os.environ.copy()

            result = subprocess.run(cmd, capture_output=True, text=True, timeout=30, env=env)

            if result.returncode == 0 and result.stdout:
                try:
                    tweets = json.loads(result.stdout)
                    for tweet in tweets:
                        text = tweet.get('text', '')
                        # Extract $SYMBOL cashtags (1-5 uppercase letters)
                        symbols = re.findall(r'\$([A-Z]{1,5})\b', text)
                        if symbols:
                            results.append({
                                'source': 'twitter',
                                'type': 'buzz',
                                'text': text[:300],
                                'symbols': symbols,
                                'author': tweet.get('author', {}).get('username', 'unknown'),
                                'engagement': tweet.get('likes', 0) + tweet.get('retweets', 0) * 2
                            })
                except json.JSONDecodeError:
                    pass
        except Exception:
            pass

    # Sort by engagement
    results.sort(key=lambda x: x.get('engagement', 0), reverse=True)
    return results[:20]
def search_news_rumors():
    """Search Google News RSS for M&A / insider / analyst-action headlines.

    Returns a list of dicts (source/type/title/link/date/query), keeping at
    most 5 items per query. Feeds that fail to fetch or parse are skipped.
    """
    # Hoisted out of the per-query loop: importing once is enough.
    import xml.etree.ElementTree as ET

    results = []

    queries = [
        'merger acquisition rumor',
        'insider buying stock',
        'analyst upgrade stock',
        'takeover bid company',
        'SEC investigation company',
    ]

    for query in queries:
        url = f"https://news.google.com/rss/search?q={quote_plus(query)}&hl=en-US&gl=US&ceid=US:en"
        content = fetch_url(url)

        if content:
            try:
                root = ET.fromstring(content)
                for item in root.findall('.//item')[:5]:
                    title = item.find('title')
                    link = item.find('link')
                    pub_date = item.find('pubDate')

                    if title is not None:
                        title_text = title.text or ''
                        results.append({
                            'source': 'google_news',
                            'type': 'news_rumor',
                            'title': title_text,
                            'link': link.text if link is not None else '',
                            'date': pub_date.text if pub_date is not None else '',
                            'query': query
                        })
            except ET.ParseError:
                pass  # malformed feed; skip this query

    return results

def extract_symbols_from_text(text):
    """Extract stock ticker symbols from free text.

    Combines $CASHTAG matches (1-5 uppercase letters) with a small
    company-name -> ticker lookup. Returns a deduplicated list; order is
    unspecified because a set is used for deduplication.
    """
    # $SYMBOL pattern
    dollar_symbols = re.findall(r'\$([A-Z]{1,5})\b', text)

    # Common company name to symbol mapping
    company_map = {
        'apple': 'AAPL', 'tesla': 'TSLA', 'nvidia': 'NVDA', 'microsoft': 'MSFT',
        'google': 'GOOGL', 'amazon': 'AMZN', 'meta': 'META', 'netflix': 'NFLX',
        'coinbase': 'COIN', 'robinhood': 'HOOD', 'disney': 'DIS', 'intel': 'INTC',
        'amd': 'AMD', 'palantir': 'PLTR', 'gamestop': 'GME', 'amc': 'AMC',
    }

    text_lower = text.lower()
    company_symbols = [sym for name, sym in company_map.items() if name in text_lower]

    return list(set(dollar_symbols + company_symbols))
def calculate_rumor_score(item):
    """Score a rumor by potential impact.

    Each matching keyword category contributes a fixed number of points;
    high engagement adds a small bonus. Works on both tweet dicts ('text')
    and news dicts ('title').
    """
    blob = (item.get('text', '') + item.get('title', '')).lower()

    # (points, keywords): a category counts once no matter how many hit.
    impact_rules = (
        (5, ('merger', 'acquisition', 'takeover', 'buyout')),
        (4, ('insider', 'ceo buying', 'director buying')),
        (3, ('upgrade', 'price target raised')),
        (3, ('downgrade', 'sec investigation', 'fraud')),
        (2, ('hearing', 'sources say', 'rumor')),
        (2, ('breaking', 'just in', 'alert')),
    )
    score = sum(points for points, kws in impact_rules
                if any(kw in blob for kw in kws))

    # Engagement boost
    if item.get('engagement', 0) > 100:
        score += 2
    if item.get('likes', 0) > 50:
        score += 1

    return score

def main():
    """Run the full scan: gather rumors and buzz, score, report, cache."""
    banner = "=" * 60
    print(banner)
    print("🔮 RUMOR & BUZZ SCANNER")
    print(f"📅 {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S')} UTC")
    print(banner)
    print()
    print("🔍 Scanning for early signals...")
    print()

    rumor_items = []
    buzz_items = []

    # Twitter Rumors
    print(" 🐦 Twitter rumors...")
    tw_rumors = search_twitter_rumors()
    print(f" ✅ {len(tw_rumors)} potential rumors")
    rumor_items.extend(tw_rumors)

    # Twitter Buzz
    print(" 🐦 Twitter buzz...")
    tw_buzz = search_twitter_buzz()
    print(f" ✅ {len(tw_buzz)} buzz items")
    buzz_items.extend(tw_buzz)

    # News Rumors
    print(" 📰 News rumors...")
    news_items = search_news_rumors()
    print(f" ✅ {len(news_items)} news items")
    rumor_items.extend(news_items)

    # Score every rumor and tag it with any detected symbols
    for entry in rumor_items:
        entry['score'] = calculate_rumor_score(entry)
        entry['symbols'] = extract_symbols_from_text(entry.get('text', '') + entry.get('title', ''))

    rumor_items.sort(key=lambda x: x['score'], reverse=True)

    # Count symbol mentions in buzz
    symbol_counts = {}
    for entry in buzz_items:
        for sym in entry.get('symbols', []):
            symbol_counts[sym] = symbol_counts.get(sym, 0) + 1

    # Output
    print()
    print(banner)
    print("🔮 RESULTS")
    print(banner)
    print()

    # Top Rumors
    print("🚨 TOP RUMORS (by potential impact):")
    print()
    for entry in rumor_items[:10]:
        if entry['score'] > 0:
            source = entry['source']
            symbols = ', '.join(entry.get('symbols', [])) or 'N/A'
            text = entry.get('text', entry.get('title', ''))[:80]
            print(f" [{entry['score']}] [{source}] {symbols}")
            print(f" {text}...")
            print()

    # Buzz Leaderboard
    print("📊 BUZZ LEADERBOARD (most discussed):")
    print()
    leaders = sorted(symbol_counts.items(), key=lambda x: x[1], reverse=True)
    for symbol, count in leaders[:15]:
        bar = "█" * min(count, 20)
        print(f" ${symbol:5} {bar} ({count})")

    print()

    # Recent Buzz Snippets
    print("💬 WHAT PEOPLE ARE SAYING:")
    print()
    for entry in buzz_items[:8]:
        author = entry.get('author', 'anon')
        text = entry.get('text', '')[:120]
        engagement = entry.get('engagement', 0)
        print(f" @{author} ({engagement}♥): {text}...")
    print()

    # Save results
    output = {
        'timestamp': datetime.now(timezone.utc).isoformat(),
        'rumors': rumor_items[:20],
        'buzz': buzz_items[:30],
        'symbol_counts': symbol_counts,
    }

    output_file = CACHE_DIR / 'rumor_scan_latest.json'
    output_file.write_text(json.dumps(output, indent=2, default=str))
    print(f"💾 Saved: {output_file}")

if __name__ == "__main__":
    main()
class TestAssetTypeDetection:
    """Asset-type detection: stocks vs. crypto tickers."""

    def test_stock_detection(self):
        # Case-insensitive plain tickers are stocks.
        for ticker in ("AAPL", "MSFT", "googl"):
            assert detect_asset_type(ticker) == "stock"

    def test_crypto_detection(self):
        # The '-USD' pair suffix marks crypto, regardless of case.
        for ticker in ("BTC-USD", "ETH-USD", "sol-usd"):
            assert detect_asset_type(ticker) == "crypto"

    def test_edge_cases(self):
        # Ticker ending in USD but not the crypto "-USD" format
        assert detect_asset_type("MUSD") == "stock"
        # Dotted share-class ticker
        assert detect_asset_type("BRK.B") == "stock"


class TestRSICalculation:
    """RSI calculation over synthetic price series."""

    def test_rsi_overbought(self):
        """Monotonically rising prices should push RSI above 70."""
        rising = pd.Series([100 + i * 2 for i in range(20)])
        rsi = calculate_rsi(rising, period=14)
        assert rsi is not None
        assert rsi > 70

    def test_rsi_oversold(self):
        """Monotonically falling prices should push RSI below 30."""
        falling = pd.Series([100 - i * 2 for i in range(20)])
        rsi = calculate_rsi(falling, period=14)
        assert rsi is not None
        assert rsi < 30

    def test_rsi_insufficient_data(self):
        """Fewer points than the period yields None."""
        assert calculate_rsi(pd.Series([100, 101, 102]), period=14) is None


class TestEarningsSurprise:
    """Earnings surprise analysis on mocked earnings history."""

    @staticmethod
    def _data_with_eps(reported, estimate):
        # Build a StockData mock carrying a single-row earnings history.
        frame = pd.DataFrame(
            {"Reported EPS": [reported], "EPS Estimate": [estimate]},
            index=[pd.Timestamp("2024-01-15")],
        )
        data = Mock(spec=StockData)
        data.earnings_history = frame
        return data

    def test_earnings_beat(self):
        """Reported EPS above estimate -> positive score and 'Beat'."""
        result = analyze_earnings_surprise(self._data_with_eps(1.50, 1.20))

        assert result is not None
        assert result.score > 0
        assert result.surprise_pct > 0
        assert "Beat" in result.explanation

    def test_earnings_miss(self):
        """Reported EPS below estimate -> negative score and 'Missed'."""
        result = analyze_earnings_surprise(self._data_with_eps(0.80, 1.00))

        assert result is not None
        assert result.score < 0
        assert result.surprise_pct < 0
        assert "Missed" in result.explanation
class TestFundamentals:
    """Fundamentals scoring from a mocked yfinance info dict."""

    @staticmethod
    def _data_with_info(info):
        # Minimal StockData mock exposing only the info dict.
        data = Mock(spec=StockData)
        data.info = info
        return data

    def test_strong_fundamentals(self):
        """Cheap valuation, fat margins, growth, low debt -> positive score."""
        result = analyze_fundamentals(self._data_with_info({
            "trailingPE": 15,
            "operatingMargins": 0.25,
            "revenueGrowth": 0.30,
            "debtToEquity": 30,
        }))

        assert result is not None
        assert result.score > 0
        assert "pe_ratio" in result.key_metrics

    def test_weak_fundamentals(self):
        """Rich valuation, thin margins, shrinking revenue -> negative score."""
        result = analyze_fundamentals(self._data_with_info({
            "trailingPE": 50,
            "operatingMargins": 0.02,
            "revenueGrowth": -0.10,
            "debtToEquity": 300,
        }))

        assert result is not None
        assert result.score < 0


class TestMomentum:
    """Momentum analysis on synthetic price history."""

    def test_overbought_momentum(self):
        """Steadily rising prices near the 52-week high read as overbought."""
        dates = pd.date_range(end=datetime.now(), periods=100)
        history = pd.DataFrame(
            {
                "Close": [100 + i * 0.5 for i in range(100)],
                "Volume": [1000000] * 100,
            },
            index=dates,
        )

        data = Mock(spec=StockData)
        data.price_history = history
        data.info = {
            "fiftyTwoWeekHigh": 150,
            "fiftyTwoWeekLow": 80,
            "regularMarketPrice": 148,
        }

        result = analyze_momentum(data)

        assert result is not None
        assert result.rsi_status == "overbought"
        assert result.near_52w_high == True
        assert result.score < 0  # overbought contributes negatively
"regularMarketPrice": 148, + } + + result = analyze_momentum(mock_data) + + assert result is not None + assert result.rsi_status == "overbought" + assert result.near_52w_high == True + assert result.score < 0 # Overbought = negative score + + +class TestSignalSynthesis: + """Test signal synthesis.""" + + def test_buy_signal(self): + """Test BUY recommendation synthesis.""" + earnings = EarningsSurprise(score=0.8, explanation="Beat by 20%", actual_eps=1.2, expected_eps=1.0, surprise_pct=20) + fundamentals = Fundamentals(score=0.6, key_metrics={"pe_ratio": 15}, explanation="Strong margins") + + signal = synthesize_signal( + ticker="TEST", + company_name="Test Corp", + earnings=earnings, + fundamentals=fundamentals, + analysts=None, + historical=None, + market_context=None, + sector=None, + earnings_timing=None, + momentum=None, + sentiment=None, + ) + + assert signal.recommendation == "BUY" + assert signal.confidence > 0.5 + + def test_sell_signal(self): + """Test SELL recommendation synthesis.""" + earnings = EarningsSurprise(score=-0.8, explanation="Missed by 20%", actual_eps=0.8, expected_eps=1.0, surprise_pct=-20) + fundamentals = Fundamentals(score=-0.6, key_metrics={"pe_ratio": 50}, explanation="Weak margins") + + signal = synthesize_signal( + ticker="TEST", + company_name="Test Corp", + earnings=earnings, + fundamentals=fundamentals, + analysts=None, + historical=None, + market_context=None, + sector=None, + earnings_timing=None, + momentum=None, + sentiment=None, + ) + + assert signal.recommendation == "SELL" + + def test_risk_off_penalty(self): + """Test risk-off mode reduces BUY confidence.""" + earnings = EarningsSurprise(score=0.8, explanation="Beat", actual_eps=1.2, expected_eps=1.0, surprise_pct=20) + fundamentals = Fundamentals(score=0.6, key_metrics={}, explanation="Strong") + market = MarketContext( + vix_level=25, + vix_status="elevated", + spy_trend_10d=2.0, + qqq_trend_10d=1.5, + market_regime="choppy", + score=-0.2, + explanation="Risk-off", + 
gld_change_5d=3.0, + tlt_change_5d=2.0, + uup_change_5d=1.5, + risk_off_detected=True, + ) + + signal = synthesize_signal( + ticker="TEST", + company_name="Test Corp", + earnings=earnings, + fundamentals=fundamentals, + analysts=None, + historical=None, + market_context=market, + sector=None, + earnings_timing=None, + momentum=None, + sentiment=None, + ) + + # Should still be BUY but with reduced confidence + assert signal.recommendation in ["BUY", "HOLD"] + assert any("RISK-OFF" in c for c in signal.caveats) + + +class TestWatchlist: + """Test watchlist functionality.""" + + @patch('watchlist.get_current_price') + @patch('watchlist.save_watchlist') + @patch('watchlist.load_watchlist') + def test_add_to_watchlist(self, mock_load, mock_save, mock_price): + """Test adding ticker to watchlist.""" + mock_load.return_value = [] + mock_price.return_value = 150.0 + mock_save.return_value = None + + result = add_to_watchlist("AAPL", target_price=200.0) + + assert result["success"] == True + assert result["action"] == "added" + assert result["ticker"] == "AAPL" + assert result["target_price"] == 200.0 + + @patch('watchlist.save_watchlist') + @patch('watchlist.load_watchlist') + def test_remove_from_watchlist(self, mock_load, mock_save): + """Test removing ticker from watchlist.""" + mock_load.return_value = [ + WatchlistItem(ticker="AAPL", added_at="2024-01-01T00:00:00+00:00") + ] + mock_save.return_value = None + + result = remove_from_watchlist("AAPL") + + assert result["success"] == True + assert result["removed"] == "AAPL" + + +class TestDividendAnalysis: + """Test dividend analysis.""" + + @patch('yfinance.Ticker') + def test_dividend_stock(self, mock_ticker): + """Test analysis of dividend-paying stock.""" + mock_stock = Mock() + mock_stock.info = { + "longName": "Johnson & Johnson", + "regularMarketPrice": 160.0, + "dividendYield": 0.03, + "dividendRate": 4.80, + "trailingEps": 6.00, + } + mock_stock.dividends = pd.Series( + [1.2, 1.2, 1.2, 1.2] * 5, # 5 years of 
class TestDividendAnalysis:
    """Dividend analysis with yfinance mocked."""

    @patch('yfinance.Ticker')
    def test_dividend_stock(self, mock_ticker):
        """A steady quarterly payer gets a yield, payout ratio, and rating."""
        stock = Mock()
        stock.info = {
            "longName": "Johnson & Johnson",
            "regularMarketPrice": 160.0,
            "dividendYield": 0.03,
            "dividendRate": 4.80,
            "trailingEps": 6.00,
        }
        # 5 years of $1.20 quarterly dividends
        stock.dividends = pd.Series(
            [1.2, 1.2, 1.2, 1.2] * 5,
            index=pd.date_range(start="2019-01-01", periods=20, freq="Q"),
        )
        mock_ticker.return_value = stock

        result = analyze_dividends("JNJ")

        assert result is not None
        assert result.dividend_yield == 3.0
        assert result.payout_ratio == 80.0
        assert result.income_rating != "no_dividend"

    @patch('yfinance.Ticker')
    def test_no_dividend_stock(self, mock_ticker):
        """A non-payer is rated 'no_dividend'."""
        stock = Mock()
        stock.info = {
            "longName": "Amazon",
            "regularMarketPrice": 180.0,
            "dividendYield": None,
            "dividendRate": None,
        }
        mock_ticker.return_value = stock

        result = analyze_dividends("AMZN")

        assert result is not None
        assert result.income_rating == "no_dividend"


class TestIntegration:
    """Integration tests (require network)."""

    @pytest.mark.integration
    def test_real_stock_analysis(self):
        """Live analysis of AAPL returns populated StockData."""
        data = fetch_stock_data("AAPL", verbose=False)

        assert data is not None
        assert data.ticker == "AAPL"
        assert data.info is not None
        assert "regularMarketPrice" in data.info

    @pytest.mark.integration
    def test_real_crypto_analysis(self):
        """Live analysis of BTC-USD is classified as crypto."""
        data = fetch_stock_data("BTC-USD", verbose=False)

        assert data is not None
        assert data.asset_type == "crypto"


# Run tests
if __name__ == "__main__":
    pytest.main([__file__, "-v", "--ignore-glob=*integration*"])
+ +Usage: + uv run watchlist.py add AAPL # Add to watchlist + uv run watchlist.py add AAPL --target 200 # With price target + uv run watchlist.py add AAPL --stop 150 # With stop loss + uv run watchlist.py add AAPL --alert-on signal # Alert on signal change + uv run watchlist.py remove AAPL # Remove from watchlist + uv run watchlist.py list # Show watchlist + uv run watchlist.py check # Check for triggered alerts + uv run watchlist.py check --notify # Check and format for notification +""" + +import argparse +import json +import sys +from dataclasses import dataclass, asdict +from datetime import datetime, timezone +from pathlib import Path +from typing import Literal + +import yfinance as yf + +# Storage +WATCHLIST_DIR = Path.home() / ".clawdbot" / "skills" / "stock-analysis" +WATCHLIST_FILE = WATCHLIST_DIR / "watchlist.json" + + +@dataclass +class WatchlistItem: + ticker: str + added_at: str + price_at_add: float | None = None + target_price: float | None = None # Alert when price >= target + stop_price: float | None = None # Alert when price <= stop + alert_on_signal: bool = False # Alert when recommendation changes + last_signal: str | None = None # BUY/HOLD/SELL + last_check: str | None = None + notes: str | None = None + + +@dataclass +class Alert: + ticker: str + alert_type: Literal["target_hit", "stop_hit", "signal_change"] + message: str + current_price: float + trigger_value: float | str + timestamp: str + + +def ensure_dirs(): + """Create storage directories.""" + WATCHLIST_DIR.mkdir(parents=True, exist_ok=True) + + +def load_watchlist() -> list[WatchlistItem]: + """Load watchlist from file.""" + if WATCHLIST_FILE.exists(): + data = json.loads(WATCHLIST_FILE.read_text()) + return [WatchlistItem(**item) for item in data] + return [] + + +def save_watchlist(items: list[WatchlistItem]): + """Save watchlist to file.""" + ensure_dirs() + data = [asdict(item) for item in items] + WATCHLIST_FILE.write_text(json.dumps(data, indent=2)) + + +def 
def get_current_price(ticker: str) -> float | None:
    """Look up the current market price via yfinance; None on any failure."""
    try:
        info = yf.Ticker(ticker).info
        price = info.get("regularMarketPrice") or info.get("currentPrice")
        return float(price) if price else None
    except Exception:
        return None


def add_to_watchlist(
    ticker: str,
    target_price: float | None = None,
    stop_price: float | None = None,
    alert_on_signal: bool = False,
    notes: str | None = None,
) -> dict:
    """Add *ticker* to the watchlist, or merge new settings into an entry.

    The ticker is validated by fetching a live price, so unknown symbols
    fail fast. An existing entry keeps its old settings unless new truthy
    ones are supplied.
    """
    ticker = ticker.upper()

    # Validate the ticker by pricing it.
    current_price = get_current_price(ticker)
    if current_price is None:
        return {"success": False, "error": f"Invalid ticker: {ticker}"}

    watchlist = load_watchlist()

    # Already watched? Merge the new settings in.
    # NOTE(review): the `or`-merge means falsy values (0.0, False, "") can
    # never overwrite an existing setting -- confirm this is intended.
    for entry in watchlist:
        if entry.ticker == ticker:
            entry.target_price = target_price or entry.target_price
            entry.stop_price = stop_price or entry.stop_price
            entry.alert_on_signal = alert_on_signal or entry.alert_on_signal
            entry.notes = notes or entry.notes
            save_watchlist(watchlist)
            return {
                "success": True,
                "action": "updated",
                "ticker": ticker,
                "current_price": current_price,
                "target_price": entry.target_price,
                "stop_price": entry.stop_price,
                "alert_on_signal": entry.alert_on_signal,
            }

    # Brand-new entry.
    watchlist.append(WatchlistItem(
        ticker=ticker,
        added_at=datetime.now(timezone.utc).isoformat(),
        price_at_add=current_price,
        target_price=target_price,
        stop_price=stop_price,
        alert_on_signal=alert_on_signal,
        notes=notes,
    ))
    save_watchlist(watchlist)

    return {
        "success": True,
        "action": "added",
        "ticker": ticker,
        "current_price": current_price,
        "target_price": target_price,
        "stop_price": stop_price,
        "alert_on_signal": alert_on_signal,
    }
def remove_from_watchlist(ticker: str) -> dict:
    """Remove *ticker* from the watchlist.

    Returns {"success": True, "removed": ticker} on success, or an error
    dict if the ticker was not being watched.
    """
    ticker = ticker.upper()
    watchlist = load_watchlist()

    original_len = len(watchlist)
    watchlist = [item for item in watchlist if item.ticker != ticker]

    if len(watchlist) == original_len:
        return {"success": False, "error": f"{ticker} not in watchlist"}

    save_watchlist(watchlist)
    return {"success": True, "removed": ticker}


def list_watchlist() -> dict:
    """List all watchlist items with current prices and derived percentages.

    For each item, reports the %-change since it was added and the
    %-distance to the target/stop prices. Fields are None when a price
    could not be fetched or the corresponding threshold is unset.
    """
    watchlist = load_watchlist()

    if not watchlist:
        return {"success": True, "items": [], "count": 0}

    items = []
    for item in watchlist:
        current_price = get_current_price(item.ticker)

        # Calculate change since added
        change_pct = None
        if current_price and item.price_at_add:
            change_pct = ((current_price - item.price_at_add) / item.price_at_add) * 100

        # Distance to target/stop
        to_target = None
        to_stop = None
        if current_price:
            if item.target_price:
                to_target = ((item.target_price - current_price) / current_price) * 100
            if item.stop_price:
                to_stop = ((item.stop_price - current_price) / current_price) * 100

        items.append({
            "ticker": item.ticker,
            "current_price": current_price,
            "price_at_add": item.price_at_add,
            # Bug fix: test "is not None" -- a legitimate 0.0 percentage used
            # to be reported as None because 0.0 is falsy.
            "change_pct": round(change_pct, 2) if change_pct is not None else None,
            "target_price": item.target_price,
            "to_target_pct": round(to_target, 2) if to_target is not None else None,
            "stop_price": item.stop_price,
            "to_stop_pct": round(to_stop, 2) if to_stop is not None else None,
            "alert_on_signal": item.alert_on_signal,
            "last_signal": item.last_signal,
            "added_at": item.added_at[:10],
            "notes": item.notes,
        })

    return {"success": True, "items": items, "count": len(items)}
def check_alerts(notify_format: bool = False) -> dict:
    """Evaluate every watchlist entry and collect triggered alerts.

    Checks price targets, stop levels, and (when enabled) recommendation
    changes by shelling out to analyze_stock.py. Updates each entry's
    last_signal/last_check and persists the watchlist before returning.
    """
    watchlist = load_watchlist()
    alerts: list[Alert] = []
    now = datetime.now(timezone.utc).isoformat()

    for item in watchlist:
        current_price = get_current_price(item.ticker)
        if current_price is None:
            continue  # unpriceable right now; try again next run

        # Target reached?
        if item.target_price and current_price >= item.target_price:
            alerts.append(Alert(
                ticker=item.ticker,
                alert_type="target_hit",
                message=f"🎯 {item.ticker} hit target! ${current_price:.2f} >= ${item.target_price:.2f}",
                current_price=current_price,
                trigger_value=item.target_price,
                timestamp=now,
            ))

        # Stop breached?
        if item.stop_price and current_price <= item.stop_price:
            alerts.append(Alert(
                ticker=item.ticker,
                alert_type="stop_hit",
                message=f"🛑 {item.ticker} hit stop! ${current_price:.2f} <= ${item.stop_price:.2f}",
                current_price=current_price,
                trigger_value=item.stop_price,
                timestamp=now,
            ))

        # Recommendation flipped? (runs the analyzer as a subprocess)
        if item.alert_on_signal:
            try:
                import subprocess
                proc = subprocess.run(
                    ["uv", "run", str(Path(__file__).parent / "analyze_stock.py"), item.ticker, "--output", "json"],
                    capture_output=True,
                    text=True,
                    timeout=60,
                )
                if proc.returncode == 0:
                    analysis = json.loads(proc.stdout)
                    new_signal = analysis.get("recommendation")

                    if item.last_signal and new_signal and new_signal != item.last_signal:
                        alerts.append(Alert(
                            ticker=item.ticker,
                            alert_type="signal_change",
                            message=f"📊 {item.ticker} signal changed: {item.last_signal} → {new_signal}",
                            current_price=current_price,
                            trigger_value=f"{item.last_signal} → {new_signal}",
                            timestamp=now,
                        ))

                    # Remember the latest recommendation either way.
                    item.last_signal = new_signal
            except Exception:
                pass  # analyzer failure must not break the other checks

        item.last_check = now

    # Persist last_signal/last_check updates.
    save_watchlist(watchlist)

    serialized = [asdict(a) for a in alerts]

    if notify_format and alerts:
        # Telegram-style notification text
        lines = ["📢 **Stock Alerts**\n"]
        lines.extend(a.message for a in alerts)
        return {"success": True, "alerts": serialized, "notification": "\n".join(lines)}

    return {"success": True, "alerts": serialized, "count": len(alerts)}


def main():
    """Command-line entry point: add/remove/list/check subcommands."""
    parser = argparse.ArgumentParser(description="Stock Watchlist with Alerts")
    subparsers = parser.add_subparsers(dest="command", required=True)

    # Add
    p_add = subparsers.add_parser("add", help="Add ticker to watchlist")
    p_add.add_argument("ticker", help="Stock ticker")
    p_add.add_argument("--target", type=float, help="Target price for alert")
    p_add.add_argument("--stop", type=float, help="Stop loss price for alert")
    p_add.add_argument("--alert-on", choices=["signal"], help="Alert on signal change")
    p_add.add_argument("--notes", help="Notes")

    # Remove
    p_remove = subparsers.add_parser("remove", help="Remove ticker from watchlist")
    p_remove.add_argument("ticker", help="Stock ticker")

    # List
    subparsers.add_parser("list", help="List watchlist")

    # Check
    p_check = subparsers.add_parser("check", help="Check for triggered alerts")
    p_check.add_argument("--notify", action="store_true", help="Format for notification")

    args = parser.parse_args()

    # Dispatch table: each command maps to a zero-arg callable returning a dict.
    dispatch = {
        "add": lambda: add_to_watchlist(
            args.ticker,
            target_price=args.target,
            stop_price=args.stop,
            alert_on_signal=(args.alert_on == "signal"),
            notes=args.notes,
        ),
        "remove": lambda: remove_from_watchlist(args.ticker),
        "list": list_watchlist,
        "check": lambda: check_alerts(notify_format=args.notify),
    }
    print(json.dumps(dispatch[args.command](), indent=2))


if __name__ == "__main__":
    main()