Initial commit with translated description
This commit is contained in:
34
tests/README.md
Normal file
34
tests/README.md
Normal file
@@ -0,0 +1,34 @@
|
||||
# Unit Tests
|
||||
|
||||
## Setup
|
||||
|
||||
```bash
|
||||
# Install test dependencies
|
||||
pip install -r requirements-test.txt
|
||||
|
||||
# Run tests
|
||||
pytest
|
||||
|
||||
# Run with coverage
|
||||
pytest --cov=scripts --cov-report=html
|
||||
|
||||
# Run specific test file
|
||||
pytest tests/test_portfolio.py
|
||||
```
|
||||
|
||||
## Test Structure
|
||||
|
||||
- `test_portfolio.py` - Portfolio CRUD operations
|
||||
- `test_fetch_news.py` - RSS feed parsing with mocked responses
|
||||
- `test_setup.py` - Setup wizard validation
|
||||
- `fixtures/` - Sample RSS and portfolio data
|
||||
|
||||
## Coverage Target
|
||||
|
||||
60%+ coverage for core functions (portfolio, fetch_news, setup).
|
||||
|
||||
## Notes
|
||||
|
||||
- Tests use `tmp_path` for file isolation
|
||||
- Network calls are mocked with `unittest.mock`
|
||||
- `pytest-mock` provides `mocker` fixture for advanced mocking
|
||||
4
tests/fixtures/sample_portfolio.csv
vendored
Normal file
4
tests/fixtures/sample_portfolio.csv
vendored
Normal file
@@ -0,0 +1,4 @@
|
||||
symbol,name,category,notes
|
||||
AAPL,Apple Inc,Tech,Core holding
|
||||
TSLA,Tesla Inc,Auto,Growth play
|
||||
MSFT,Microsoft,Tech,Dividend stock
|
||||
|
20
tests/fixtures/sample_rss.xml
vendored
Normal file
20
tests/fixtures/sample_rss.xml
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<rss version="2.0">
|
||||
<channel>
|
||||
<title>Test Market News</title>
|
||||
<link>https://example.com</link>
|
||||
<description>Sample RSS feed for testing</description>
|
||||
<item>
|
||||
<title>Apple Stock Rises 5%</title>
|
||||
<link>https://example.com/apple-rises</link>
|
||||
<description>Apple Inc. shares rose 5% today on strong earnings.</description>
|
||||
<pubDate>Mon, 20 Jan 2025 10:00:00 GMT</pubDate>
|
||||
</item>
|
||||
<item>
|
||||
<title>Tesla Announces New Model</title>
|
||||
<link>https://example.com/tesla-model</link>
|
||||
<description>Tesla unveils new electric vehicle model.</description>
|
||||
<pubDate>Mon, 20 Jan 2025 11:30:00 GMT</pubDate>
|
||||
</item>
|
||||
</channel>
|
||||
</rss>
|
||||
110
tests/test_alerts.py
Normal file
110
tests/test_alerts.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""Tests for alerts.check_alerts(): triggering, same-day dedup, and snoozing."""

import json
import sys
from datetime import datetime, timedelta
from pathlib import Path
from unittest.mock import Mock, patch

import pytest

# Make the scripts/ directory importable without installing the package.
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

from alerts import check_alerts, load_alerts, save_alerts


@pytest.fixture
def mock_alerts_data():
    """Two active USD alerts: AAPL never fired, TSLA fired before."""
    return {
        "_meta": {"version": 1, "supported_currencies": ["USD", "EUR"]},
        "alerts": [
            {
                "ticker": "AAPL",
                "target_price": 150.0,
                "currency": "USD",
                "note": "Buy Apple",
                "triggered_count": 0,
                "last_triggered": None,
            },
            {
                "ticker": "TSLA",
                "target_price": 200.0,
                "currency": "USD",
                "note": "Buy Tesla",
                "triggered_count": 5,
                "last_triggered": "2026-01-26T10:00:00",
            },
        ],
    }


def _install_alerts_file(tmp_path, monkeypatch, data):
    """Point alerts.ALERTS_FILE at a temp file seeded with *data*."""
    path = tmp_path / "alerts.json"
    monkeypatch.setattr("alerts.ALERTS_FILE", path)
    path.write_text(json.dumps(data))
    return path


def _run_check(quotes):
    """Run check_alerts() with the market-data fetcher stubbed to *quotes*."""
    with patch("alerts.get_fetch_market_data") as getter:
        getter.return_value = Mock(return_value=quotes)
        return check_alerts()


def test_check_alerts_trigger(mock_alerts_data, monkeypatch, tmp_path):
    """A price at/below target triggers; one above target keeps watching."""
    alerts_path = _install_alerts_file(tmp_path, monkeypatch, mock_alerts_data)

    # AAPL trades under its target, TSLA over.
    results = _run_check({"AAPL": {"price": 145.0}, "TSLA": {"price": 210.0}})

    assert len(results["triggered"]) == 1
    assert results["triggered"][0]["ticker"] == "AAPL"
    assert results["triggered"][0]["current_price"] == 145.0

    assert len(results["watching"]) == 1
    assert results["watching"][0]["ticker"] == "TSLA"

    # Bookkeeping for the fired alert must be written back to disk.
    saved = json.loads(alerts_path.read_text())
    aapl_alert = next(a for a in saved["alerts"] if a["ticker"] == "AAPL")
    assert aapl_alert["triggered_count"] == 1
    assert aapl_alert["last_triggered"] is not None


def test_check_alerts_deduplication(mock_alerts_data, monkeypatch, tmp_path):
    """An alert already fired today must not increment triggered_count again."""
    mock_alerts_data["alerts"][0]["last_triggered"] = datetime.now().isoformat()
    mock_alerts_data["alerts"][0]["triggered_count"] = 1

    alerts_path = _install_alerts_file(tmp_path, monkeypatch, mock_alerts_data)

    _run_check({"AAPL": {"price": 140.0}, "TSLA": {"price": 250.0}})

    saved = json.loads(alerts_path.read_text())
    aapl_alert = next(a for a in saved["alerts"] if a["ticker"] == "AAPL")
    assert aapl_alert["triggered_count"] == 1  # Still 1, didn't increment because same day


def test_check_alerts_snooze(mock_alerts_data, monkeypatch, tmp_path):
    """A snoozed alert is skipped even when its price condition is met."""
    mock_alerts_data["alerts"][0]["snooze_until"] = (
        datetime.now() + timedelta(days=1)
    ).isoformat()

    _install_alerts_file(tmp_path, monkeypatch, mock_alerts_data)

    results = _run_check({"AAPL": {"price": 140.0}, "TSLA": {"price": 190.0}})

    # AAPL is snoozed, so only TSLA should be in triggered.
    assert len(results["triggered"]) == 1
    assert results["triggered"][0]["ticker"] == "TSLA"
    assert all(t["ticker"] != "AAPL" for t in results["triggered"])
|
||||
390
tests/test_alerts_extended.py
Normal file
390
tests/test_alerts_extended.py
Normal file
@@ -0,0 +1,390 @@
|
||||
"""Extended tests for alerts.py - price target alerts."""

import json
import sys
from argparse import Namespace
from datetime import datetime, timedelta
from io import StringIO
from pathlib import Path
from unittest.mock import Mock, patch

import pytest

# Make the scripts/ directory importable without installing the package.
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

from alerts import (
    load_alerts,
    save_alerts,
    get_alert_by_ticker,
    format_price,
    cmd_list,
    cmd_set,
    cmd_delete,
    cmd_snooze,
    cmd_update,
    SUPPORTED_CURRENCIES,
)


def _alert(**overrides):
    """Build a fully-populated alert record, then apply *overrides*."""
    record = {
        "ticker": "AAPL",
        "target_price": 150.0,
        "currency": "USD",
        "note": "Buy Apple",
        "set_by": "art",
        "set_date": "2026-01-15",
        "status": "active",
        "snooze_until": None,
        "triggered_count": 0,
        "last_triggered": None,
    }
    record.update(overrides)
    return record


@pytest.fixture
def sample_alerts_data():
    """Sample alerts data for testing: one fresh alert, one already fired."""
    return {
        "_meta": {"version": 1, "supported_currencies": SUPPORTED_CURRENCIES},
        "alerts": [
            _alert(),
            _alert(
                ticker="TSLA",
                target_price=200.0,
                note="Buy Tesla",
                set_by="",
                set_date="2026-01-20",
                triggered_count=5,
                last_triggered="2026-01-26T10:00:00",
            ),
        ],
    }


@pytest.fixture
def alerts_file(tmp_path, sample_alerts_data):
    """Write the sample data to a temporary alerts.json and return its path."""
    alerts_path = tmp_path / "alerts.json"
    alerts_path.write_text(json.dumps(sample_alerts_data))
    return alerts_path
|
||||
|
||||
|
||||
class TestLoadAlerts:
    """Tests for load_alerts()."""

    def test_load_existing_file(self, alerts_file, monkeypatch):
        """An existing file is parsed and returned verbatim."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        data = load_alerts()

        assert "_meta" in data
        assert len(data["alerts"]) == 2
        assert data["alerts"][0]["ticker"] == "AAPL"

    def test_load_missing_file(self, tmp_path, monkeypatch):
        """A missing file yields the empty default structure."""
        monkeypatch.setattr("alerts.ALERTS_FILE", tmp_path / "missing.json")

        data = load_alerts()

        assert data["_meta"]["version"] == 1
        assert data["alerts"] == []
        assert "supported_currencies" in data["_meta"]
|
||||
|
||||
|
||||
class TestSaveAlerts:
    """Tests for save_alerts()."""

    def _save(self, tmp_path, monkeypatch, data):
        """Redirect ALERTS_FILE to a temp path, save *data*, return it parsed."""
        path = tmp_path / "alerts.json"
        monkeypatch.setattr("alerts.ALERTS_FILE", path)
        save_alerts(data)
        return json.loads(path.read_text())

    def test_save_updates_timestamp(self, tmp_path, sample_alerts_data, monkeypatch):
        """Saving stamps _meta.updated_at."""
        saved = self._save(tmp_path, monkeypatch, sample_alerts_data)
        assert "updated_at" in saved["_meta"]

    def test_save_preserves_data(self, tmp_path, sample_alerts_data, monkeypatch):
        """Saving round-trips every alert record."""
        saved = self._save(tmp_path, monkeypatch, sample_alerts_data)
        assert len(saved["alerts"]) == 2
        assert saved["alerts"][0]["ticker"] == "AAPL"
|
||||
|
||||
|
||||
class TestGetAlertByTicker:
    """Tests for get_alert_by_ticker()."""

    def test_find_existing_alert(self, sample_alerts_data):
        """An exact ticker match returns the full record."""
        hit = get_alert_by_ticker(sample_alerts_data["alerts"], "AAPL")

        assert hit is not None
        assert hit["ticker"] == "AAPL"
        assert hit["target_price"] == 150.0

    def test_find_case_insensitive(self, sample_alerts_data):
        """Lookup ignores the case of the requested ticker."""
        hit = get_alert_by_ticker(sample_alerts_data["alerts"], "aapl")

        assert hit is not None
        assert hit["ticker"] == "AAPL"

    def test_not_found_returns_none(self, sample_alerts_data):
        """An unknown ticker yields None."""
        assert get_alert_by_ticker(sample_alerts_data["alerts"], "GOOG") is None
|
||||
|
||||
|
||||
class TestFormatPrice:
    """Tests for format_price()."""

    def test_format_usd(self):
        """USD gets a $ prefix with thousands separators."""
        assert format_price(150.50, "USD") == "$150.50"
        assert format_price(1234.56, "USD") == "$1,234.56"

    def test_format_eur(self):
        """EUR gets the € symbol."""
        assert format_price(100.00, "EUR") == "€100.00"

    def test_format_jpy(self):
        """JPY is rendered without decimals."""
        assert format_price(15000, "JPY") == "¥15,000"

    def test_format_sgd(self):
        """SGD uses the S$ prefix."""
        assert format_price(50.00, "SGD") == "S$50.00"

    def test_format_mxn(self):
        """MXN uses the MX$ prefix."""
        assert format_price(100.00, "MXN") == "MX$100.00"

    def test_format_unknown_currency(self):
        """Unrecognized currencies fall back to a currency-code prefix."""
        rendered = format_price(100.00, "GBP")
        assert "GBP" in rendered
        assert "100.00" in rendered
|
||||
|
||||
|
||||
class TestCmdList:
    """Tests for cmd_list()."""

    def test_list_empty_alerts(self, tmp_path, monkeypatch, capsys):
        """With no alerts, a friendly empty message is printed."""
        path = tmp_path / "alerts.json"
        path.write_text(json.dumps({"_meta": {}, "alerts": []}))
        monkeypatch.setattr("alerts.ALERTS_FILE", path)

        cmd_list(Namespace())

        assert "No price alerts set" in capsys.readouterr().out

    def test_list_active_alerts(self, alerts_file, monkeypatch, capsys):
        """Active alerts are listed with formatted target prices."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_list(Namespace())

        out = capsys.readouterr().out
        assert "Price Alerts" in out
        assert "AAPL" in out
        assert "$150.00" in out

    def test_list_snoozed_alerts(self, tmp_path, monkeypatch, capsys):
        """Snoozed alerts appear in their own section."""
        future = (datetime.now() + timedelta(days=7)).isoformat()
        payload = {
            "_meta": {},
            "alerts": [
                {"ticker": "AAPL", "target_price": 150, "currency": "USD", "snooze_until": future}
            ],
        }
        path = tmp_path / "alerts.json"
        path.write_text(json.dumps(payload))
        monkeypatch.setattr("alerts.ALERTS_FILE", path)

        cmd_list(Namespace())

        out = capsys.readouterr().out
        assert "Snoozed" in out
        assert "AAPL" in out
|
||||
|
||||
|
||||
class TestCmdSet:
    """Tests for cmd_set()."""

    def test_set_new_alert(self, alerts_file, monkeypatch, capsys):
        """A valid new ticker is added and confirmed on stdout."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        with patch("alerts.get_fetch_market_data") as fmd_getter:
            # Current GOOG quote is above the requested target.
            fmd_getter.return_value = Mock(return_value={"GOOG": {"price": 175.0}})

            cmd_set(Namespace(ticker="GOOG", target=150.0, currency="USD",
                              note="Buy Google", user="art"))

        assert "Alert set: GOOG" in capsys.readouterr().out

        stored = json.loads(alerts_file.read_text())
        goog = next((a for a in stored["alerts"] if a["ticker"] == "GOOG"), None)
        assert goog is not None
        assert goog["target_price"] == 150.0

    def test_set_duplicate_alert(self, alerts_file, monkeypatch, capsys):
        """Setting a ticker that already has an alert is rejected."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_set(Namespace(ticker="AAPL", target=140.0, currency="USD", note="", user=""))

        assert "already exists" in capsys.readouterr().out

    def test_set_invalid_target(self, alerts_file, monkeypatch, capsys):
        """Non-positive target prices are rejected."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_set(Namespace(ticker="GOOG", target=-10.0, currency="USD", note="", user=""))

        assert "must be greater than 0" in capsys.readouterr().out

    def test_set_invalid_currency(self, alerts_file, monkeypatch, capsys):
        """Unsupported currencies are rejected."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_set(Namespace(ticker="GOOG", target=150.0, currency="XYZ", note="", user=""))

        assert "not supported" in capsys.readouterr().out
|
||||
|
||||
|
||||
class TestCmdDelete:
    """Tests for cmd_delete()."""

    def test_delete_existing_alert(self, alerts_file, monkeypatch, capsys):
        """Deleting a known ticker removes it and confirms on stdout."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_delete(Namespace(ticker="AAPL"))

        assert "Alert deleted: AAPL" in capsys.readouterr().out

        remaining = json.loads(alerts_file.read_text())["alerts"]
        assert not any(a["ticker"] == "AAPL" for a in remaining)

    def test_delete_nonexistent_alert(self, alerts_file, monkeypatch, capsys):
        """Deleting an unknown ticker reports a lookup failure."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_delete(Namespace(ticker="GOOG"))

        assert "No alert found" in capsys.readouterr().out
|
||||
|
||||
|
||||
class TestCmdSnooze:
    """Tests for cmd_snooze()."""

    def test_snooze_alert(self, alerts_file, monkeypatch, capsys):
        """Snoozing sets snooze_until and confirms on stdout."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_snooze(Namespace(ticker="AAPL", days=7))

        assert "Alert snoozed: AAPL" in capsys.readouterr().out

        stored = json.loads(alerts_file.read_text())
        aapl = next(a for a in stored["alerts"] if a["ticker"] == "AAPL")
        assert aapl["snooze_until"] is not None

    def test_snooze_nonexistent_alert(self, alerts_file, monkeypatch, capsys):
        """Snoozing an unknown ticker reports a lookup failure."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_snooze(Namespace(ticker="GOOG", days=7))

        assert "No alert found" in capsys.readouterr().out

    def test_snooze_default_days(self, alerts_file, monkeypatch, capsys):
        """With days=None the command still snoozes (default window applies)."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_snooze(Namespace(ticker="AAPL", days=None))

        assert "Alert snoozed" in capsys.readouterr().out
|
||||
|
||||
|
||||
class TestCmdUpdate:
    """Tests for cmd_update()."""

    def test_update_target_price(self, alerts_file, monkeypatch, capsys):
        """Updating prints old and new targets and persists the change."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_update(Namespace(ticker="AAPL", target=140.0, note=None))

        out = capsys.readouterr().out
        assert "Alert updated: AAPL" in out
        assert "$150.00" in out  # Old price
        assert "$140.00" in out  # New price

        stored = json.loads(alerts_file.read_text())
        aapl = next(a for a in stored["alerts"] if a["ticker"] == "AAPL")
        assert aapl["target_price"] == 140.0

    def test_update_with_note(self, alerts_file, monkeypatch, capsys):
        """A note passed alongside the target replaces the stored note."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_update(Namespace(ticker="AAPL", target=145.0, note="New buy zone"))

        stored = json.loads(alerts_file.read_text())
        aapl = next(a for a in stored["alerts"] if a["ticker"] == "AAPL")
        assert aapl["note"] == "New buy zone"

    def test_update_nonexistent_alert(self, alerts_file, monkeypatch, capsys):
        """Updating an unknown ticker reports a lookup failure."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_update(Namespace(ticker="GOOG", target=150.0, note=None))

        assert "No alert found" in capsys.readouterr().out

    def test_update_invalid_target(self, alerts_file, monkeypatch, capsys):
        """Non-positive targets are rejected on update too."""
        monkeypatch.setattr("alerts.ALERTS_FILE", alerts_file)

        cmd_update(Namespace(ticker="AAPL", target=-10.0, note=None))

        assert "must be greater than 0" in capsys.readouterr().out
|
||||
101
tests/test_briefing.py
Normal file
101
tests/test_briefing.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Tests for briefing.generate_and_send(): success, WhatsApp fan-out, failure."""

import json
import subprocess
import sys
from pathlib import Path
from unittest.mock import Mock, patch

import pytest

# Make the scripts/ directory importable without installing the package.
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

from briefing import generate_and_send


def _cli_args(**options):
    """Build a Mock standing in for parsed CLI arguments."""
    args = Mock()
    for name, value in options.items():
        setattr(args, name, value)
    return args


def _completed(returncode, stdout="", stderr=""):
    """Build a Mock mimicking a subprocess.CompletedProcess."""
    proc = Mock()
    proc.returncode = returncode
    proc.stdout = stdout
    proc.stderr = stderr
    return proc


def test_generate_and_send_success():
    """A zero exit from summarize.py yields the macro message."""
    payload = {
        "macro_message": "Macro Summary",
        "portfolio_message": "Portfolio Summary",
        "summary": "Full Summary",
    }

    with patch("briefing.subprocess.run") as mock_run:
        mock_run.return_value = _completed(0, stdout=json.dumps(payload))

        args = _cli_args(
            time="morning",
            style="briefing",
            lang="en",
            deadline=300,
            fast=False,
            llm=False,
            debug=False,
            json=True,
            send=False,
        )
        result = generate_and_send(args)

        assert result == "Macro Summary"
        assert mock_run.called
        # summarize.py must be invoked with the requested time window.
        invoked = mock_run.call_args[0][0]
        assert "summarize.py" in str(invoked[1])
        assert "--time" in invoked
        assert "morning" in invoked


def test_generate_and_send_with_whatsapp():
    """With --send, both messages are pushed to the WhatsApp group."""
    payload = {
        "macro_message": "Macro Summary",
        "portfolio_message": "Portfolio Summary",
    }

    with patch("briefing.subprocess.run") as mock_run, \
            patch("briefing.send_to_whatsapp") as mock_send:
        # First (only) subprocess call is summarize.py.
        mock_run.return_value = _completed(0, stdout=json.dumps(payload))

        args = _cli_args(
            time="evening",
            style="briefing",
            lang="en",
            deadline=None,
            fast=True,
            llm=False,
            json=False,
            send=True,
            group="Test Group",
            debug=False,
        )
        generate_and_send(args)

        # Each message goes out separately to the same group.
        assert mock_send.call_count == 2
        mock_send.assert_any_call("Macro Summary", "Test Group")
        mock_send.assert_any_call("Portfolio Summary", "Test Group")


def test_generate_and_send_failure():
    """A non-zero exit from summarize.py aborts via SystemExit."""
    with patch("briefing.subprocess.run") as mock_run:
        mock_run.return_value = _completed(1, stderr="Error occurred")

        args = _cli_args(
            time="morning",
            style="briefing",
            lang="en",
            deadline=None,
            fast=False,
            llm=False,
            json=False,
            send=False,
            debug=False,
        )

        with pytest.raises(SystemExit):
            generate_and_send(args)
|
||||
111
tests/test_earnings.py
Normal file
111
tests/test_earnings.py
Normal file
@@ -0,0 +1,111 @@
|
||||
"""Tests for earnings.py: Finnhub fetch, cache round-trip, briefing section."""

import json
import sys
from datetime import datetime, timedelta
from pathlib import Path
from unittest.mock import MagicMock, Mock, patch

import pytest

# Make the scripts/ directory importable without installing the package.
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

from earnings import (
    fetch_all_earnings_finnhub,
    get_briefing_section,
    load_earnings_cache,
    save_earnings_cache,
    refresh_earnings
)


@pytest.fixture
def mock_finnhub_response():
    """Finnhub calendar payload: one after-close and one pre-open report."""
    return {
        "earningsCalendar": [
            {
                "symbol": "AAPL",
                "date": "2026-02-01",
                "hour": "amc",
                "epsEstimate": 1.5,
                "revenueEstimate": 100000000,
                "quarter": 1,
                "year": 2026,
            },
            {
                "symbol": "TSLA",
                "date": "2026-01-27",
                "hour": "bmo",
                "epsEstimate": 0.8,
                "revenueEstimate": 25000000,
                "quarter": 4,
                "year": 2025,
            },
        ]
    }


def test_fetch_earnings_finnhub_success(mock_finnhub_response):
    """The calendar payload is reshaped into a per-symbol mapping."""
    with patch("earnings.urlopen") as mock_urlopen:
        # Stub the HTTP response as a context manager yielding JSON bytes.
        response = MagicMock()
        response.read.return_value = json.dumps(mock_finnhub_response).encode("utf-8")
        response.__enter__.return_value = response
        mock_urlopen.return_value = response

        with patch("earnings.get_finnhub_key", return_value="fake_key"):
            result = fetch_all_earnings_finnhub(days_ahead=30)

        assert "AAPL" in result
        assert result["AAPL"]["date"] == "2026-02-01"
        assert result["AAPL"]["time"] == "amc"
        assert "TSLA" in result
        assert result["TSLA"]["date"] == "2026-01-27"


def test_cache_logic(tmp_path, monkeypatch):
    """save_earnings_cache() then load_earnings_cache() round-trips data."""
    cache_file = tmp_path / "earnings_calendar.json"
    monkeypatch.setattr("earnings.EARNINGS_CACHE", cache_file)
    monkeypatch.setattr("earnings.CACHE_DIR", tmp_path)

    save_earnings_cache({
        "last_updated": "2026-01-27T08:00:00",
        "earnings": {"AAPL": {"date": "2026-02-01"}},
    })
    assert cache_file.exists()

    reloaded = load_earnings_cache()
    assert reloaded["earnings"]["AAPL"]["date"] == "2026-02-01"


def test_get_briefing_section_output():
    """A same-day report renders ticker, name, session, and EPS estimate."""
    portfolio = [{"symbol": "AAPL", "name": "Apple", "category": "Tech"}]
    cache = {
        "last_updated": datetime.now().isoformat(),
        "earnings": {
            "AAPL": {
                "date": datetime.now().strftime("%Y-%m-%d"),
                "time": "amc",
                "eps_estimate": 1.5,
            }
        },
    }

    with patch("earnings.load_portfolio", return_value=portfolio), \
            patch("earnings.load_earnings_cache", return_value=cache), \
            patch("earnings.refresh_earnings", return_value=cache):
        section = get_briefing_section()

        assert "EARNINGS TODAY" in section
        assert "AAPL" in section
        assert "Apple" in section
        assert "after-close" in section
        assert "Est: $1.50" in section


def test_refresh_earnings_force(mock_finnhub_response):
    """force=True always fetches and persists the fresh calendar."""
    portfolio = [{"symbol": "AAPL", "name": "Apple"}]

    with patch("earnings.get_finnhub_key", return_value="fake_key"), \
            patch(
                "earnings.fetch_all_earnings_finnhub",
                return_value={"AAPL": mock_finnhub_response["earningsCalendar"][0]},
            ), \
            patch("earnings.save_earnings_cache") as mock_save:
        refresh_earnings(portfolio, force=True)

        assert mock_save.called
        saved_args, _ = mock_save.call_args
        assert "AAPL" in saved_args[0]["earnings"]
|
||||
136
tests/test_fetch_news.py
Normal file
136
tests/test_fetch_news.py
Normal file
@@ -0,0 +1,136 @@
|
||||
"""Tests for RSS feed fetching and parsing."""
import sys
from pathlib import Path

# Make the scripts/ directory importable without installing the package.
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

import json
import pytest
from unittest.mock import Mock, patch, MagicMock
from fetch_news import fetch_market_data, fetch_rss, _get_best_feed_url
from utils import clamp_timeout, compute_deadline


@pytest.fixture
def sample_rss_content():
    """Raw bytes of the bundled sample RSS fixture."""
    return (Path(__file__).parent / "fixtures" / "sample_rss.xml").read_bytes()


def test_fetch_rss_success(sample_rss_content):
    """A well-formed feed yields parsed articles and honors the timeout."""
    with patch("urllib.request.urlopen") as mock_urlopen:
        response = MagicMock()
        response.read.return_value = sample_rss_content
        response.__enter__.return_value = response
        mock_urlopen.return_value = response

        articles = fetch_rss("https://example.com/feed.xml", timeout=7)

        assert len(articles) == 2
        assert articles[0]["title"] == "Apple Stock Rises 5%"
        assert articles[1]["title"] == "Tesla Announces New Model"
        assert "apple-rises" in articles[0]["link"]
        assert mock_urlopen.call_args.kwargs["timeout"] == 7


def test_fetch_rss_network_error():
    """Network failures degrade to an empty article list."""
    with patch("urllib.request.urlopen", side_effect=Exception("Network error")):
        assert fetch_rss("https://example.com/feed.xml") == []


def test_get_best_feed_url_priority():
    """The 'top' key wins over other feed URLs."""
    source = {
        "name": "Test Source",
        "homepage": "https://example.com",
        "top": "https://example.com/top.xml",
        "markets": "https://example.com/markets.xml",
    }

    assert _get_best_feed_url(source) == "https://example.com/top.xml"


def test_get_best_feed_url_fallback():
    """Without priority keys, any http(s) URL value is used."""
    source = {
        "name": "Test Source",
        "feed": "https://example.com/feed.xml",
    }

    assert _get_best_feed_url(source) == "https://example.com/feed.xml"


def test_get_best_feed_url_none_if_no_urls():
    """A source with no URL-shaped values yields None."""
    source = {
        "name": "Test Source",
        "enabled": True,
        "note": "No URLs here",
    }

    assert _get_best_feed_url(source) is None


def test_get_best_feed_url_skips_non_urls():
    """Non-string / non-URL values are ignored during selection."""
    source = {
        "name": "Test Source",
        "enabled": True,
        "count": 5,
        "rss": "https://example.com/rss.xml",
    }

    assert _get_best_feed_url(source) == "https://example.com/rss.xml"


def test_clamp_timeout_respects_deadline(monkeypatch):
    """The per-call timeout shrinks to the time left before the deadline."""
    monkeypatch.setattr("utils.time.monotonic", lambda: 100.0)
    deadline = compute_deadline(5)
    # 3 seconds have "elapsed"; 2 remain of the 5-second budget.
    monkeypatch.setattr("utils.time.monotonic", lambda: 103.0)

    assert clamp_timeout(30, deadline) == 2


def test_clamp_timeout_deadline_exceeded(monkeypatch):
    """Once past the deadline, clamp_timeout raises TimeoutError."""
    monkeypatch.setattr("utils.time.monotonic", lambda: 200.0)
    deadline = compute_deadline(1)
    monkeypatch.setattr("utils.time.monotonic", lambda: 205.0)

    with pytest.raises(TimeoutError):
        clamp_timeout(30, deadline)


def test_fetch_market_data_price_fallback(monkeypatch):
    """With fallback enabled, a missing price falls back to the open."""
    quote = {
        "price": None,
        "open": 100,
        "prev_close": 105,
        "change_percent": None,
    }

    def fake_run(*_args, **_kwargs):
        # Mimic a successful openbb-quote subprocess emitting the sample quote.
        class Result:
            returncode = 0
            stdout = json.dumps(quote)
            stderr = ""

        return Result()

    monkeypatch.setattr("fetch_news.OPENBB_BINARY", "/bin/openbb-quote")
    monkeypatch.setattr("fetch_news.subprocess.run", fake_run)

    no_fallback = fetch_market_data(["^GSPC"], allow_price_fallback=False)
    assert no_fallback["^GSPC"]["price"] is None

    with_fallback = fetch_market_data(["^GSPC"], allow_price_fallback=True)
    assert with_fallback["^GSPC"]["price"] == 100
|
||||
76
tests/test_portfolio.py
Normal file
76
tests/test_portfolio.py
Normal file
@@ -0,0 +1,76 @@
|
||||
"""Tests for portfolio operations."""
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
# Add scripts to path for imports
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
import pytest
|
||||
from portfolio import load_portfolio, save_portfolio
|
||||
|
||||
|
||||
def test_load_portfolio_success(tmp_path, monkeypatch):
    """Loading a well-formed CSV yields one dict per row, in file order."""
    csv_path = tmp_path / "portfolio.csv"
    csv_path.write_text("symbol,name,category,notes,type\nAAPL,Apple,Tech,,\nTSLA,Tesla,Auto,,\n")
    monkeypatch.setattr("portfolio.PORTFOLIO_FILE", csv_path)

    rows = load_portfolio()

    assert len(rows) == 2
    first, second = rows
    assert (first["symbol"], first["name"]) == ("AAPL", "Apple")
    assert second["symbol"] == "TSLA"
|
||||
|
||||
|
||||
def test_load_portfolio_missing_file(tmp_path, monkeypatch):
    """A missing portfolio file is treated as an empty portfolio."""
    monkeypatch.setattr("portfolio.PORTFOLIO_FILE", tmp_path / "nonexistent.csv")

    assert load_portfolio() == []
|
||||
|
||||
|
||||
def test_save_portfolio(tmp_path, monkeypatch):
    """Saving writes a header row plus one line per position."""
    csv_path = tmp_path / "portfolio.csv"
    monkeypatch.setattr("portfolio.PORTFOLIO_FILE", csv_path)

    save_portfolio([
        {"symbol": "AAPL", "name": "Apple", "category": "Tech", "notes": "", "type": "stock"},
        {"symbol": "MSFT", "name": "Microsoft", "category": "Tech", "notes": "", "type": "stock"},
    ])

    written = csv_path.read_text()
    assert "symbol,name,category,notes,type" in written
    for ticker in ("AAPL", "MSFT"):
        assert ticker in written
|
||||
|
||||
|
||||
def test_save_empty_portfolio(tmp_path, monkeypatch):
    """Saving an empty portfolio still writes the CSV header line."""
    csv_path = tmp_path / "portfolio.csv"
    monkeypatch.setattr("portfolio.PORTFOLIO_FILE", csv_path)

    save_portfolio([])

    assert csv_path.read_text() == "symbol,name,category,notes,type\n"
|
||||
|
||||
|
||||
def test_load_portfolio_preserves_fields(tmp_path, monkeypatch):
    """Every CSV column round-trips into the loaded position dict."""
    csv_path = tmp_path / "portfolio.csv"
    csv_path.write_text("symbol,name,category,notes,type\nAAPL,Apple Inc,Tech,Core holding,stock\n")
    monkeypatch.setattr("portfolio.PORTFOLIO_FILE", csv_path)

    rows = load_portfolio()

    assert len(rows) == 1
    expected = {
        "symbol": "AAPL",
        "name": "Apple Inc",
        "category": "Tech",
        "notes": "Core holding",
        "type": "stock",
    }
    for key, value in expected.items():
        assert rows[0][key] == value
|
||||
70
tests/test_ranking.py
Normal file
70
tests/test_ranking.py
Normal file
@@ -0,0 +1,70 @@
|
||||
import sys
|
||||
from pathlib import Path
|
||||
import pytest
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Add scripts to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
from ranking import calculate_score, rank_headlines, classify_category
|
||||
|
||||
def test_classify_category():
    """Headlines map onto topic categories by keyword; unknown text -> general."""
    expectations = [
        ("Fed signals rate cut", "macro"),
        ("Apple earnings beat", "equities"),
        ("Oil prices surge", "energy"),
        ("AI chip demand remains high", "tech"),
        ("US imposes new sanctions on Russia", "geopolitics"),
    ]
    for title, category in expectations:
        assert category in classify_category(title)

    assert classify_category("Weather is nice") == ["general"]
|
||||
|
||||
def test_calculate_score_impact():
    """A market-moving headline from a credible source outscores filler news."""
    weights = {"market_impact": 0.4, "novelty": 0.2, "breadth": 0.2, "credibility": 0.1, "diversity": 0.1}
    seen_categories = {}

    major = {"title": "Fed announces emergency rate cut", "source": "Reuters", "published_at": datetime.now().isoformat()}
    minor = {"title": "Local coffee shop opens", "source": "Blog", "published_at": datetime.now().isoformat()}

    major_score = calculate_score(major, weights, seen_categories)
    minor_score = calculate_score(minor, weights, seen_categories)

    assert major_score > minor_score
|
||||
|
||||
def test_rank_headlines_deduplication():
    """Near-identical titles (case/punctuation noise) collapse into one entry."""
    feed = [
        {"title": "Fed signals rate cut in March", "source": "WSJ"},
        {"title": "FED SIGNALS RATE CUT IN MARCH!!!", "source": "Reuters"},  # duplicate of the first
        {"title": "Apple earnings are out", "source": "CNBC"},
    ]

    ranked = rank_headlines(feed)

    # The shouty Reuters copy collapses into the WSJ story.
    assert ranked["after_dedupe"] == 2
    assert len(ranked["must_read"]) <= 2
|
||||
|
||||
def test_rank_headlines_sorting():
    """Higher-impact, higher-credibility stories come first in must_read."""
    feed = [
        {"title": "Local news", "source": "SmallBlog", "description": "Nothing much"},
        {"title": "FED EMERGENCY RATE CUT", "source": "Bloomberg", "description": "Huge market impact"},
        {"title": "Nvidia Earnings Surprise", "source": "Reuters", "description": "AI demand surges"},
    ]

    ranked = rank_headlines(feed)

    # Macro impact + source credibility put the Fed story on top.
    top_titles = [item["title"] for item in ranked["must_read"][:2]]
    assert "FED" in top_titles[0]
    assert "Nvidia" in top_titles[1]
|
||||
|
||||
def test_source_cap():
    """No single source may dominate: the default cap is 2 items per source."""
    feed = [{"title": f"Story {i}", "source": "Reuters"} for i in range(10)]

    ranked = rank_headlines(feed)

    # Count Reuters items across both output buckets.
    from_reuters = sum(
        1
        for bucket in ("must_read", "scan")
        for item in ranked[bucket]
        if item["source"] == "Reuters"
    )
    assert from_reuters <= 2
|
||||
356
tests/test_research.py
Normal file
356
tests/test_research.py
Normal file
@@ -0,0 +1,356 @@
|
||||
"""Tests for research.py - deep research module."""
|
||||
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from unittest.mock import Mock, patch, MagicMock
|
||||
import subprocess
|
||||
|
||||
import pytest
|
||||
|
||||
# Add scripts to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
from research import (
|
||||
format_market_data,
|
||||
format_headlines,
|
||||
format_portfolio_news,
|
||||
gemini_available,
|
||||
research_with_gemini,
|
||||
format_raw_data_report,
|
||||
generate_research_content,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def sample_market_data():
    """Sample market data for testing.

    Mirrors the structure consumed by the formatting helpers: regional
    market sections with named indices, plus a flat list of headlines.
    """
    return {
        "markets": {
            "us": {
                "name": "US Markets",
                "indices": {
                    "SPY": {
                        "name": "S&P 500",
                        # One gainer (+1.25%) ...
                        "data": {"price": 5200.50, "change_percent": 1.25}
                    },
                    "QQQ": {
                        "name": "Nasdaq 100",
                        # ... and one loser (-0.50%) so tests cover both sign paths.
                        "data": {"price": 18500.00, "change_percent": -0.50}
                    }
                }
            },
            "europe": {
                "name": "European Markets",
                "indices": {
                    "DAX": {
                        "name": "DAX",
                        "data": {"price": 18200.00, "change_percent": 0.75}
                    }
                }
            }
        },
        "headlines": [
            {"source": "Reuters", "title": "Fed holds rates steady", "link": "https://example.com/1"},
            {"source": "Bloomberg", "title": "Tech stocks rally", "link": "https://example.com/2"},
        ]
    }
|
||||
|
||||
|
||||
@pytest.fixture
def sample_portfolio_data():
    """Sample portfolio data for testing.

    One gaining stock (AAPL) and one losing stock (MSFT) so formatting
    tests can exercise both sign paths; AAPL has two articles, MSFT one.
    """
    return {
        "stocks": {
            "AAPL": {
                "quote": {"price": 185.50, "change_percent": 2.3},
                "articles": [
                    {"title": "Apple reports strong earnings", "link": "https://example.com/aapl1"},
                    {"title": "iPhone sales beat expectations", "link": "https://example.com/aapl2"},
                ]
            },
            "MSFT": {
                "quote": {"price": 420.00, "change_percent": -1.1},
                "articles": [
                    {"title": "Microsoft cloud growth slows", "link": "https://example.com/msft1"},
                ]
            }
        }
    }
|
||||
|
||||
|
||||
class TestFormatMarketData:
    """Tests for format_market_data()."""

    def test_formats_market_indices(self, sample_market_data):
        """Indices render with region header, name, price, signed change, emoji."""
        output = format_market_data(sample_market_data)

        # Price is checked without a trailing zero ("5200.5", not "5200.50").
        for fragment in ("## Market Data", "### US Markets", "S&P 500",
                         "5200.5", "+1.25%", "📈"):
            assert fragment in output

    def test_shows_negative_change_emoji(self, sample_market_data):
        """A falling index gets the down-trend emoji and a minus sign."""
        output = format_market_data(sample_market_data)

        assert "Nasdaq 100" in output
        assert "-0.50%" in output
        assert "📉" in output

    def test_handles_empty_data(self):
        """Empty input still yields the section header, but no region headers."""
        output = format_market_data({})

        assert "## Market Data" in output
        assert "### " not in output

    def test_handles_missing_index_data(self):
        """An index entry lacking its 'data' key is skipped without crashing."""
        payload = {
            "markets": {
                "us": {
                    "name": "US Markets",
                    "indices": {
                        "SPY": {"name": "S&P 500"},  # no 'data' key
                    }
                }
            }
        }
        output = format_market_data(payload)

        assert "## Market Data" in output
|
||||
|
||||
|
||||
class TestFormatHeadlines:
    """Tests for format_headlines()."""

    def test_formats_headlines_with_links(self):
        """Each headline renders as '[source] title' followed by its URL line."""
        items = [
            {"source": "Reuters", "title": "Breaking news", "link": "https://example.com/1"},
            {"source": "Bloomberg", "title": "Market update", "link": "https://example.com/2"},
        ]
        output = format_headlines(items)

        assert "## Current Headlines" in output
        assert "[Reuters] Breaking news" in output
        assert "URL: https://example.com/1" in output
        assert "[Bloomberg] Market update" in output

    def test_handles_missing_source(self):
        """A headline without a source falls back to '[Unknown]'."""
        output = format_headlines([{"title": "No source headline", "link": "https://example.com"}])

        assert "[Unknown] No source headline" in output

    def test_handles_missing_link(self):
        """A headline without a link omits the URL line entirely."""
        output = format_headlines([{"source": "Reuters", "title": "No link"}])

        assert "[Reuters] No link" in output
        assert "URL:" not in output

    def test_limits_to_20_headlines(self):
        """Only the first 20 headlines are rendered."""
        items = [{"source": f"Source{i}", "title": f"Title {i}"} for i in range(30)]
        output = format_headlines(items)

        assert "[Source19]" in output      # 20th item (index 19) still present
        assert "[Source20]" not in output  # 21st item cut off

    def test_handles_empty_list(self):
        """An empty list still produces the section header."""
        assert "## Current Headlines" in format_headlines([])
|
||||
|
||||
|
||||
class TestFormatPortfolioNews:
    """Tests for format_portfolio_news()."""

    def test_formats_portfolio_stocks(self, sample_portfolio_data):
        """Each holding renders with its ticker, price, change, and articles."""
        output = format_portfolio_news(sample_portfolio_data)

        # Price is checked without a trailing zero ("$185.5", not "$185.50").
        for fragment in ("## Portfolio Analysis", "### AAPL", "$185.5",
                         "+2.30%", "Apple reports strong earnings"):
            assert fragment in output

    def test_shows_negative_changes(self, sample_portfolio_data):
        """Losing positions show a signed negative percentage."""
        output = format_portfolio_news(sample_portfolio_data)

        assert "### MSFT" in output
        assert "-1.10%" in output

    def test_limits_articles_to_5(self):
        """At most five articles are rendered per stock."""
        payload = {
            "stocks": {
                "AAPL": {
                    "quote": {"price": 185.0, "change_percent": 1.0},
                    "articles": [{"title": f"Article {i}"} for i in range(10)],
                }
            }
        }
        output = format_portfolio_news(payload)

        assert "Article 4" in output      # 5th article still shown
        assert "Article 5" not in output  # 6th article cut off

    def test_handles_empty_stocks(self):
        """An empty stocks dict still produces the section header."""
        assert "## Portfolio Analysis" in format_portfolio_news({"stocks": {}})
|
||||
|
||||
|
||||
class TestGeminiAvailable:
    """Tests for gemini_available()."""

    def test_returns_true_when_gemini_found(self):
        """shutil.which resolving a path means the CLI is available."""
        with patch("shutil.which", return_value="/usr/local/bin/gemini"):
            available = gemini_available()
        assert available is True

    def test_returns_false_when_gemini_not_found(self):
        """shutil.which returning None means the CLI is missing."""
        with patch("shutil.which", return_value=None):
            available = gemini_available()
        assert available is False
|
||||
|
||||
|
||||
class TestResearchWithGemini:
    """Tests for research_with_gemini().

    subprocess.run is patched in every test, so the real gemini CLI is
    never invoked.
    """

    def test_successful_research(self):
        """Execute gemini research successfully."""
        mock_result = Mock()
        mock_result.returncode = 0
        mock_result.stdout = "# Research Report\n\nMarket analysis..."

        with patch("subprocess.run", return_value=mock_result) as mock_run:
            result = research_with_gemini("Market data content")

            # On success the CLI's stdout is returned verbatim.
            assert result == "# Research Report\n\nMarket analysis..."
            mock_run.assert_called_once()

    def test_research_with_focus_areas(self):
        """Include focus areas in prompt."""
        mock_result = Mock()
        mock_result.returncode = 0
        mock_result.stdout = "Focused analysis"

        with patch("subprocess.run", return_value=mock_result) as mock_run:
            result = research_with_gemini("content", focus_areas=["earnings", "macro"])

            assert result == "Focused analysis"
            # Verify focus areas were in the prompt
            call_args = mock_run.call_args[0][0]  # argv list passed to subprocess.run
            prompt = call_args[1]  # prompt is the second argv element
            assert "earnings" in prompt
            assert "macro" in prompt

    def test_handles_gemini_error(self):
        """Handle gemini error gracefully."""
        mock_result = Mock()
        mock_result.returncode = 1
        mock_result.stderr = "API error"

        with patch("subprocess.run", return_value=mock_result):
            result = research_with_gemini("content")

            # Non-zero exit turns into a warning string containing stderr.
            assert "⚠️ Gemini research error" in result
            assert "API error" in result

    def test_handles_timeout(self):
        """Handle subprocess timeout."""
        with patch("subprocess.run", side_effect=subprocess.TimeoutExpired(cmd="gemini", timeout=120)):
            result = research_with_gemini("content")

            assert "⚠️ Gemini research timeout" in result

    def test_handles_missing_gemini(self):
        """Handle missing gemini CLI."""
        with patch("subprocess.run", side_effect=FileNotFoundError()):
            result = research_with_gemini("content")

            assert "⚠️ Gemini CLI not found" in result
|
||||
|
||||
|
||||
class TestFormatRawDataReport:
    """Tests for format_raw_data_report()."""

    def test_combines_market_and_portfolio(self, sample_market_data, sample_portfolio_data):
        """The report concatenates market, headline, and portfolio sections."""
        report = format_raw_data_report(sample_market_data, sample_portfolio_data)

        for section in ("## Market Data", "## Current Headlines", "## Portfolio Analysis"):
            assert section in report

    def test_handles_no_headlines(self, sample_portfolio_data):
        """Without headlines the headline section is omitted."""
        markets_only = {"markets": {"us": {"name": "US", "indices": {}}}}
        report = format_raw_data_report(markets_only, sample_portfolio_data)

        assert "## Market Data" in report
        assert "## Current Headlines" not in report

    def test_handles_portfolio_error(self, sample_market_data):
        """A portfolio payload carrying an error key is silently skipped."""
        report = format_raw_data_report(sample_market_data, {"error": "No portfolio configured"})

        assert "## Portfolio Analysis" not in report

    def test_handles_empty_data(self):
        """Nothing to report yields an empty string."""
        assert format_raw_data_report({}, {}) == ""
|
||||
|
||||
|
||||
class TestGenerateResearchContent:
    """Tests for generate_research_content().

    Both gemini_available and research_with_gemini are patched on the
    research module, so no CLI is invoked.
    """

    def test_uses_gemini_when_available(self, sample_market_data, sample_portfolio_data):
        """Use Gemini research when available."""
        with patch("research.gemini_available", return_value=True):
            with patch("research.research_with_gemini", return_value="Gemini report") as mock_gemini:
                result = generate_research_content(sample_market_data, sample_portfolio_data)

                assert result["report"] == "Gemini report"
                assert result["source"] == "gemini"
                mock_gemini.assert_called_once()

    def test_falls_back_to_raw_report(self, sample_market_data, sample_portfolio_data):
        """Fall back to raw report when Gemini unavailable."""
        with patch("research.gemini_available", return_value=False):
            result = generate_research_content(sample_market_data, sample_portfolio_data)

            # Raw fallback contains the formatted market section.
            assert "## Market Data" in result["report"]
            assert result["source"] == "raw"

    def test_handles_empty_report(self):
        """Return empty when no data available."""
        result = generate_research_content({}, {})

        assert result["report"] == ""
        assert result["source"] == "none"

    def test_passes_focus_areas_to_gemini(self, sample_market_data, sample_portfolio_data):
        """Pass focus areas to Gemini research."""
        focus = ["earnings", "tech"]
        with patch("research.gemini_available", return_value=True):
            with patch("research.research_with_gemini", return_value="Report") as mock_gemini:
                generate_research_content(sample_market_data, sample_portfolio_data, focus_areas=focus)

                mock_gemini.assert_called_once()
                # Check that focus_areas was passed (positional or keyword)
                call_args = mock_gemini.call_args
                # Focus areas passed as second positional arg
                assert call_args[0][1] == focus or call_args.kwargs.get("focus_areas") == focus
|
||||
84
tests/test_setup.py
Normal file
84
tests/test_setup.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Tests for setup wizard functionality."""
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
import pytest
|
||||
import json
|
||||
from unittest.mock import patch
|
||||
from setup import load_sources, save_sources, get_default_sources, setup_language, setup_markets
|
||||
|
||||
|
||||
def test_load_sources_missing_file(tmp_path, monkeypatch):
    """When sources.json is absent, load_sources falls back to the defaults."""
    monkeypatch.setattr("setup.SOURCES_FILE", tmp_path / "sources.json")

    defaults = load_sources()

    assert isinstance(defaults, dict)
    assert "rss_feeds" in defaults  # default structure has rss_feeds
|
||||
|
||||
|
||||
def test_save_sources(tmp_path, monkeypatch):
    """save_sources serializes the config dict to the JSON file."""
    target = tmp_path / "sources.json"
    monkeypatch.setattr("setup.SOURCES_FILE", target)

    save_sources({
        "rss_feeds": {
            "test_source": {
                "name": "Test",
                "enabled": True,
                "top": "https://example.com/rss",
            }
        }
    })

    assert target.exists()
    round_tripped = json.loads(target.read_text())
    assert round_tripped["rss_feeds"]["test_source"]["enabled"] is True
|
||||
|
||||
|
||||
def test_get_default_sources():
    """Defaults are a dict with an rss_feeds section seeded with major outlets."""
    defaults = get_default_sources()

    assert isinstance(defaults, dict)
    assert "rss_feeds" in defaults
    feeds = defaults["rss_feeds"]
    known_outlets = ("wsj", "barrons", "cnbc")
    assert any(outlet in key.lower() for key in feeds for outlet in known_outlets)
|
||||
|
||||
|
||||
@patch("setup.prompt", side_effect=["en"])
@patch("setup.save_sources")
def test_setup_language(mock_save, mock_prompt):
    """Test language setup function.

    Decorators apply bottom-up, so mock_save is setup.save_sources and
    mock_prompt is setup.prompt (which answers "en").
    """
    sources = {"language": {"supported": ["en", "de"], "default": "de"}}
    setup_language(sources)

    # The wizard must ask the user for a language...
    mock_prompt.assert_called()
    # ...and write the chosen one back into the config dict.
    assert sources["language"]["default"] == "en"
|
||||
|
||||
|
||||
@patch("setup.prompt_bool", side_effect=[True, False])
@patch("setup.save_sources")
def test_setup_markets(mock_save, mock_prompt):
    """Test markets setup function.

    prompt_bool answers True for the first market asked about and False
    for the second (decorators apply bottom-up: mock_prompt is prompt_bool).
    """
    sources = {"markets": {"us": {"enabled": False}, "eu": {"enabled": False}}}
    setup_markets(sources)

    # Should have prompted (at least once for US)
    assert mock_prompt.called
|
||||
286
tests/test_stocks.py
Normal file
286
tests/test_stocks.py
Normal file
@@ -0,0 +1,286 @@
|
||||
"""Tests for stocks.py - unified stock management."""
|
||||
|
||||
import json
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
# Add scripts to path
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
from stocks import (
|
||||
load_stocks,
|
||||
save_stocks,
|
||||
get_holdings,
|
||||
get_watchlist,
|
||||
get_holding_tickers,
|
||||
get_watchlist_tickers,
|
||||
add_to_watchlist,
|
||||
add_to_holdings,
|
||||
move_to_holdings,
|
||||
remove_stock,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def sample_stocks_data():
    """Sample stocks data for testing.

    Two holdings and two watchlist entries; the ticker sets are disjoint
    so tests can tell the two lists apart.
    """
    return {
        "version": "1.0",
        "updated": "2026-01-30",
        "holdings": [
            {"ticker": "AAPL", "name": "Apple Inc.", "category": "Tech"},
            {"ticker": "MSFT", "name": "Microsoft", "category": "Tech"},
        ],
        "watchlist": [
            {"ticker": "NVDA", "target": 800.0, "notes": "Buy on dip"},
            {"ticker": "TSLA", "target": 200.0, "notes": "Watch earnings"},
        ],
        "alert_definitions": {},
    }
|
||||
|
||||
|
||||
@pytest.fixture
def stocks_file(tmp_path, sample_stocks_data):
    """Write the sample data to a temporary stocks.json and return its path."""
    path = tmp_path / "stocks.json"
    path.write_text(json.dumps(sample_stocks_data))
    return path
|
||||
|
||||
|
||||
class TestLoadStocks:
    """Tests for load_stocks()."""

    def test_load_existing_file(self, stocks_file, sample_stocks_data):
        """An existing file is parsed and its lists returned intact."""
        loaded = load_stocks(stocks_file)

        assert loaded["version"] == "1.0"
        assert (len(loaded["holdings"]), len(loaded["watchlist"])) == (2, 2)

    def test_load_missing_file(self, tmp_path):
        """A missing file yields an empty default structure, not an error."""
        loaded = load_stocks(tmp_path / "missing.json")

        assert loaded["version"] == "1.0"
        assert loaded["holdings"] == []
        assert loaded["watchlist"] == []
        assert "alert_definitions" in loaded
|
||||
|
||||
|
||||
class TestSaveStocks:
    """Tests for save_stocks()."""

    def test_save_updates_timestamp(self, tmp_path, sample_stocks_data):
        """Save should refresh the 'updated' field to today's date.

        The date is captured both before and after the call so the test
        cannot flake if it happens to run across midnight.
        """
        stocks_path = tmp_path / "stocks.json"
        before = datetime.now().strftime("%Y-%m-%d")
        save_stocks(sample_stocks_data, stocks_path)
        after = datetime.now().strftime("%Y-%m-%d")

        saved = json.loads(stocks_path.read_text())
        assert saved["updated"] in (before, after)

    def test_save_preserves_data(self, tmp_path, sample_stocks_data):
        """Save should preserve all data (holdings round-trip unchanged)."""
        stocks_path = tmp_path / "stocks.json"
        save_stocks(sample_stocks_data, stocks_path)

        saved = json.loads(stocks_path.read_text())
        assert len(saved["holdings"]) == 2
        assert saved["holdings"][0]["ticker"] == "AAPL"
|
||||
|
||||
|
||||
class TestGetHoldings:
    """Tests for get_holdings()."""

    def test_get_holdings_from_data(self, sample_stocks_data):
        """Returns the holdings list from the given data."""
        result = get_holdings(sample_stocks_data)

        assert len(result) == 2
        assert result[0]["ticker"] == "AAPL"

    def test_get_holdings_empty(self):
        """Empty data yields an empty list."""
        assert get_holdings({"holdings": [], "watchlist": []}) == []
|
||||
|
||||
|
||||
class TestGetWatchlist:
    """Tests for get_watchlist()."""

    def test_get_watchlist_from_data(self, sample_stocks_data):
        """Returns the watchlist from the given data."""
        result = get_watchlist(sample_stocks_data)

        assert len(result) == 2
        assert result[0]["ticker"] == "NVDA"

    def test_get_watchlist_empty(self):
        """Empty data yields an empty list."""
        assert get_watchlist({"holdings": [], "watchlist": []}) == []
|
||||
|
||||
|
||||
class TestGetHoldingTickers:
    """Tests for get_holding_tickers()."""

    def test_get_holding_tickers(self, sample_stocks_data):
        """Returns the holding tickers as a set."""
        assert get_holding_tickers(sample_stocks_data) == {"MSFT", "AAPL"}

    def test_get_holding_tickers_empty(self):
        """No holdings means an empty set."""
        assert get_holding_tickers({"holdings": [], "watchlist": []}) == set()
|
||||
|
||||
|
||||
class TestGetWatchlistTickers:
    """Tests for get_watchlist_tickers()."""

    def test_get_watchlist_tickers(self, sample_stocks_data):
        """Returns the watchlist tickers as a set."""
        assert get_watchlist_tickers(sample_stocks_data) == {"TSLA", "NVDA"}

    def test_get_watchlist_tickers_empty(self):
        """An empty watchlist means an empty set."""
        assert get_watchlist_tickers({"holdings": [], "watchlist": []}) == set()
|
||||
|
||||
|
||||
class TestAddToWatchlist:
    """Tests for add_to_watchlist().

    Each test monkeypatches the module-level STOCKS_FILE path to the
    temporary fixture file, then re-loads it from disk to verify the
    change was persisted.
    """

    def test_add_new_to_watchlist(self, stocks_file, monkeypatch):
        """Add new stock to watchlist."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        result = add_to_watchlist("AMD", target=150.0, notes="Watch for dip")
        assert result is True

        # Re-load from disk to confirm persistence.
        data = load_stocks(stocks_file)
        tickers = get_watchlist_tickers(data)
        assert "AMD" in tickers

    def test_update_existing_watchlist(self, stocks_file, monkeypatch):
        """Update existing watchlist entry."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        # NVDA already in watchlist with target 800
        result = add_to_watchlist("NVDA", target=750.0, notes="Updated target")
        assert result is True

        data = load_stocks(stocks_file)
        nvda = next(w for w in data["watchlist"] if w["ticker"] == "NVDA")
        assert nvda["target"] == 750.0
        assert nvda["notes"] == "Updated target"

    def test_add_with_alerts(self, stocks_file, monkeypatch):
        """Add stock with alert definitions."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        alerts = ["below_target", "above_stop"]
        result = add_to_watchlist("GOOG", target=180.0, alerts=alerts)
        assert result is True

        data = load_stocks(stocks_file)
        goog = next(w for w in data["watchlist"] if w["ticker"] == "GOOG")
        assert goog["alerts"] == alerts
|
||||
|
||||
|
||||
class TestAddToHoldings:
    """Tests for add_to_holdings().

    Each test monkeypatches the module-level STOCKS_FILE path to the
    temporary fixture file, then re-loads it from disk to verify the
    change was persisted.
    """

    def test_add_new_holding(self, stocks_file, monkeypatch):
        """Add new stock to holdings."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        result = add_to_holdings("GOOG", name="Alphabet", category="Tech")
        assert result is True

        data = load_stocks(stocks_file)
        tickers = get_holding_tickers(data)
        assert "GOOG" in tickers

    def test_update_existing_holding(self, stocks_file, monkeypatch):
        """Update existing holding."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        # AAPL exists in the fixture; this must update it in place.
        result = add_to_holdings("AAPL", name="Apple Inc.", category="Consumer", notes="Core holding")
        assert result is True

        data = load_stocks(stocks_file)
        aapl = next(h for h in data["holdings"] if h["ticker"] == "AAPL")
        assert aapl["category"] == "Consumer"
        assert aapl["notes"] == "Core holding"
|
||||
|
||||
|
||||
class TestMoveToHoldings:
    """Tests for move_to_holdings()."""

    def test_move_from_watchlist(self, stocks_file, monkeypatch):
        """Move stock from watchlist to holdings."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        # NVDA is in watchlist, not holdings
        result = move_to_holdings("NVDA", name="NVIDIA Corp", category="Semis")
        assert result is True

        # The ticker must appear in holdings and be gone from the watchlist.
        data = load_stocks(stocks_file)
        assert "NVDA" in get_holding_tickers(data)
        assert "NVDA" not in get_watchlist_tickers(data)

    def test_move_nonexistent_returns_false(self, stocks_file, monkeypatch):
        """Moving non-existent ticker returns False."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        result = move_to_holdings("NONEXISTENT")
        assert result is False
|
||||
|
||||
|
||||
class TestRemoveStock:
    """Tests for remove_stock().

    Each test monkeypatches the module-level STOCKS_FILE path to the
    temporary fixture file and verifies the removal by re-loading it.
    """

    def test_remove_from_holdings(self, stocks_file, monkeypatch):
        """Remove stock from holdings."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        result = remove_stock("AAPL", from_list="holdings")
        assert result is True

        data = load_stocks(stocks_file)
        assert "AAPL" not in get_holding_tickers(data)

    def test_remove_from_watchlist(self, stocks_file, monkeypatch):
        """Remove stock from watchlist."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        result = remove_stock("NVDA", from_list="watchlist")
        assert result is True

        data = load_stocks(stocks_file)
        assert "NVDA" not in get_watchlist_tickers(data)

    def test_remove_nonexistent_returns_false(self, stocks_file, monkeypatch):
        """Removing non-existent ticker returns False."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        result = remove_stock("NONEXISTENT", from_list="holdings")
        assert result is False

    def test_remove_auto_detects_list(self, stocks_file, monkeypatch):
        """Remove without specifying list auto-detects."""
        monkeypatch.setattr("stocks.STOCKS_FILE", stocks_file)

        # AAPL is in holdings
        result = remove_stock("AAPL")
        assert result is True

        data = load_stocks(stocks_file)
        assert "AAPL" not in get_holding_tickers(data)
|
||||
345
tests/test_summarize.py
Normal file
345
tests/test_summarize.py
Normal file
@@ -0,0 +1,345 @@
|
||||
"""Tests for summarize helpers."""
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
import summarize
|
||||
from summarize import (
|
||||
MoverContext,
|
||||
SectorCluster,
|
||||
WatchpointsData,
|
||||
build_watchpoints_data,
|
||||
classify_move_type,
|
||||
detect_sector_clusters,
|
||||
format_watchpoints,
|
||||
get_index_change,
|
||||
match_headline_to_symbol,
|
||||
)
|
||||
|
||||
|
||||
class FixedDateTime(datetime):
    """datetime stand-in whose now() is pinned for deterministic tests."""

    @classmethod
    def now(cls, tz=None):
        # Always report 2026-01-01 15:00, regardless of the tz argument.
        return cls(2026, 1, 1, 15, 0)
||||
|
||||
|
||||
def test_generate_briefing_auto_time_evening(capsys, monkeypatch):
    """With time=None and the clock pinned to 15:00, the German evening briefing is emitted."""

    def stub_market_news(*_args, **_kwargs):
        # Minimal payload with three headlines and one US index.
        return {
            "headlines": [
                {"source": "CNBC", "title": "Headline one", "link": "https://example.com/1"},
                {"source": "Yahoo", "title": "Headline two", "link": "https://example.com/2"},
                {"source": "CNBC", "title": "Headline three", "link": "https://example.com/3"},
            ],
            "markets": {
                "us": {
                    "name": "US Markets",
                    "indices": {
                        "^GSPC": {"name": "S&P 500", "data": {"price": 100, "change_percent": 1.0}},
                    },
                }
            },
        }

    monkeypatch.setattr(summarize, "get_market_news", stub_market_news)
    monkeypatch.setattr(summarize, "get_portfolio_news", lambda *_a, **_k: None)
    monkeypatch.setattr(summarize, "summarize_with_claude", lambda *_a, **_k: "OK")
    monkeypatch.setattr(summarize, "datetime", FixedDateTime)

    class _Args:
        # Attribute bag mirroring the CLI namespace generate_briefing expects.
        lang = "de"
        style = "briefing"
        time = None
        model = "claude"
        json = False
        research = False
        deadline = None
        fast = False
        llm = False
        debug = False

    summarize.generate_briefing(_Args())

    captured = capsys.readouterr().out
    assert "Börsen Abend-Briefing" in captured
|
||||
|
||||
|
||||
# --- Tests for watchpoints feature (Issue #92) ---
|
||||
|
||||
|
||||
class TestGetIndexChange:
    """Tests for get_index_change()."""

    def test_extracts_sp500_change(self):
        # Nested markets/us/indices/^GSPC structure carrying the percent move.
        payload = {
            "markets": {
                "us": {
                    "indices": {
                        "^GSPC": {"data": {"change_percent": -1.5}}
                    }
                }
            }
        }
        assert get_index_change(payload) == -1.5

    def test_returns_zero_on_missing_data(self):
        # Progressively emptier structures all fall back to 0.0.
        for incomplete in ({}, {"markets": {}}, {"markets": {"us": {}}}):
            assert get_index_change(incomplete) == 0.0
|
||||
|
||||
|
||||
class TestMatchHeadlineToSymbol:
    """Tests for match_headline_to_symbol()."""

    def test_exact_symbol_match_dollar(self):
        hit = match_headline_to_symbol(
            "NVDA",
            "NVIDIA Corporation",
            [{"title": "Breaking: $NVDA surges on AI demand"}],
        )
        assert hit is not None
        assert "NVDA" in hit["title"]

    def test_exact_symbol_match_parens(self):
        hit = match_headline_to_symbol(
            "TSLA", "Tesla Inc", [{"title": "Tesla (TSLA) reports record deliveries"}]
        )
        assert hit is not None

    def test_exact_symbol_match_word_boundary(self):
        hit = match_headline_to_symbol(
            "AAPL", "Apple Inc", [{"title": "AAPL announces new product line"}]
        )
        assert hit is not None

    def test_company_name_match(self):
        hit = match_headline_to_symbol(
            "AAPL", "Apple Inc", [{"title": "Apple announces record iPhone sales"}]
        )
        assert hit is not None

    def test_no_match_returns_none(self):
        hit = match_headline_to_symbol(
            "NVDA", "NVIDIA Corporation", [{"title": "Fed raises interest rates"}]
        )
        assert hit is None

    def test_avoids_partial_symbol_match(self):
        # "APP" embedded inside "application" must not count as a match.
        hit = match_headline_to_symbol(
            "APP", "AppLovin Corp", [{"title": "New application launches today"}]
        )
        assert hit is None

    def test_empty_headlines(self):
        assert match_headline_to_symbol("NVDA", "NVIDIA", []) is None
|
||||
|
||||
|
||||
class TestDetectSectorClusters:
    """Tests for detect_sector_clusters()."""

    def test_detects_cluster_three_stocks_same_direction(self):
        # Three Tech losers moving the same way form one cluster.
        meta = {sym: {"category": "Tech"} for sym in ("NVDA", "AMD", "INTC")}
        losers = [
            {"symbol": "NVDA", "change_pct": -5.0},
            {"symbol": "AMD", "change_pct": -4.0},
            {"symbol": "INTC", "change_pct": -3.0},
        ]
        clusters = detect_sector_clusters(losers, meta)
        assert len(clusters) == 1
        cluster = clusters[0]
        assert cluster.category == "Tech"
        assert cluster.direction == "down"
        assert len(cluster.stocks) == 3

    def test_no_cluster_if_less_than_three(self):
        meta = {sym: {"category": "Tech"} for sym in ("NVDA", "AMD")}
        movers = [
            {"symbol": "NVDA", "change_pct": -5.0},
            {"symbol": "AMD", "change_pct": -4.0},
        ]
        assert len(detect_sector_clusters(movers, meta)) == 0

    def test_no_cluster_if_mixed_direction(self):
        # Same category but mixed up/down moves — no cluster.
        meta = {sym: {"category": "Tech"} for sym in ("NVDA", "AMD", "INTC")}
        movers = [
            {"symbol": "NVDA", "change_pct": 5.0},
            {"symbol": "AMD", "change_pct": -4.0},
            {"symbol": "INTC", "change_pct": 3.0},
        ]
        assert len(detect_sector_clusters(movers, meta)) == 0
|
||||
|
||||
|
||||
class TestClassifyMoveType:
    """Tests for classify_move_type()."""

    def test_earnings_with_keyword(self):
        headline = {"title": "Company beats Q3 earnings expectations"}
        assert classify_move_type(headline, False, 5.0, 0.1) == "earnings"

    def test_sector_cluster(self):
        assert classify_move_type(None, True, -3.0, -0.5) == "sector"

    def test_market_wide(self):
        assert classify_move_type(None, False, -2.0, -2.0) == "market_wide"

    def test_company_specific_with_headline(self):
        headline = {"title": "Company announces acquisition"}
        assert classify_move_type(headline, False, 3.0, 0.1) == "company_specific"

    def test_company_specific_large_move_no_headline(self):
        # A large move counts as company-specific even without a headline.
        assert classify_move_type(None, False, 8.0, 0.1) == "company_specific"

    def test_unknown_small_move_no_context(self):
        assert classify_move_type(None, False, 1.5, 0.2) == "unknown"
|
||||
|
||||
|
||||
class TestFormatWatchpoints:
    """Tests for format_watchpoints()."""

    def test_formats_sector_cluster(self):
        members = [
            MoverContext(symbol="NVDA", change_pct=-5.0, price=100.0, category="Tech",
                         matched_headline=None, move_type="sector", vs_index=None),
            MoverContext(symbol="AMD", change_pct=-4.0, price=80.0, category="Tech",
                         matched_headline=None, move_type="sector", vs_index=None),
            MoverContext(symbol="INTC", change_pct=-3.0, price=30.0, category="Tech",
                         matched_headline=None, move_type="sector", vs_index=None),
        ]
        cluster = SectorCluster(
            category="Tech",
            stocks=members,
            avg_change=-4.0,
            direction="down",
            vs_index=-3.5,
        )
        data = WatchpointsData(
            movers=[],
            sector_clusters=[cluster],
            index_change=-0.5,
            market_wide=False,
        )
        rendered = format_watchpoints(data, "en", {})
        for expected in ("Tech", "-4.0%", "vs Index"):
            assert expected in rendered

    def test_formats_individual_mover_with_headline(self):
        mover = MoverContext(
            symbol="NVDA",
            change_pct=5.0,
            price=100.0,
            category="Tech",
            matched_headline={"title": "NVIDIA reports record revenue"},
            move_type="company_specific",
            vs_index=4.5,
        )
        data = WatchpointsData(
            movers=[mover],
            sector_clusters=[],
            index_change=0.5,
            market_wide=False,
        )
        rendered = format_watchpoints(data, "en", {})
        assert "NVDA" in rendered
        assert "+5.0%" in rendered
        assert "record revenue" in rendered

    def test_formats_market_wide_move_english(self):
        data = WatchpointsData(
            movers=[],
            sector_clusters=[],
            index_change=-2.0,
            market_wide=True,
        )
        rendered = format_watchpoints(data, "en", {})
        assert "Market-wide move" in rendered
        assert "S&P 500 fell 2.0%" in rendered

    def test_formats_market_wide_move_german(self):
        data = WatchpointsData(
            movers=[],
            sector_clusters=[],
            index_change=2.5,
            market_wide=True,
        )
        rendered = format_watchpoints(data, "de", {})
        assert "Breite Marktbewegung" in rendered
        assert "stieg 2.5%" in rendered

    def test_uses_label_fallbacks(self):
        # A small mover with no headline should render the no_catalyst label.
        quiet_mover = MoverContext(
            symbol="XYZ",
            change_pct=1.5,
            price=50.0,
            category="Other",
            matched_headline=None,
            move_type="unknown",
            vs_index=1.0,
        )
        data = WatchpointsData(
            movers=[quiet_mover],
            sector_clusters=[],
            index_change=0.5,
            market_wide=False,
        )
        rendered = format_watchpoints(data, "en", {"no_catalyst": " -- no news"})
        assert "XYZ" in rendered
        assert "no news" in rendered
|
||||
|
||||
|
||||
class TestBuildWatchpointsData:
    """Tests for build_watchpoints_data()."""

    def test_builds_complete_data_structure(self):
        movers = [
            {"symbol": "NVDA", "change_pct": -5.0, "price": 100.0},
            {"symbol": "AMD", "change_pct": -4.0, "price": 80.0},
            {"symbol": "INTC", "change_pct": -3.0, "price": 30.0},
            {"symbol": "AAPL", "change_pct": 2.0, "price": 150.0},
        ]
        headlines = [
            {"title": "NVIDIA reports weak guidance"},
            {"title": "Apple announces new product"},
        ]
        meta = {
            "NVDA": {"category": "Tech", "name": "NVIDIA Corporation"},
            "AMD": {"category": "Tech", "name": "Advanced Micro Devices"},
            "INTC": {"category": "Tech", "name": "Intel Corporation"},
            "AAPL": {"category": "Tech", "name": "Apple Inc"},
        }

        result = build_watchpoints_data(movers, headlines, meta, -0.5)

        # The three Tech losers form exactly one downward sector cluster.
        assert len(result.sector_clusters) == 1
        cluster = result.sector_clusters[0]
        assert cluster.category == "Tech"
        assert cluster.direction == "down"

        # Every input mover is carried through.
        assert len(result.movers) == 4

        # NVDA gets a matched headline.
        nvda = next(m for m in result.movers if m.symbol == "NVDA")
        assert nvda.matched_headline is not None
        assert "guidance" in nvda.matched_headline["title"]

        # vs_index is the mover's change relative to the index change.
        assert nvda.vs_index == -5.0 - (-0.5)  # -4.5

    def test_handles_empty_movers(self):
        empty = build_watchpoints_data([], [], {}, 0.0)
        assert empty.movers == []
        assert empty.sector_clusters == []
        assert empty.market_wide is False

    def test_detects_market_wide_move(self):
        assert build_watchpoints_data([], [], {}, -2.0).market_wide is True
|
||||
Reference in New Issue
Block a user