first commit
This commit is contained in:
34
.env.example
Normal file
34
.env.example
Normal file
@@ -0,0 +1,34 @@
|
||||
# Database
|
||||
DATABASE_URL=postgresql+asyncpg://stock_backend:changeme@localhost:5432/stock_data_backend
|
||||
|
||||
# Auth
|
||||
JWT_SECRET=change-this-to-a-random-secret
|
||||
JWT_EXPIRY_MINUTES=60
|
||||
|
||||
# OHLCV Provider — Alpaca Markets
|
||||
ALPACA_API_KEY=
|
||||
ALPACA_API_SECRET=
|
||||
|
||||
# Sentiment Provider — Gemini with Search Grounding
|
||||
GEMINI_API_KEY=
|
||||
GEMINI_MODEL=gemini-2.0-flash
|
||||
|
||||
# Fundamentals Provider — Financial Modeling Prep
|
||||
FMP_API_KEY=
|
||||
|
||||
# Scheduled Jobs
|
||||
DATA_COLLECTOR_FREQUENCY=daily
|
||||
SENTIMENT_POLL_INTERVAL_MINUTES=30
|
||||
FUNDAMENTAL_FETCH_FREQUENCY=daily
|
||||
RR_SCAN_FREQUENCY=daily
|
||||
|
||||
# Scoring Defaults
|
||||
DEFAULT_WATCHLIST_AUTO_SIZE=10
|
||||
DEFAULT_RR_THRESHOLD=3.0
|
||||
|
||||
# Database Pool
|
||||
DB_POOL_SIZE=5
|
||||
DB_POOL_TIMEOUT=30
|
||||
|
||||
# Logging
|
||||
LOG_LEVEL=INFO
|
||||
68
.gitea/workflows/deploy.yml
Normal file
68
.gitea/workflows/deploy.yml
Normal file
@@ -0,0 +1,68 @@
|
||||
# Gitea Actions CI/CD pipeline: lint → test → deploy
|
||||
# Triggers on push to main branch.
|
||||
#
|
||||
# Required secrets (set in Gitea repo settings):
|
||||
# DEPLOY_HOST — server IP or hostname
|
||||
# DEPLOY_USER — SSH username on the server
|
||||
# DEPLOY_KEY — SSH private key for deployment
|
||||
|
||||
name: Deploy
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
|
||||
jobs:
|
||||
lint:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: pip install ruff
|
||||
- run: ruff check app/
|
||||
|
||||
test:
|
||||
needs: lint
|
||||
runs-on: ubuntu-latest
|
||||
services:
|
||||
postgres:
|
||||
image: postgres:16
|
||||
env:
|
||||
POSTGRES_DB: test_db
|
||||
POSTGRES_USER: test_user
|
||||
POSTGRES_PASSWORD: test_pass
|
||||
ports:
|
||||
- 5432:5432
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.12"
|
||||
- run: pip install -e ".[dev]"
|
||||
- run: alembic upgrade head
|
||||
env:
|
||||
DATABASE_URL: postgresql+asyncpg://test_user:test_pass@localhost:5432/test_db
|
||||
- run: pytest --tb=short
|
||||
env:
|
||||
DATABASE_URL: postgresql+asyncpg://test_user:test_pass@localhost:5432/test_db
|
||||
|
||||
deploy:
|
||||
needs: test
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- name: Deploy via SSH
|
||||
uses: appleboy/ssh-action@v1
|
||||
with:
|
||||
host: ${{ secrets.DEPLOY_HOST }}
|
||||
username: ${{ secrets.DEPLOY_USER }}
|
||||
key: ${{ secrets.DEPLOY_KEY }}
|
||||
script: |
|
||||
cd /opt/stock-data-backend
|
||||
git pull origin main
|
||||
source .venv/bin/activate
|
||||
pip install -e .
|
||||
alembic upgrade head
|
||||
sudo systemctl restart stock-data-backend
|
||||
32
.gitignore
vendored
Normal file
32
.gitignore
vendored
Normal file
@@ -0,0 +1,32 @@
|
||||
# Python
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*.egg-info/
|
||||
*.egg
|
||||
dist/
|
||||
build/
|
||||
.venv/
|
||||
.env
|
||||
|
||||
# Hypothesis (PBT)
|
||||
.hypothesis/
|
||||
|
||||
# Pytest
|
||||
.pytest_cache/
|
||||
|
||||
# IDE
|
||||
.vscode/
|
||||
.idea/
|
||||
*.swp
|
||||
*.swo
|
||||
|
||||
# OS
|
||||
.DS_Store
|
||||
Thumbs.db
|
||||
|
||||
# Frontend
|
||||
frontend/node_modules/
|
||||
frontend/dist/
|
||||
|
||||
# Alembic
|
||||
alembic/versions/__pycache__/
|
||||
1
.kiro/specs/signal-dashboard/.config.kiro
Normal file
1
.kiro/specs/signal-dashboard/.config.kiro
Normal file
@@ -0,0 +1 @@
|
||||
{"specId": "fa730cf4-a14d-4f62-8993-fd7db6fe25cc", "workflowType": "requirements-first", "specType": "feature"}
|
||||
645
.kiro/specs/signal-dashboard/design.md
Normal file
645
.kiro/specs/signal-dashboard/design.md
Normal file
@@ -0,0 +1,645 @@
|
||||
# Design Document: Signal Dashboard
|
||||
|
||||
## Overview
|
||||
|
||||
Signal Dashboard is a React 18 + TypeScript SPA that consumes the existing Stock Data Backend REST API (`/api/v1/`). It provides authenticated users with views for watchlist monitoring, per-ticker analysis, trade setup scanning, composite-score rankings, and admin management.
|
||||
|
||||
The frontend lives in `frontend/` within the existing project root. Vite builds static assets to `frontend/dist/`, which Nginx serves on `signal.thiessen.io`. API requests to `/api/v1/` are proxied to the FastAPI backend — no CORS needed.
|
||||
|
||||
### Key Technical Decisions
|
||||
|
||||
| Decision | Choice | Rationale |
|
||||
|---|---|---|
|
||||
| Build tool | Vite 5 | Fast HMR, native TS/React support, small output |
|
||||
| Routing | React Router v6 | Standard, supports layout routes and guards |
|
||||
| Server state | TanStack Query v5 | Caching, deduplication, background refetch |
|
||||
| Client state | Zustand | Minimal auth store, no boilerplate |
|
||||
| Styling | Tailwind CSS v3 | Utility-first, dark mode built-in, small bundle |
|
||||
| Charts | Recharts | React-native charting, composable, lightweight |
|
||||
| HTTP | Axios | Interceptors for auth/envelope unwrapping |
|
||||
| Testing | Vitest + React Testing Library + fast-check | Vite-native test runner, property-based testing |
|
||||
|
||||
## Architecture
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
subgraph Browser
|
||||
Router[React Router]
|
||||
Pages[Page Components]
|
||||
Hooks[TanStack Query Hooks]
|
||||
Store[Zustand Auth Store]
|
||||
API[API Client - Axios]
|
||||
end
|
||||
|
||||
Router --> Pages
|
||||
Pages --> Hooks
|
||||
Hooks --> API
|
||||
API --> Store
|
||||
Store --> API
|
||||
|
||||
subgraph Server
|
||||
Nginx[Nginx - static files + proxy]
|
||||
Backend[FastAPI Backend]
|
||||
end
|
||||
|
||||
API -->|/api/v1/*| Nginx
|
||||
Nginx -->|proxy_pass| Backend
|
||||
Nginx -->|static| Browser
|
||||
```
|
||||
|
||||
### Request Flow
|
||||
|
||||
1. Component mounts → calls a TanStack Query hook (e.g., `useWatchlist()`)
|
||||
2. Hook calls an API client function (e.g., `api.watchlist.list()`)
|
||||
3. Axios sends request with JWT Bearer header from Zustand store
|
||||
4. Axios response interceptor unwraps `{ status, data, error }` envelope
|
||||
5. On 401 → Zustand clears token, React Router redirects to `/login`
|
||||
6. TanStack Query caches the result, component renders data
|
||||
|
||||
### Directory Structure
|
||||
|
||||
```
|
||||
frontend/
|
||||
├── index.html
|
||||
├── package.json
|
||||
├── tsconfig.json
|
||||
├── vite.config.ts
|
||||
├── tailwind.config.ts
|
||||
├── postcss.config.js
|
||||
├── src/
|
||||
│ ├── main.tsx # App entry, providers
|
||||
│ ├── App.tsx # Router + layout
|
||||
│ ├── api/
|
||||
│ │ ├── client.ts # Axios instance, interceptors
|
||||
│ │ ├── auth.ts # login, register
|
||||
│ │ ├── watchlist.ts # watchlist CRUD
|
||||
│ │ ├── tickers.ts # ticker CRUD
|
||||
│ │ ├── scores.ts # scores, rankings, weights
|
||||
│ │ ├── trades.ts # trade setups
|
||||
│ │ ├── ohlcv.ts # OHLCV data
|
||||
│ │ ├── indicators.ts # technical indicators
|
||||
│ │ ├── sr-levels.ts # support/resistance
|
||||
│ │ ├── sentiment.ts # sentiment data
|
||||
│ │ ├── fundamentals.ts # fundamental data
|
||||
│ │ ├── ingestion.ts # manual data fetch
|
||||
│ │ ├── admin.ts # admin endpoints
|
||||
│ │ └── health.ts # health check
|
||||
│ ├── hooks/
|
||||
│ │ ├── useAuth.ts # login/register/logout mutations
|
||||
│ │ ├── useWatchlist.ts # watchlist queries + mutations
|
||||
│ │ ├── useTickers.ts # ticker queries + mutations
|
||||
│ │ ├── useScores.ts # scores, rankings queries
|
||||
│ │ ├── useTrades.ts # trade setup queries
|
||||
│ │ ├── useTickerDetail.ts # parallel queries for detail view
|
||||
│ │ └── useAdmin.ts # admin queries + mutations
|
||||
│ ├── stores/
|
||||
│ │ └── authStore.ts # Zustand: token, user, role
|
||||
│ ├── pages/
|
||||
│ │ ├── LoginPage.tsx
|
||||
│ │ ├── RegisterPage.tsx
|
||||
│ │ ├── WatchlistPage.tsx
|
||||
│ │ ├── TickerDetailPage.tsx
|
||||
│ │ ├── ScannerPage.tsx
|
||||
│ │ ├── RankingsPage.tsx
|
||||
│ │ └── AdminPage.tsx
|
||||
│ ├── components/
|
||||
│ │ ├── layout/
|
||||
│ │ │ ├── AppShell.tsx # Sidebar + main content
|
||||
│ │ │ ├── Sidebar.tsx
|
||||
│ │ │ └── MobileNav.tsx
|
||||
│ │ ├── auth/
|
||||
│ │ │ └── ProtectedRoute.tsx
|
||||
│ │ ├── charts/
|
||||
│ │ │ └── CandlestickChart.tsx
|
||||
│ │ ├── ui/
|
||||
│ │ │ ├── ScoreCard.tsx
|
||||
│ │ │ ├── Toast.tsx
|
||||
│ │ │ ├── Skeleton.tsx
|
||||
│ │ │ ├── Badge.tsx
|
||||
│ │ │ └── ConfirmDialog.tsx
|
||||
│ │ ├── watchlist/
|
||||
│ │ │ ├── WatchlistTable.tsx
|
||||
│ │ │ └── AddTickerForm.tsx
|
||||
│ │ ├── scanner/
|
||||
│ │ │ └── TradeTable.tsx
|
||||
│ │ ├── rankings/
|
||||
│ │ │ ├── RankingsTable.tsx
|
||||
│ │ │ └── WeightsForm.tsx
|
||||
│ │ ├── ticker/
|
||||
│ │ │ ├── SentimentPanel.tsx
|
||||
│ │ │ ├── FundamentalsPanel.tsx
|
||||
│ │ │ ├── IndicatorSelector.tsx
|
||||
│ │ │ └── SROverlay.tsx
|
||||
│ │ └── admin/
|
||||
│ │ ├── UserTable.tsx
|
||||
│ │ ├── SettingsForm.tsx
|
||||
│ │ ├── JobControls.tsx
|
||||
│ │ └── DataCleanup.tsx
|
||||
│ ├── lib/
|
||||
│ │ ├── format.ts # Number/date formatting utilities
|
||||
│ │ └── types.ts # Shared TypeScript interfaces
|
||||
│ └── styles/
|
||||
│ └── globals.css # Tailwind directives + custom vars
|
||||
└── tests/
|
||||
├── unit/
|
||||
└── property/
|
||||
```
|
||||
|
||||
## Components and Interfaces
|
||||
|
||||
### API Client (`src/api/client.ts`)
|
||||
|
||||
Central Axios instance with interceptors:
|
||||
|
||||
```typescript
|
||||
// Axios instance configuration
|
||||
const apiClient = axios.create({
|
||||
baseURL: '/api/v1/',
|
||||
timeout: 30_000,
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
});
|
||||
|
||||
// Request interceptor: attach JWT
|
||||
apiClient.interceptors.request.use((config) => {
|
||||
const token = useAuthStore.getState().token;
|
||||
if (token) config.headers.Authorization = `Bearer ${token}`;
|
||||
return config;
|
||||
});
|
||||
|
||||
// Response interceptor: unwrap envelope, handle 401
|
||||
apiClient.interceptors.response.use(
|
||||
(response) => {
|
||||
const envelope = response.data as APIEnvelope;
|
||||
if (envelope.status === 'error') throw new ApiError(envelope.error);
|
||||
return envelope.data;
|
||||
},
|
||||
(error) => {
|
||||
if (error.response?.status === 401) {
|
||||
useAuthStore.getState().logout();
|
||||
}
|
||||
const msg = error.response?.data?.error ?? error.message ?? 'Network error';
|
||||
throw new ApiError(msg);
|
||||
}
|
||||
);
|
||||
```
|
||||
|
||||
### Auth Store (`src/stores/authStore.ts`)
|
||||
|
||||
```typescript
|
||||
interface AuthState {
|
||||
token: string | null;
|
||||
username: string | null;
|
||||
role: 'admin' | 'user' | null;
|
||||
login: (token: string) => void;
|
||||
logout: () => void;
|
||||
}
|
||||
```
|
||||
|
||||
- `login()` decodes the JWT payload to extract `sub` (username) and `role`, stores token in `localStorage`
|
||||
- `logout()` clears token from state and `localStorage`, TanStack Query cache is cleared on logout
|
||||
|
||||
### Protected Route (`src/components/auth/ProtectedRoute.tsx`)
|
||||
|
||||
```typescript
|
||||
// Wraps routes that require authentication
|
||||
// Props: requireAdmin?: boolean
|
||||
// If no token → redirect to /login
|
||||
// If requireAdmin && role !== 'admin' → redirect to /watchlist
|
||||
```
|
||||
|
||||
### Router Layout
|
||||
|
||||
```typescript
|
||||
// Route structure
|
||||
<Routes>
|
||||
<Route path="/login" element={<LoginPage />} />
|
||||
<Route path="/register" element={<RegisterPage />} />
|
||||
<Route element={<ProtectedRoute />}>
|
||||
<Route element={<AppShell />}>
|
||||
<Route path="/" element={<Navigate to="/watchlist" />} />
|
||||
<Route path="/watchlist" element={<WatchlistPage />} />
|
||||
<Route path="/ticker/:symbol" element={<TickerDetailPage />} />
|
||||
<Route path="/scanner" element={<ScannerPage />} />
|
||||
<Route path="/rankings" element={<RankingsPage />} />
|
||||
<Route element={<ProtectedRoute requireAdmin />}>
|
||||
<Route path="/admin" element={<AdminPage />} />
|
||||
</Route>
|
||||
</Route>
|
||||
</Route>
|
||||
</Routes>
|
||||
```
|
||||
|
||||
### TanStack Query Hooks Pattern
|
||||
|
||||
Each domain has a hook file that exports query/mutation hooks:
|
||||
|
||||
```typescript
|
||||
// Example: useWatchlist.ts
|
||||
export function useWatchlist() {
|
||||
return useQuery({
|
||||
queryKey: ['watchlist'],
|
||||
queryFn: () => api.watchlist.list(),
|
||||
});
|
||||
}
|
||||
|
||||
export function useAddToWatchlist() {
|
||||
const qc = useQueryClient();
|
||||
return useMutation({
|
||||
mutationFn: (symbol: string) => api.watchlist.add(symbol),
|
||||
onSuccess: () => qc.invalidateQueries({ queryKey: ['watchlist'] }),
|
||||
});
|
||||
}
|
||||
```
|
||||
|
||||
### Key UI Components
|
||||
|
||||
**ScoreCard**: Displays composite score with a colored ring/bar (green > 70, yellow 40-70, red < 40) and expandable dimension breakdown.
|
||||
|
||||
**CandlestickChart**: Recharts `ComposedChart` with custom `Bar` shapes for OHLCV candles. S/R levels rendered as `ReferenceLine` components with color coding (green = support, red = resistance).
|
||||
|
||||
**Toast System**: Lightweight toast using React context + portal. Auto-dismiss after 4 seconds. Error toasts in red, success in green.
|
||||
|
||||
**Skeleton**: Tailwind `animate-pulse` placeholder blocks matching the shape of cards/tables during loading states.
|
||||
|
||||
### Formatting Utilities (`src/lib/format.ts`)
|
||||
|
||||
```typescript
|
||||
formatPrice(n: number): string // "1,234.56"
|
||||
formatPercent(n: number): string // "12.34%"
|
||||
formatLargeNumber(n: number): string // "1.23B", "456.7M", "12.3K"
|
||||
formatDate(d: string): string // "Jan 15, 2025"
|
||||
formatDateTime(d: string): string // "Jan 15, 2025 2:30 PM"
|
||||
```
|
||||
|
||||
## Data Models
|
||||
|
||||
### TypeScript Interfaces (`src/lib/types.ts`)
|
||||
|
||||
```typescript
|
||||
// API envelope (before unwrapping)
|
||||
interface APIEnvelope<T = unknown> {
|
||||
status: 'success' | 'error';
|
||||
data: T | null;
|
||||
error: string | null;
|
||||
}
|
||||
|
||||
// Auth
|
||||
interface TokenResponse {
|
||||
access_token: string;
|
||||
token_type: string;
|
||||
}
|
||||
|
||||
// Watchlist
|
||||
interface WatchlistEntry {
|
||||
symbol: string;
|
||||
entry_type: 'auto' | 'manual';
|
||||
composite_score: number | null;
|
||||
dimensions: DimensionScore[];
|
||||
rr_ratio: number | null;
|
||||
rr_direction: string | null;
|
||||
sr_levels: SRLevelSummary[];
|
||||
added_at: string;
|
||||
}
|
||||
|
||||
interface DimensionScore {
|
||||
dimension: string;
|
||||
score: number;
|
||||
}
|
||||
|
||||
interface SRLevelSummary {
|
||||
price_level: number;
|
||||
type: 'support' | 'resistance';
|
||||
strength: number;
|
||||
}
|
||||
|
||||
// OHLCV
|
||||
interface OHLCVBar {
|
||||
id: number;
|
||||
ticker_id: number;
|
||||
date: string;
|
||||
open: number;
|
||||
high: number;
|
||||
low: number;
|
||||
close: number;
|
||||
volume: number;
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
// Scores
|
||||
interface ScoreResponse {
|
||||
symbol: string;
|
||||
composite_score: number | null;
|
||||
composite_stale: boolean;
|
||||
weights: Record<string, number>;
|
||||
dimensions: DimensionScoreDetail[];
|
||||
missing_dimensions: string[];
|
||||
computed_at: string | null;
|
||||
}
|
||||
|
||||
interface DimensionScoreDetail {
|
||||
dimension: string;
|
||||
score: number;
|
||||
is_stale: boolean;
|
||||
computed_at: string | null;
|
||||
}
|
||||
|
||||
interface RankingEntry {
|
||||
symbol: string;
|
||||
composite_score: number;
|
||||
dimensions: DimensionScoreDetail[];
|
||||
}
|
||||
|
||||
interface RankingsResponse {
|
||||
rankings: RankingEntry[];
|
||||
weights: Record<string, number>;
|
||||
}
|
||||
|
||||
// Trade Setups
|
||||
interface TradeSetup {
|
||||
id: number;
|
||||
symbol: string;
|
||||
direction: string;
|
||||
entry_price: number;
|
||||
stop_loss: number;
|
||||
target: number;
|
||||
rr_ratio: number;
|
||||
composite_score: number;
|
||||
detected_at: string;
|
||||
}
|
||||
|
||||
// S/R Levels
|
||||
interface SRLevel {
|
||||
id: number;
|
||||
price_level: number;
|
||||
type: 'support' | 'resistance';
|
||||
strength: number;
|
||||
detection_method: string;
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
interface SRLevelResponse {
|
||||
symbol: string;
|
||||
levels: SRLevel[];
|
||||
count: number;
|
||||
}
|
||||
|
||||
// Sentiment
|
||||
interface SentimentScore {
|
||||
id: number;
|
||||
classification: 'bullish' | 'bearish' | 'neutral';
|
||||
confidence: number;
|
||||
source: string;
|
||||
timestamp: string;
|
||||
}
|
||||
|
||||
interface SentimentResponse {
|
||||
symbol: string;
|
||||
scores: SentimentScore[];
|
||||
count: number;
|
||||
dimension_score: number | null;
|
||||
lookback_hours: number;
|
||||
}
|
||||
|
||||
// Fundamentals
|
||||
interface FundamentalResponse {
|
||||
symbol: string;
|
||||
pe_ratio: number | null;
|
||||
revenue_growth: number | null;
|
||||
earnings_surprise: number | null;
|
||||
market_cap: number | null;
|
||||
fetched_at: string | null;
|
||||
}
|
||||
|
||||
// Indicators
|
||||
interface IndicatorResult {
|
||||
indicator_type: string;
|
||||
values: Record<string, unknown>;
|
||||
score: number;
|
||||
bars_used: number;
|
||||
}
|
||||
|
||||
interface EMACrossResult {
|
||||
short_ema: number;
|
||||
long_ema: number;
|
||||
short_period: number;
|
||||
long_period: number;
|
||||
signal: 'bullish' | 'bearish' | 'neutral';
|
||||
}
|
||||
|
||||
// Tickers
|
||||
interface Ticker {
|
||||
id: number;
|
||||
symbol: string;
|
||||
created_at: string;
|
||||
}
|
||||
|
||||
// Admin
|
||||
interface AdminUser {
|
||||
id: number;
|
||||
username: string;
|
||||
role: string;
|
||||
has_access: boolean;
|
||||
created_at: string | null;
|
||||
updated_at: string | null;
|
||||
}
|
||||
|
||||
interface SystemSetting {
|
||||
key: string;
|
||||
value: string;
|
||||
updated_at: string | null;
|
||||
}
|
||||
```
|
||||
|
||||
|
||||
## Correctness Properties
|
||||
|
||||
*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.*
|
||||
|
||||
### Property 1: Token storage round-trip
|
||||
|
||||
*For any* valid JWT token string, storing it via `authStore.login(token)` and then reading `authStore.token` and `localStorage.getItem('token')` should both return the original token string.
|
||||
|
||||
**Validates: Requirements 1.1, 1.6**
|
||||
|
||||
### Property 2: Bearer token attachment
|
||||
|
||||
*For any* non-null token in the auth store, every request made through the API client should include an `Authorization` header with value `Bearer {token}`.
|
||||
|
||||
**Validates: Requirements 1.3, 12.3**
|
||||
|
||||
### Property 3: Registration form validation
|
||||
|
||||
*For any* empty username string or any password string shorter than 6 characters, the registration form should reject submission. *For any* non-empty username and password of length >= 6, the form should allow submission.
|
||||
|
||||
**Validates: Requirements 1.2**
|
||||
|
||||
### Property 4: Route protection based on auth state
|
||||
|
||||
*For any* protected route path, if no token exists in the auth store, navigation should redirect to `/login`. If a valid token exists, navigation should render the protected component.
|
||||
|
||||
**Validates: Requirements 2.1, 2.2**
|
||||
|
||||
### Property 5: API envelope unwrapping
|
||||
|
||||
*For any* API response with `status: "success"`, the API client should return the `data` field. *For any* API response with `status: "error"`, the API client should throw an error containing the `error` field message.
|
||||
|
||||
**Validates: Requirements 12.2**
|
||||
|
||||
### Property 6: Watchlist entry rendering completeness
|
||||
|
||||
*For any* watchlist entry, the rendered output should contain the symbol, entry type (with a visual badge distinguishing "auto" from "manual"), composite score, dimension scores, R:R ratio, R:R direction, and S/R levels.
|
||||
|
||||
**Validates: Requirements 3.2, 3.7**
|
||||
|
||||
### Property 7: Symbol click navigation
|
||||
|
||||
*For any* symbol displayed in the watchlist table, scanner table, or rankings table, clicking that symbol should trigger navigation to `/ticker/{symbol}`.
|
||||
|
||||
**Validates: Requirements 3.6, 5.6, 6.4**
|
||||
|
||||
### Property 8: Score card rendering
|
||||
|
||||
*For any* score response with a composite score and dimension scores, the ScoreCard component should render the composite score value and one entry per dimension with its name and score.
|
||||
|
||||
**Validates: Requirements 4.4**
|
||||
|
||||
### Property 9: Sentiment panel rendering
|
||||
|
||||
*For any* sentiment response, the rendered SentimentPanel should display the classification, confidence value, and dimension score.
|
||||
|
||||
**Validates: Requirements 4.5**
|
||||
|
||||
### Property 10: Fundamentals panel rendering
|
||||
|
||||
*For any* fundamentals response, the rendered FundamentalsPanel should display P/E ratio, revenue growth, earnings surprise, and market cap (or a placeholder for null values).
|
||||
|
||||
**Validates: Requirements 4.6**
|
||||
|
||||
### Property 11: Trade setup rendering
|
||||
|
||||
*For any* trade setup, the rendered table row should contain the symbol, direction, entry price, stop loss, target, R:R ratio, composite score, and detection timestamp.
|
||||
|
||||
**Validates: Requirements 5.2**
|
||||
|
||||
### Property 12: Scanner filtering
|
||||
|
||||
*For any* list of trade setups, minimum R:R filter value, and direction filter selection: all displayed setups should have `rr_ratio >= minRR` and (if direction is not "both") `direction === selectedDirection`.
|
||||
|
||||
**Validates: Requirements 5.3, 5.4**
|
||||
|
||||
### Property 13: Scanner sorting
|
||||
|
||||
*For any* list of trade setups and a selected sort column, the displayed rows should be ordered by that column's values (ascending or descending based on sort direction).
|
||||
|
||||
**Validates: Requirements 5.5**
|
||||
|
||||
### Property 14: Rankings display order
|
||||
|
||||
*For any* rankings response, the rendered list should display entries in descending order by composite score, with each entry showing rank position, symbol, composite score, and all dimension scores.
|
||||
|
||||
**Validates: Requirements 6.1, 6.2**
|
||||
|
||||
### Property 15: Admin user table rendering
|
||||
|
||||
*For any* admin user record, the rendered table row should contain the username, role, and access status.
|
||||
|
||||
**Validates: Requirements 7.2**
|
||||
|
||||
### Property 16: Number formatting
|
||||
|
||||
*For any* finite number, `formatPrice` should produce a string with exactly 2 decimal places. `formatPercent` should produce a string ending with `%`. `formatLargeNumber` should produce a string with an appropriate suffix (`K` for thousands, `M` for millions, `B` for billions) for values whose absolute value is >= 1000, and no suffix for smaller magnitudes.
|
||||
|
||||
**Validates: Requirements 13.4**
|
||||
|
||||
### Property 17: Weights form rendering
|
||||
|
||||
*For any* weights map (dimension name → number), the WeightsForm should render one labeled numeric input per dimension key.
|
||||
|
||||
**Validates: Requirements 11.1**
|
||||
|
||||
## Error Handling
|
||||
|
||||
### API Client Error Strategy
|
||||
|
||||
All errors flow through the Axios response interceptor and are surfaced via the Toast system:
|
||||
|
||||
| Error Type | Detection | Behavior |
|
||||
|---|---|---|
|
||||
| 401 Unauthorized | `error.response.status === 401` | Clear auth store, redirect to `/login` |
|
||||
| API error envelope | `envelope.status === 'error'` | Throw `ApiError` with `envelope.error` message |
|
||||
| Network error | No `error.response` | Throw `ApiError` with "Network error — check your connection" |
|
||||
| Timeout | Axios timeout (30s) | Throw `ApiError` with "Request timed out" |
|
||||
| Unknown | Catch-all | Throw `ApiError` with `error.message` fallback |
|
||||
|
||||
### Component-Level Error Handling
|
||||
|
||||
- **TanStack Query `onError`**: Each mutation hook passes errors to the toast system
|
||||
- **Query error states**: Components check `isError` and render inline error messages
|
||||
- **Ticker Detail partial failure**: Each data section (scores, sentiment, fundamentals, S/R, OHLCV) is an independent query. If one fails, the others still render. Failed sections show an inline error with a retry button.
|
||||
- **Form validation**: Client-side validation before API calls (username length, password length, numeric inputs). Invalid submissions are blocked with inline field errors.
|
||||
|
||||
### Toast System
|
||||
|
||||
```typescript
|
||||
type ToastType = 'success' | 'error' | 'info';
|
||||
|
||||
interface Toast {
|
||||
id: string;
|
||||
type: ToastType;
|
||||
message: string;
|
||||
}
|
||||
|
||||
// Auto-dismiss after 4 seconds
|
||||
// Max 3 toasts visible at once (oldest dismissed first)
|
||||
// Error toasts: red accent, Success: green accent, Info: blue accent
|
||||
```
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Testing Stack
|
||||
|
||||
| Tool | Purpose |
|
||||
|---|---|
|
||||
| Vitest | Test runner (Vite-native, fast) |
|
||||
| React Testing Library | Component rendering + DOM queries |
|
||||
| fast-check | Property-based testing |
|
||||
| MSW (Mock Service Worker) | API mocking for integration tests |
|
||||
|
||||
### Unit Tests
|
||||
|
||||
Unit tests cover specific examples, edge cases, and integration points:
|
||||
|
||||
- **Auth flow**: Login stores token, logout clears token, 401 triggers logout
|
||||
- **API client**: Envelope unwrapping for success/error, timeout config, Bearer header
|
||||
- **Routing**: Unauthenticated redirect, admin-only route guard, non-admin redirect
|
||||
- **Component rendering**: Each page renders with mock data, loading skeletons appear, error states display
|
||||
- **Form validation**: Empty username rejected, short password rejected, valid inputs accepted
|
||||
- **Confirmation dialog**: Delete ticker shows confirm before API call
|
||||
- **Partial failure**: Ticker detail renders available sections when one query fails
|
||||
|
||||
### Property-Based Tests
|
||||
|
||||
Each correctness property maps to a single `fast-check` property test with minimum 100 iterations. Tests are tagged with the property reference:
|
||||
|
||||
```typescript
|
||||
// Feature: signal-dashboard, Property 16: Number formatting
|
||||
test.prop([fc.double({ min: -1e15, max: 1e15, noNaN: true })], (n) => {
|
||||
const result = formatPrice(n);
|
||||
expect(result).toMatch(/\.\d{2}$/);
|
||||
});
|
||||
```
|
||||
|
||||
Property tests focus on:
|
||||
- **Pure functions**: `format.ts` utilities (Property 16)
|
||||
- **Store logic**: Auth store token round-trip (Property 1)
|
||||
- **API client interceptors**: Envelope unwrapping (Property 5), Bearer attachment (Property 2)
|
||||
- **Filtering/sorting logic**: Scanner filter functions (Properties 12, 13)
|
||||
- **Component rendering**: Given generated data, components render required fields (Properties 6, 8–11, 14, 15, 17)
|
||||
- **Routing guards**: Protected route behavior based on auth state (Property 4)
|
||||
|
||||
### Test Configuration
|
||||
|
||||
- Vitest config in `frontend/vitest.config.ts` with jsdom environment
|
||||
- `fast-check` configured with `{ numRuns: 100 }` minimum per property
|
||||
- MSW handlers for all API endpoints used in integration tests
|
||||
- Each property test tagged: `Feature: signal-dashboard, Property {N}: {title}`
|
||||
200
.kiro/specs/signal-dashboard/requirements.md
Normal file
200
.kiro/specs/signal-dashboard/requirements.md
Normal file
@@ -0,0 +1,200 @@
|
||||
# Requirements Document
|
||||
|
||||
## Introduction
|
||||
|
||||
Signal Dashboard is a single-page application (SPA) frontend for the Stock Data Backend API. The Dashboard provides authenticated users with a visual interface to monitor watchlists, analyze individual tickers across multiple dimensions (technical, S/R, sentiment, fundamentals, momentum), scan for asymmetric risk:reward trade setups, view composite-score rankings, and manage system administration. The Dashboard consumes the existing REST API at `/api/v1/` and is served as static files by Nginx on the same domain (`signal.thiessen.io`).
|
||||
|
||||
Technology choice: React 18 + TypeScript + Vite, with TanStack Query for data fetching, Zustand for auth state, Tailwind CSS for styling, Recharts for charting, and React Router for navigation. This stack prioritizes maintainability, small bundle size, and a modern developer experience without framework bloat.
|
||||
|
||||
## Glossary
|
||||
|
||||
- **Dashboard**: The Signal Dashboard SPA frontend application
|
||||
- **API_Client**: The HTTP client module that communicates with the backend REST API
|
||||
- **Auth_Module**: The authentication subsystem handling login, registration, token storage, and token refresh
|
||||
- **Watchlist_View**: The main overview page displaying the user's watchlist entries with enriched score data
|
||||
- **Ticker_Detail_View**: The per-ticker analysis page showing price chart, indicators, S/R levels, sentiment, and fundamentals
|
||||
- **Scanner_View**: The trade setup scanner page displaying R:R filtered setups
|
||||
- **Rankings_View**: The page displaying all tickers sorted by composite score
|
||||
- **Admin_Panel**: The administration interface for user management, job control, system settings, and data cleanup
|
||||
- **Router**: The client-side routing module controlling navigation and access guards
|
||||
- **Token_Store**: The client-side storage mechanism for JWT access tokens
|
||||
- **Chart_Component**: The interactive price chart component rendering OHLCV candlestick data with overlays
|
||||
- **Score_Card**: A UI component displaying a composite score and its dimension breakdown
|
||||
- **Toast_System**: The notification subsystem displaying transient success/error messages to the user
|
||||
|
||||
## Requirements
|
||||
|
||||
### Requirement 1: JWT Authentication Flow
|
||||
|
||||
**User Story:** As a user, I want to log in and register so that I can access the dashboard securely.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN a user submits valid credentials on the login form, THE Auth_Module SHALL send a POST request to `/api/v1/auth/login` and store the returned JWT token in the Token_Store
|
||||
2. WHEN a user submits a registration form with a username (minimum 1 character) and password (minimum 6 characters), THE Auth_Module SHALL send a POST request to `/api/v1/auth/register` and display a success message via the Toast_System
|
||||
3. WHILE a valid JWT token exists in the Token_Store, THE API_Client SHALL include the token as a Bearer authorization header on all subsequent API requests
|
||||
4. WHEN the API returns a 401 Unauthorized response, THE Auth_Module SHALL clear the Token_Store and redirect the user to the login page
|
||||
5. IF the login or registration request fails, THEN THE Auth_Module SHALL display the error message from the API response via the Toast_System
|
||||
6. THE Token_Store SHALL persist the JWT token in browser localStorage so that sessions survive page reloads
|
||||
|
||||
### Requirement 2: Protected Routing and Role-Based Access
|
||||
|
||||
**User Story:** As a user, I want the app to enforce access control so that unauthenticated users cannot access protected pages and only admins can access admin features.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHILE no valid JWT token exists in the Token_Store, THE Router SHALL redirect any attempted navigation to a protected route to the login page
|
||||
2. WHILE a valid JWT token exists in the Token_Store, THE Router SHALL allow navigation to protected routes (Watchlist_View, Ticker_Detail_View, Scanner_View, Rankings_View)
|
||||
3. WHILE the authenticated user has an admin role, THE Router SHALL allow navigation to the Admin_Panel
|
||||
4. WHILE the authenticated user has a non-admin role, THE Router SHALL redirect any attempted navigation to the Admin_Panel to the Watchlist_View
|
||||
5. THE Router SHALL provide a navigation sidebar or top bar with links to all accessible views for the authenticated user
|
||||
|
||||
### Requirement 3: Watchlist Overview
|
||||
|
||||
**User Story:** As a user, I want to see my watchlist with composite scores, dimension breakdowns, and R:R ratios so that I can quickly assess my tracked tickers.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN the Watchlist_View loads, THE Dashboard SHALL fetch data from `GET /api/v1/watchlist` and display each entry as a card or row
|
||||
2. THE Watchlist_View SHALL display for each entry: symbol, entry type (auto/manual), composite score, dimension scores, R:R ratio, R:R direction, and nearest S/R levels
|
||||
3. WHEN a user clicks the add-to-watchlist control and enters a valid ticker symbol, THE Dashboard SHALL send a POST request to `/api/v1/watchlist/{symbol}` and refresh the watchlist
|
||||
4. WHEN a user clicks the remove button on a watchlist entry, THE Dashboard SHALL send a DELETE request to `/api/v1/watchlist/{symbol}` and remove the entry from the display
|
||||
5. IF the watchlist API request fails, THEN THE Dashboard SHALL display the error message via the Toast_System
|
||||
6. WHEN a user clicks on a watchlist entry symbol, THE Router SHALL navigate to the Ticker_Detail_View for that symbol
|
||||
7. THE Watchlist_View SHALL visually distinguish auto-populated entries from manual entries using a badge or label
|
||||
|
||||
### Requirement 4: Ticker Detail View
|
||||
|
||||
**User Story:** As a user, I want to see a comprehensive analysis of a single ticker including price chart, indicators, S/R levels, sentiment, and fundamentals so that I can make informed decisions.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN the Ticker_Detail_View loads for a given symbol, THE Dashboard SHALL fetch data in parallel from: `GET /api/v1/ohlcv/{symbol}`, `GET /api/v1/scores/{symbol}`, `GET /api/v1/sr-levels/{symbol}`, `GET /api/v1/sentiment/{symbol}`, and `GET /api/v1/fundamentals/{symbol}`
|
||||
2. THE Chart_Component SHALL render OHLCV data as a candlestick chart with date on the x-axis and price on the y-axis
|
||||
3. THE Chart_Component SHALL overlay S/R levels as horizontal lines on the price chart, color-coded by type (support in green, resistance in red)
|
||||
4. THE Ticker_Detail_View SHALL display the composite score and all dimension scores using Score_Card components
|
||||
5. THE Ticker_Detail_View SHALL display sentiment data including classification (bullish/bearish/neutral), confidence, and the time-decay weighted dimension score
|
||||
6. THE Ticker_Detail_View SHALL display fundamental data including P/E ratio, revenue growth, earnings surprise, and market cap
|
||||
7. WHEN a user selects an indicator type (ADX, EMA, RSI, ATR, volume_profile, pivot_points), THE Dashboard SHALL fetch data from `GET /api/v1/indicators/{symbol}/{indicator_type}` and display the result with its normalized score
|
||||
8. WHEN a user requests the EMA cross signal, THE Dashboard SHALL fetch data from `GET /api/v1/indicators/{symbol}/ema-cross` and display the signal (bullish/bearish/neutral) with short and long EMA values
|
||||
9. IF any data fetch fails for the Ticker_Detail_View, THEN THE Dashboard SHALL display an inline error message for the failed section while rendering the remaining sections normally
|
||||
|
||||
### Requirement 5: Trade Setup Scanner
|
||||
|
||||
**User Story:** As a user, I want to scan for trade setups with favorable risk:reward ratios so that I can find asymmetric opportunities.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN the Scanner_View loads, THE Dashboard SHALL fetch data from `GET /api/v1/trades` and display all trade setups in a sortable table
|
||||
2. THE Scanner_View SHALL display for each trade setup: symbol, direction (long/short), entry price, stop loss, target, R:R ratio, composite score, and detection timestamp
|
||||
3. THE Scanner_View SHALL allow the user to filter trade setups by minimum R:R ratio using a numeric input
|
||||
4. THE Scanner_View SHALL allow the user to filter trade setups by direction (long, short, or both)
|
||||
5. THE Scanner_View SHALL allow the user to sort the table by any column (R:R ratio, composite score, symbol, detection time)
|
||||
6. WHEN a user clicks on a trade setup symbol, THE Router SHALL navigate to the Ticker_Detail_View for that symbol
|
||||
|
||||
### Requirement 6: Rankings View
|
||||
|
||||
**User Story:** As a user, I want to see all tickers ranked by composite score so that I can identify the strongest opportunities.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN the Rankings_View loads, THE Dashboard SHALL fetch data from `GET /api/v1/rankings` and display tickers sorted by composite score descending
|
||||
2. THE Rankings_View SHALL display for each ticker: rank position, symbol, composite score, and all dimension scores
|
||||
3. THE Rankings_View SHALL display the current scoring weights used for composite calculation
|
||||
4. WHEN a user clicks on a ranked ticker symbol, THE Router SHALL navigate to the Ticker_Detail_View for that symbol
|
||||
|
||||
### Requirement 7: Admin Panel — User Management
|
||||
|
||||
**User Story:** As an admin, I want to manage user accounts so that I can control access to the platform.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN the Admin_Panel user management section loads, THE Dashboard SHALL fetch data from `GET /api/v1/admin/users` and display all users in a table
|
||||
2. THE Admin_Panel SHALL display for each user: username, role, and access status
|
||||
3. WHEN an admin clicks the create-user control and submits a username, password, role, and access flag, THE Dashboard SHALL send a POST request to `/api/v1/admin/users` and refresh the user list
|
||||
4. WHEN an admin toggles a user's access status, THE Dashboard SHALL send a PUT request to `/api/v1/admin/users/{user_id}/access` with the new access flag
|
||||
5. WHEN an admin resets a user's password, THE Dashboard SHALL send a PUT request to `/api/v1/admin/users/{user_id}/password` with the new password
|
||||
6. IF any admin user management request fails, THEN THE Dashboard SHALL display the error message via the Toast_System
|
||||
|
||||
### Requirement 8: Admin Panel — System Settings and Jobs
|
||||
|
||||
**User Story:** As an admin, I want to manage system settings, scheduled jobs, and data cleanup so that I can maintain the platform.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN the Admin_Panel settings section loads, THE Dashboard SHALL fetch data from `GET /api/v1/admin/settings` and display all settings as editable fields
|
||||
2. WHEN an admin updates a system setting value, THE Dashboard SHALL send a PUT request to `/api/v1/admin/settings/{key}` with the new value
|
||||
3. WHEN an admin toggles the registration setting, THE Dashboard SHALL send a PUT request to `/api/v1/admin/settings/registration` with the enabled flag
|
||||
4. WHEN an admin toggles a scheduled job on or off, THE Dashboard SHALL send a PUT request to `/api/v1/admin/jobs/{job_name}/toggle` with the enabled flag
|
||||
5. WHEN an admin triggers a scheduled job manually, THE Dashboard SHALL send a POST request to `/api/v1/admin/jobs/{job_name}/trigger` and display a confirmation via the Toast_System
|
||||
6. WHEN an admin submits a data cleanup request with an older-than-days value, THE Dashboard SHALL send a POST request to `/api/v1/admin/data/cleanup` with the specified value and display the result via the Toast_System
|
||||
|
||||
### Requirement 9: Ticker Management
|
||||
|
||||
**User Story:** As a user, I want to add and remove tickers from the system so that I can track the stocks I care about.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN a user submits a new ticker symbol via the add-ticker form, THE Dashboard SHALL send a POST request to `/api/v1/tickers` with the symbol and refresh the ticker list
|
||||
2. WHEN a user views the ticker list, THE Dashboard SHALL fetch data from `GET /api/v1/tickers` and display all registered tickers
|
||||
3. WHEN a user clicks the delete button on a ticker, THE Dashboard SHALL display a confirmation dialog before sending a DELETE request to `/api/v1/tickers/{symbol}`
|
||||
4. IF a ticker deletion or creation request fails, THEN THE Dashboard SHALL display the error message via the Toast_System
|
||||
5. WHEN a ticker is successfully deleted, THE Dashboard SHALL remove the ticker from the displayed list without requiring a full page reload
|
||||
|
||||
### Requirement 10: Data Ingestion Trigger
|
||||
|
||||
**User Story:** As a user, I want to manually trigger data ingestion for a specific ticker so that I can get fresh data on demand.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. WHEN a user clicks the fetch-data button on the Ticker_Detail_View, THE Dashboard SHALL send a POST request to `/api/v1/ingestion/fetch/{symbol}`
|
||||
2. WHILE the ingestion request is in progress, THE Dashboard SHALL display a loading indicator on the fetch-data button
|
||||
3. WHEN the ingestion request completes successfully, THE Dashboard SHALL display a success message via the Toast_System and refresh the OHLCV data on the Ticker_Detail_View
|
||||
4. IF the ingestion request fails, THEN THE Dashboard SHALL display the error message via the Toast_System
|
||||
|
||||
### Requirement 11: Score Weight Configuration
|
||||
|
||||
**User Story:** As a user, I want to adjust the scoring dimension weights so that I can customize the composite score calculation to my strategy.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. THE Dashboard SHALL display the current scoring weights on the Rankings_View with editable numeric inputs for each dimension
|
||||
2. WHEN a user modifies one or more weight values and submits the form, THE Dashboard SHALL send a PUT request to `/api/v1/scores/weights` with the updated weights map
|
||||
3. WHEN the weight update succeeds, THE Dashboard SHALL refresh the rankings data to reflect the new composite scores
|
||||
4. IF the weight update request fails, THEN THE Dashboard SHALL display the error message via the Toast_System
|
||||
|
||||
### Requirement 12: API Client and Error Handling
|
||||
|
||||
**User Story:** As a developer, I want a centralized API client with consistent error handling so that all API interactions follow the same patterns.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. THE API_Client SHALL send all requests to the base URL `/api/v1/` using the JSON content type
|
||||
2. THE API_Client SHALL unwrap the API envelope (`{ status, data, error }`) and return the `data` field on success or throw an error with the `error` field on failure
|
||||
3. THE API_Client SHALL attach the JWT Bearer token from the Token_Store to every authenticated request
|
||||
4. WHEN the API_Client receives a network error or timeout, THE API_Client SHALL throw a descriptive error that the calling component can display via the Toast_System
|
||||
5. THE API_Client SHALL set a request timeout of 30 seconds for all API calls
|
||||
|
||||
### Requirement 13: Responsive Layout and Visual Design
|
||||
|
||||
**User Story:** As a user, I want the dashboard to have a clean, modern interface that works on desktop and tablet screens so that I can use it comfortably.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. THE Dashboard SHALL use a sidebar navigation layout on screens wider than 1024 pixels and a collapsible hamburger menu on narrower screens
|
||||
2. THE Dashboard SHALL use a dark color scheme with accent colors for positive (green) and negative (red) values consistent with financial data conventions
|
||||
3. THE Dashboard SHALL apply smooth transitions (duration 150ms to 300ms) for page navigation, modal openings, and interactive element state changes
|
||||
4. THE Dashboard SHALL display numeric financial values with appropriate formatting: prices to 2 decimal places, percentages with a percent sign, large numbers with abbreviations (K, M, B)
|
||||
5. THE Dashboard SHALL display loading skeleton placeholders while data is being fetched from the API
|
||||
|
||||
### Requirement 14: Static SPA Build and Deployment
|
||||
|
||||
**User Story:** As a developer, I want the frontend to build as static files that Nginx can serve alongside the backend API so that deployment is simple.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
1. THE Dashboard SHALL produce a static build output (HTML, CSS, JS) in a `dist/` directory via a single build command
|
||||
2. THE Dashboard SHALL use hash-based filenames for JS and CSS assets to enable long-term browser caching
|
||||
3. THE Dashboard SHALL support client-side routing with a fallback to `index.html` for all non-API routes (Nginx `try_files` configuration)
|
||||
4. THE Dashboard SHALL proxy API requests to `/api/v1/` on the same origin, requiring no CORS configuration in production
|
||||
5. WHEN the `GET /api/v1/health` endpoint returns a success response, THE Dashboard SHALL consider the backend available
|
||||
238
.kiro/specs/signal-dashboard/tasks.md
Normal file
238
.kiro/specs/signal-dashboard/tasks.md
Normal file
@@ -0,0 +1,238 @@
|
||||
# Implementation Plan: Signal Dashboard
|
||||
|
||||
## Overview
|
||||
|
||||
Incremental build of the Signal Dashboard SPA in `frontend/`. Each phase wires up end-to-end before moving on, so there's always a runnable app. Backend API is already live — we consume it as-is.
|
||||
|
||||
## Tasks
|
||||
|
||||
- [x] 1. Scaffold Vite + React + TypeScript project
|
||||
- [x] 1.1 Initialize `frontend/` with Vite React-TS template, install dependencies (react, react-dom, react-router-dom, @tanstack/react-query, zustand, axios, recharts, tailwindcss, postcss, autoprefixer)
|
||||
- Create `package.json`, `tsconfig.json`, `vite.config.ts`, `tailwind.config.ts`, `postcss.config.js`
|
||||
- Configure Vite proxy for `/api/v1/` to backend during dev
|
||||
- Configure Tailwind with dark mode class strategy
|
||||
- Create `src/main.tsx`, `src/App.tsx`, `src/styles/globals.css` with Tailwind directives
|
||||
- _Requirements: 14.1, 14.4, 13.2_
|
||||
|
||||
- [x] 1.2 Create shared TypeScript interfaces and formatting utilities
|
||||
- Create `src/lib/types.ts` with all data model interfaces (APIEnvelope, TokenResponse, WatchlistEntry, OHLCVBar, ScoreResponse, TradeSetup, SRLevel, SentimentResponse, FundamentalResponse, IndicatorResult, EMACrossResult, Ticker, AdminUser, SystemSetting, etc.)
|
||||
- Create `src/lib/format.ts` with `formatPrice`, `formatPercent`, `formatLargeNumber`, `formatDate`, `formatDateTime`
|
||||
- _Requirements: 13.4_
|
||||
|
||||
- [ ]* 1.3 Write property tests for formatting utilities
|
||||
- **Property 16: Number formatting**
|
||||
- **Validates: Requirements 13.4**
|
||||
- Install vitest, @testing-library/react, fast-check as dev dependencies
|
||||
- Create `frontend/vitest.config.ts` with jsdom environment
|
||||
- Create `frontend/tests/property/format.test.ts`
|
||||
- Test `formatPrice` always produces 2 decimal places, `formatPercent` ends with `%`, `formatLargeNumber` uses correct suffix
|
||||
|
||||
- [x] 2. API client and auth store
|
||||
- [x] 2.1 Create Axios API client with interceptors
|
||||
- Create `src/api/client.ts` with base URL `/api/v1/`, 30s timeout, JSON content type
|
||||
- Add request interceptor to attach Bearer token from auth store
|
||||
- Add response interceptor to unwrap `{ status, data, error }` envelope
|
||||
- Add 401 handler that clears auth store and redirects to login
|
||||
- Create `ApiError` class for typed error handling
|
||||
- _Requirements: 12.1, 12.2, 12.3, 12.4, 12.5_
|
||||
|
||||
- [x] 2.2 Create Zustand auth store
|
||||
- Create `src/stores/authStore.ts` with token, username, role state
|
||||
- `login(token)` decodes JWT payload, extracts `sub` and `role`, persists to localStorage
|
||||
- `logout()` clears state and localStorage
|
||||
- Initialize from localStorage on store creation for session persistence
|
||||
- _Requirements: 1.1, 1.6_
|
||||
|
||||
- [x] 2.3 Create API module files for each domain
|
||||
- Create `src/api/auth.ts` (login, register)
|
||||
- Create `src/api/watchlist.ts` (list, add, remove)
|
||||
- Create `src/api/tickers.ts` (list, create, delete)
|
||||
- Create `src/api/scores.ts` (getScores, getRankings, updateWeights)
|
||||
- Create `src/api/trades.ts` (list)
|
||||
- Create `src/api/ohlcv.ts` (getOHLCV)
|
||||
- Create `src/api/indicators.ts` (getIndicator, getEMACross)
|
||||
- Create `src/api/sr-levels.ts` (getLevels)
|
||||
- Create `src/api/sentiment.ts` (getSentiment)
|
||||
- Create `src/api/fundamentals.ts` (getFundamentals)
|
||||
- Create `src/api/ingestion.ts` (fetchData)
|
||||
- Create `src/api/admin.ts` (users CRUD, settings, jobs, cleanup)
|
||||
- Create `src/api/health.ts` (check)
|
||||
- _Requirements: 12.1, 12.2_
|
||||
|
||||
- [ ]* 2.4 Write property tests for API client and auth store
|
||||
- **Property 1: Token storage round-trip**
|
||||
- **Property 2: Bearer token attachment**
|
||||
- **Property 5: API envelope unwrapping**
|
||||
- **Validates: Requirements 1.1, 1.3, 1.6, 12.2, 12.3**
|
||||
|
||||
- [x] 3. Checkpoint — Verify foundation
|
||||
- Ensure all tests pass, ask the user if questions arise.
|
||||
|
||||
- [x] 4. Routing, layout, and auth pages
|
||||
- [x] 4.1 Create ProtectedRoute component and router setup
|
||||
- Create `src/components/auth/ProtectedRoute.tsx` — redirects to `/login` if no token, redirects non-admin away from admin routes
|
||||
- Set up React Router in `src/App.tsx` with route structure from design (login, register, protected shell with watchlist, ticker detail, scanner, rankings, admin)
|
||||
- _Requirements: 2.1, 2.2, 2.3, 2.4_
|
||||
|
||||
- [x] 4.2 Create AppShell layout with sidebar navigation
|
||||
- Create `src/components/layout/AppShell.tsx` — sidebar + main content area with `<Outlet />`
|
||||
- Create `src/components/layout/Sidebar.tsx` — nav links to watchlist, scanner, rankings, admin (admin link only if role is admin)
|
||||
- Create `src/components/layout/MobileNav.tsx` — hamburger menu for screens < 1024px
|
||||
- Apply dark color scheme with Tailwind dark mode classes
|
||||
- Add smooth transitions (150-300ms) for navigation and interactive elements
|
||||
- _Requirements: 2.5, 13.1, 13.2, 13.3_
|
||||
|
||||
- [x] 4.3 Create Login and Register pages
|
||||
- Create `src/pages/LoginPage.tsx` with username/password form, calls `useAuth().login` mutation
|
||||
- Create `src/pages/RegisterPage.tsx` with username (min 1 char) / password (min 6 chars) validation, calls `useAuth().register` mutation
|
||||
- Create `src/hooks/useAuth.ts` with login/register/logout mutations using TanStack Query
|
||||
- Display API errors via toast on failure, redirect to watchlist on login success
|
||||
- _Requirements: 1.1, 1.2, 1.4, 1.5_
|
||||
|
||||
- [x] 4.4 Create shared UI components
|
||||
- Create `src/components/ui/Toast.tsx` — toast context + portal, auto-dismiss 4s, max 3 visible, color-coded (red/green/blue)
|
||||
- Create `src/components/ui/Skeleton.tsx` — Tailwind `animate-pulse` placeholder blocks
|
||||
- Create `src/components/ui/Badge.tsx` — small label component for entry types
|
||||
- Create `src/components/ui/ConfirmDialog.tsx` — modal confirmation dialog
|
||||
- Create `src/components/ui/ScoreCard.tsx` — composite score display with colored ring (green > 70, yellow 40-70, red < 40) and dimension breakdown
|
||||
- _Requirements: 13.3, 13.5, 1.5_
|
||||
|
||||
- [ ]* 4.5 Write property tests for routing and registration validation
|
||||
- **Property 3: Registration form validation**
|
||||
- **Property 4: Route protection based on auth state**
|
||||
- **Validates: Requirements 1.2, 2.1, 2.2**
|
||||
|
||||
- [x] 5. Checkpoint — Verify auth flow and navigation
|
||||
- Ensure all tests pass, ask the user if questions arise.
|
||||
|
||||
- [x] 6. Watchlist view
|
||||
- [x] 6.1 Create TanStack Query hooks for watchlist
|
||||
- Create `src/hooks/useWatchlist.ts` with `useWatchlist()` query, `useAddToWatchlist()` mutation, `useRemoveFromWatchlist()` mutation
|
||||
- Invalidate watchlist query on add/remove success
|
||||
- Surface errors to toast system
|
||||
- _Requirements: 3.1, 3.3, 3.4, 3.5_
|
||||
|
||||
- [x] 6.2 Create WatchlistPage and sub-components
|
||||
- Create `src/pages/WatchlistPage.tsx` — fetches watchlist, renders table/cards, loading skeletons, error state
|
||||
- Create `src/components/watchlist/WatchlistTable.tsx` — displays symbol (clickable → `/ticker/{symbol}`), entry type badge (auto/manual), composite score, dimension scores, R:R ratio, R:R direction, nearest S/R levels, remove button
|
||||
- Create `src/components/watchlist/AddTickerForm.tsx` — input + submit to add symbol to watchlist
|
||||
- _Requirements: 3.1, 3.2, 3.3, 3.4, 3.6, 3.7_
|
||||
|
||||
- [ ]* 6.3 Write property tests for watchlist rendering
|
||||
- **Property 6: Watchlist entry rendering completeness**
|
||||
- **Property 7: Symbol click navigation** (watchlist portion)
|
||||
- **Validates: Requirements 3.2, 3.6, 3.7**
|
||||
|
||||
- [x] 7. Ticker detail view
|
||||
- [x] 7.1 Create TanStack Query hooks for ticker detail
|
||||
- Create `src/hooks/useTickerDetail.ts` with parallel queries for OHLCV, scores, S/R levels, sentiment, fundamentals
|
||||
- Each query is independent — partial failure renders available sections
|
||||
- _Requirements: 4.1, 4.9_
|
||||
|
||||
- [x] 7.2 Create TickerDetailPage with chart and data panels
|
||||
- Create `src/pages/TickerDetailPage.tsx` — orchestrates parallel data fetching, renders sections with independent loading/error states
|
||||
- Create `src/components/charts/CandlestickChart.tsx` — Recharts ComposedChart with custom Bar shapes for OHLCV candles, date x-axis, price y-axis
|
||||
- Create `src/components/ticker/SROverlay.tsx` — renders S/R levels as ReferenceLine components on chart (green = support, red = resistance)
|
||||
- Render ScoreCard for composite + dimension scores
|
||||
- _Requirements: 4.1, 4.2, 4.3, 4.4, 4.9_
|
||||
|
||||
- [x] 7.3 Create sentiment, fundamentals, and indicator panels
|
||||
- Create `src/components/ticker/SentimentPanel.tsx` — displays classification, confidence, dimension score
|
||||
- Create `src/components/ticker/FundamentalsPanel.tsx` — displays P/E, revenue growth, earnings surprise, market cap (placeholder for nulls)
|
||||
- Create `src/components/ticker/IndicatorSelector.tsx` — dropdown to select indicator type (ADX, EMA, RSI, ATR, volume_profile, pivot_points), fetches from `/api/v1/indicators/{symbol}/{type}`, displays result with normalized score. Includes EMA cross signal display.
|
||||
- _Requirements: 4.5, 4.6, 4.7, 4.8_
|
||||
|
||||
- [x] 7.4 Add data ingestion trigger to ticker detail
|
||||
- Add fetch-data button to TickerDetailPage
|
||||
- POST to `/api/v1/ingestion/fetch/{symbol}`, show loading indicator on button, toast on success/failure, refresh OHLCV data on success
|
||||
- _Requirements: 10.1, 10.2, 10.3, 10.4_
|
||||
|
||||
- [ ]* 7.5 Write property tests for ticker detail components
|
||||
- **Property 8: Score card rendering**
|
||||
- **Property 9: Sentiment panel rendering**
|
||||
- **Property 10: Fundamentals panel rendering**
|
||||
- **Validates: Requirements 4.4, 4.5, 4.6**
|
||||
|
||||
- [x] 8. Checkpoint — Verify watchlist and ticker detail
|
||||
- Ensure all tests pass, ask the user if questions arise.
|
||||
|
||||
- [x] 9. Scanner view
|
||||
- [x] 9.1 Create TanStack Query hooks and scanner page
|
||||
- Create `src/hooks/useTrades.ts` with `useTrades()` query
|
||||
- Create `src/pages/ScannerPage.tsx` — fetches trade setups, renders filter controls and table, loading skeletons
|
||||
- Create `src/components/scanner/TradeTable.tsx` — sortable table displaying symbol (clickable → `/ticker/{symbol}`), direction, entry price, stop loss, target, R:R ratio, composite score, detection timestamp
|
||||
- Add filter controls: minimum R:R numeric input, direction dropdown (long/short/both)
|
||||
- Add column sorting (R:R ratio, composite score, symbol, detection time) with ascending/descending toggle
|
||||
- _Requirements: 5.1, 5.2, 5.3, 5.4, 5.5, 5.6_
|
||||
|
||||
- [ ]* 9.2 Write property tests for scanner filtering and sorting
|
||||
- **Property 11: Trade setup rendering**
|
||||
- **Property 12: Scanner filtering**
|
||||
- **Property 13: Scanner sorting**
|
||||
- **Validates: Requirements 5.2, 5.3, 5.4, 5.5**
|
||||
|
||||
- [x] 10. Rankings view
|
||||
- [x] 10.1 Create TanStack Query hooks and rankings page
|
||||
- Create `src/hooks/useScores.ts` with `useRankings()` query, `useUpdateWeights()` mutation
|
||||
- Create `src/pages/RankingsPage.tsx` — fetches rankings, renders table sorted by composite score descending, displays current weights
|
||||
- Create `src/components/rankings/RankingsTable.tsx` — displays rank position, symbol (clickable → `/ticker/{symbol}`), composite score, all dimension scores
|
||||
- Create `src/components/rankings/WeightsForm.tsx` — editable numeric inputs per dimension, submit updates weights via PUT, refreshes rankings on success
|
||||
- _Requirements: 6.1, 6.2, 6.3, 6.4, 11.1, 11.2, 11.3, 11.4_
|
||||
|
||||
- [ ]* 10.2 Write property tests for rankings and weights
|
||||
- **Property 14: Rankings display order**
|
||||
- **Property 17: Weights form rendering**
|
||||
- **Validates: Requirements 6.1, 6.2, 11.1**
|
||||
|
||||
- [x] 11. Checkpoint — Verify scanner and rankings
|
||||
- Ensure all tests pass, ask the user if questions arise.
|
||||
|
||||
- [x] 12. Ticker management
|
||||
- [x] 12.1 Create TanStack Query hooks and ticker management UI
|
||||
- Create `src/hooks/useTickers.ts` with `useTickers()` query, `useAddTicker()` mutation, `useDeleteTicker()` mutation
|
||||
- Add ticker list display to an appropriate location (e.g., admin page or dedicated section)
|
||||
- Add ticker form for adding new symbols
|
||||
- Delete button triggers ConfirmDialog before sending DELETE request
|
||||
- Remove ticker from display on successful delete without full page reload
|
||||
- _Requirements: 9.1, 9.2, 9.3, 9.4, 9.5_
|
||||
|
||||
- [x] 13. Admin panel
|
||||
- [x] 13.1 Create admin hooks and user management section
|
||||
- Create `src/hooks/useAdmin.ts` with queries and mutations for users, settings, jobs, cleanup
|
||||
- Create `src/pages/AdminPage.tsx` — tabbed layout with user management, settings, jobs, data cleanup sections
|
||||
- Create `src/components/admin/UserTable.tsx` — displays username, role, access status; toggle access, reset password controls
|
||||
- Add create-user form (username, password, role, access flag)
|
||||
- _Requirements: 7.1, 7.2, 7.3, 7.4, 7.5, 7.6_
|
||||
|
||||
- [x] 13.2 Create settings, jobs, and data cleanup sections
|
||||
- Create `src/components/admin/SettingsForm.tsx` — editable fields for each setting, registration toggle
|
||||
- Create `src/components/admin/JobControls.tsx` — toggle on/off per job, manual trigger button, toast confirmation
|
||||
- Create `src/components/admin/DataCleanup.tsx` — older-than-days input, submit cleanup, display result via toast
|
||||
- _Requirements: 8.1, 8.2, 8.3, 8.4, 8.5, 8.6_
|
||||
|
||||
- [ ]* 13.3 Write property test for admin user table rendering
|
||||
- **Property 15: Admin user table rendering**
|
||||
- **Validates: Requirements 7.2**
|
||||
|
||||
- [x] 14. Final wiring and polish
|
||||
- [x] 14.1 Add health check and loading states
|
||||
- Create health check query using `GET /api/v1/health` — display backend status indicator in sidebar
|
||||
- Ensure all pages show Skeleton placeholders during loading
|
||||
- Ensure all mutation errors surface through Toast system consistently
|
||||
- _Requirements: 14.5, 13.5, 12.4_
|
||||
|
||||
- [x] 14.2 Configure production build
|
||||
- Verify `vite build` outputs to `frontend/dist/` with hashed filenames
|
||||
- Add Nginx config snippet in comments or README for `try_files $uri $uri/ /index.html` and `/api/v1/` proxy
|
||||
- _Requirements: 14.1, 14.2, 14.3, 14.4_
|
||||
|
||||
- [x] 15. Final checkpoint — Ensure all tests pass
|
||||
- Ensure all tests pass, ask the user if questions arise.
|
||||
|
||||
## Notes
|
||||
|
||||
- Tasks marked with `*` are optional property test tasks and can be skipped for a faster MVP
|
||||
- Each task references specific requirements for traceability
|
||||
- Backend API is already running — no backend changes needed
|
||||
- All 17 correctness properties are covered across optional test tasks
|
||||
- Checkpoints are placed after each major phase for incremental validation
|
||||
1
.kiro/specs/stock-data-backend/.config.kiro
Normal file
1
.kiro/specs/stock-data-backend/.config.kiro
Normal file
@@ -0,0 +1 @@
|
||||
{"specId": "fa730cf4-a14d-4f62-8993-fd7db6fe25cc", "workflowType": "requirements-first", "specType": "feature"}
|
||||
1121
.kiro/specs/stock-data-backend/design.md
Normal file
1121
.kiro/specs/stock-data-backend/design.md
Normal file
File diff suppressed because it is too large
Load Diff
221
.kiro/specs/stock-data-backend/requirements.md
Normal file
221
.kiro/specs/stock-data-backend/requirements.md
Normal file
@@ -0,0 +1,221 @@
|
||||
# Requirements Document
|
||||
|
||||
## Introduction
|
||||
|
||||
This document defines the requirements for the Stock Data Backend — an opinionated investing-signal platform built with Python/FastAPI and PostgreSQL, focused on NASDAQ stocks. The platform's philosophy: find the path of least resistance (trend direction), identify key support/resistance zones, detect asymmetric risk-reward setups, and surface the best opportunities through a unified scoring pipeline. It does not attempt to predict price — it identifies where conditions are most favorable.
|
||||
|
||||
Every data source (OHLCV, technical indicators, sentiment, fundamentals) feeds into a single composite scoring and ranking system that auto-populates a watchlist and flags trade setups. Data ingestion is exclusively via the configured market data provider — users do not upload data directly.
|
||||
|
||||
This is an MVP focused on delivering actionable signals. Engineering concerns (API format, database indexing, logging, connection pooling, graceful shutdown) are design constraints, not requirements.
|
||||
|
||||
## Glossary
|
||||
|
||||
- **Backend_Service**: The FastAPI-based Python web application that exposes REST API endpoints.
|
||||
- **Ticker**: A unique NASDAQ stock symbol (e.g., AAPL, MSFT) being tracked by the system.
|
||||
- **OHLCV_Record**: A single price data point containing Open, High, Low, Close, and Volume values for a specific Ticker on a specific date.
|
||||
- **Ticker_Registry**: The subsystem responsible for adding, removing, listing, and looking up tracked NASDAQ tickers.
|
||||
- **Price_Store**: The subsystem responsible for persisting and retrieving OHLCV price data in PostgreSQL.
|
||||
- **Ingestion_Pipeline**: The subsystem responsible for importing stock data into the Price_Store via the configured market data provider.
|
||||
- **Data_Collector**: A scheduled job that periodically fetches the latest price data for all tracked tickers and upserts it into the Price_Store.
|
||||
- **Auth_Service**: The subsystem responsible for user registration, login, JWT token management, and role-based access control.
|
||||
- **User**: A registered account with a username, hashed password, and assigned role (user or admin).
|
||||
- **Admin**: A User with the admin role who can manage other users and configure system settings.
|
||||
- **Access_Token**: A JWT token issued upon login, expires after 60 minutes.
|
||||
- **ADX**: Average Directional Index — measures trend strength (0-100). Values above 25 indicate a strong trend.
|
||||
- **EMA**: Exponential Moving Average — configurable period. EMA Cross (e.g., 20/50) determines directional bias.
|
||||
- **RSI**: Relative Strength Index — momentum oscillator (0-100). Overbought >70, oversold <30.
|
||||
- **ATR**: Average True Range — measures price volatility. Used for stop-loss and target placement.
|
||||
- **Volume_Profile**: Distribution of traded volume across price levels, producing POC, Value Area, HVN, and LVN.
|
||||
- **POC**: Point of Control — price level with highest traded volume.
|
||||
- **HVN**: High Volume Node — above-average volume level, acts as support/resistance magnet.
|
||||
- **LVN**: Low Volume Node — below-average volume level, acts as breakout zone.
|
||||
- **Pivot_Point**: A support or resistance level from swing highs and swing lows.
|
||||
- **SR_Level**: A support or resistance level tagged with type, strength score, and detection method.
|
||||
- **SR_Detector**: The subsystem that auto-calculates support and resistance levels.
|
||||
- **Sentiment_Score**: A record containing bullish/bearish/neutral classification, confidence (0-100), source, and timestamp for a Ticker.
|
||||
- **Fundamental_Data**: Key financial metrics: P/E ratio, revenue growth rate, earnings surprise %, and market cap.
|
||||
- **Composite_Score**: A weighted aggregate score (0-100) from all dimension scores for a Ticker.
|
||||
- **Dimension_Score**: A normalized score (0-100) for a single analysis dimension (technical, S/R quality, sentiment, fundamental, momentum).
|
||||
- **Scoring_Engine**: The subsystem that computes dimension scores, applies weights, and produces Composite_Scores.
|
||||
- **RR_Scanner**: The subsystem that scans for asymmetric risk-reward trade setups.
|
||||
- **Trade_Setup**: A detected trade opportunity with entry, stop-loss, target, R:R ratio, direction (long/short), and Composite_Score.
|
||||
- **Watchlist**: A curated list of top-ranked tickers from the Scoring_Engine, with manual add/remove support.
|
||||
- **System_Settings**: Persisted configuration values managed by admins.
|
||||
|
||||
## Requirements
|
||||
|
||||
### Requirement 1: Ticker Management
|
||||
|
||||
**User Story:** As a user, I want to manage the NASDAQ tickers I am tracking, so that I can control which stocks the system analyzes.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 1.1 WHEN a user submits a valid NASDAQ ticker symbol, THE Ticker_Registry SHALL create a new ticker entry and return the created ticker with its metadata.
|
||||
- 1.2 WHEN a user submits a ticker symbol that already exists, THE Backend_Service SHALL return a duplicate error.
|
||||
- 1.3 WHEN a user submits an empty or whitespace-only ticker symbol, THE Backend_Service SHALL reject the request with a validation error.
|
||||
- 1.4 WHEN a user requests the list of tracked tickers, THE Ticker_Registry SHALL return all tickers sorted alphabetically by symbol.
|
||||
- 1.5 WHEN a user requests deletion of a tracked ticker, THE Ticker_Registry SHALL remove the ticker and all associated data (OHLCV, scores, setups).
|
||||
- 1.6 WHEN a user requests deletion of a ticker that does not exist, THE Backend_Service SHALL return a not-found error.
|
||||
|
||||
### Requirement 2: OHLCV Price Data Storage
|
||||
|
||||
**User Story:** As a user, I want the system to store historical OHLCV price data, so that technical analysis and signal detection have a data foundation.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 2.1 THE Price_Store SHALL persist each OHLCV_Record with: ticker symbol, date, open, high, low, close, and volume.
|
||||
- 2.2 THE Price_Store SHALL enforce uniqueness on (ticker symbol, date).
|
||||
- 2.3 THE Backend_Service SHALL reject OHLCV_Records where high < low, any price is negative, volume is negative, or date is in the future.
|
||||
- 2.4 THE Backend_Service SHALL reject OHLCV_Records for tickers not in the Ticker_Registry.
|
||||
|
||||
### Requirement 3: Data Ingestion
|
||||
|
||||
**User Story:** As a user, I want the system to fetch stock data from the market data provider, so that my price history stays current.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 3.1 WHEN a user requests a data fetch for a ticker and date range, THE Ingestion_Pipeline SHALL fetch from the configured provider and upsert into the Price_Store.
|
||||
- 3.2 IF the provider is unreachable or errors, THE Ingestion_Pipeline SHALL return a descriptive error without modifying existing data.
|
||||
- 3.3 IF the provider returns a rate-limit error, THE Ingestion_Pipeline SHALL record progress and return a response indicating how many records were ingested, so the fetch can be resumed without gaps.
|
||||
- 3.4 WHEN a rate-limited fetch is resumed for the same ticker and date range, THE Ingestion_Pipeline SHALL continue from the last successfully ingested date.
|
||||
|
||||
### Requirement 4: Scheduled Data Collection
|
||||
|
||||
**User Story:** As a user, I want the system to automatically fetch the latest price data on a schedule, so that my data stays current without manual intervention.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 4.1 THE Data_Collector SHALL periodically fetch the latest daily OHLCV data for all tracked tickers.
|
||||
- 4.2 THE Data_Collector SHALL upsert records, updating existing ones if they already exist.
|
||||
- 4.3 WHEN the Data_Collector encounters an error for a specific ticker, it SHALL log the error and continue with remaining tickers.
|
||||
- 4.4 THE Data_Collector SHALL be configurable for frequency (daily, hourly) via configuration.
|
||||
- 4.5 IF a rate limit is hit during collection, THE Data_Collector SHALL record the last successful ticker and resume from there on the next run.
|
||||
|
||||
### Requirement 5: Technical Analysis
|
||||
|
||||
**User Story:** As a user, I want the system to compute key technical indicators, so that trend strength, momentum, and volatility feed into the scoring pipeline.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 5.1 THE Backend_Service SHALL compute the following from OHLCV data: ADX, EMA (default periods 20 and 50), RSI (default 14-period), ATR (default 14-period), Volume_Profile (POC, Value Area, HVN, LVN), and Pivot_Points (swing highs/lows).
|
||||
- 5.2 WHEN an indicator is requested for a Ticker and date range, THE Backend_Service SHALL return both raw values and a normalized score (0-100).
|
||||
- 5.3 WHEN an EMA Cross signal is requested, THE Backend_Service SHALL compare short vs long EMA and return directional bias (bullish, bearish, neutral).
|
||||
- 5.4 IF insufficient data exists to compute an indicator, THE Backend_Service SHALL return an error indicating the minimum data requirement.
|
||||
|
||||
### Requirement 6: Support/Resistance Detection
|
||||
|
||||
**User Story:** As a user, I want the system to auto-calculate support and resistance levels, so that I can see key price zones where buying or selling pressure concentrates.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 6.1 THE SR_Detector SHALL identify SR_Levels from Volume_Profile (HVN/LVN zones) and from Pivot_Points (swing highs/lows).
|
||||
- 6.2 THE SR_Detector SHALL assign each level a strength score (0-100) based on how many times price has respected that level.
|
||||
- 6.3 THE SR_Detector SHALL tag each level as "support" or "resistance" relative to current price.
|
||||
- 6.4 WHEN new OHLCV data arrives for a Ticker, THE SR_Detector SHALL recalculate its SR_Levels.
|
||||
- 6.5 THE SR_Detector SHALL merge levels from different methods within a configurable price tolerance (default 0.5%) into a single consolidated level.
|
||||
- 6.6 WHEN a user requests SR_Levels for a Ticker, they SHALL be returned sorted by strength descending with detection method indicated.
|
||||
|
||||
### Requirement 7: Sentiment Data
|
||||
|
||||
**User Story:** As a user, I want sentiment data to feed into the scoring pipeline, so that social mood is factored into signal detection.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 7.1 THE Backend_Service SHALL periodically collect sentiment data for all tracked tickers from a configured source at a configurable interval (default 30 minutes).
|
||||
- 7.2 EACH Sentiment_Score SHALL contain: classification (bullish/bearish/neutral), confidence (0-100), source identifier, and timestamp.
|
||||
- 7.3 IF the sentiment source is unreachable, THE Backend_Service SHALL log the error and retain existing data.
|
||||
- 7.4 WHEN computing the sentiment Dimension_Score, THE Scoring_Engine SHALL aggregate recent scores within a configurable lookback window (default 24h) using configurable source weights and time decay.
|
||||
|
||||
### Requirement 8: Fundamental Data
|
||||
|
||||
**User Story:** As a user, I want key fundamental metrics to feed into the scoring pipeline, so that financial quality is factored into signal detection.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 8.1 THE Backend_Service SHALL fetch and store Fundamental_Data for each tracked Ticker: P/E ratio, revenue growth rate, earnings surprise %, and market cap.
|
||||
- 8.2 THE Data_Collector SHALL periodically fetch updated Fundamental_Data (default daily).
|
||||
- 8.3 IF the data source is unreachable, THE Backend_Service SHALL log the error and retain the most recent data.
|
||||
- 8.4 WHEN new Fundamental_Data arrives, THE Scoring_Engine SHALL mark the fundamental Dimension_Score as stale.
|
||||
|
||||
### Requirement 9: Composite Scoring and Ranking
|
||||
|
||||
**User Story:** As a user, I want each stock scored across all dimensions with configurable weights, so that I can rank stocks by a single unified metric tuned to my preferences.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 9.1 THE Scoring_Engine SHALL compute a Dimension_Score (0-100) per Ticker for: technical, S/R quality, sentiment, fundamental, and momentum.
|
||||
- 9.2 THE Scoring_Engine SHALL compute a Composite_Score as the weighted average of available Dimension_Scores using user-configurable weights.
|
||||
- 9.3 WHEN a Ticker is missing data for one or more dimensions, THE Scoring_Engine SHALL use only available dimensions (re-normalizing weights) and indicate which are missing.
|
||||
- 9.4 WHEN underlying data changes, THE Scoring_Engine SHALL mark the affected Composite_Score as stale.
|
||||
- 9.5 WHEN a stale score is requested, THE Scoring_Engine SHALL recompute it on demand. No background recomputation is performed.
|
||||
- 9.6 WHEN a user requests rankings, THE Scoring_Engine SHALL return tickers sorted by Composite_Score descending with all Dimension_Scores included.
|
||||
- 9.7 WHEN a user updates dimension weights, THE Scoring_Engine SHALL recompute all Composite_Scores.
|
||||
|
||||
### Requirement 10: Asymmetric R:R Trade Detection
|
||||
|
||||
**User Story:** As a user, I want the system to scan for trade setups with favorable risk-reward ratios, so that I see highly asymmetric opportunities without manual chart analysis.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 10.1 THE RR_Scanner SHALL periodically scan all tracked tickers for Trade_Setups meeting a configurable R:R threshold (default 3:1).
|
||||
- 10.2 FOR long setups: target = nearest SR_Level above price, stop = ATR-based distance below price.
|
||||
- 10.3 FOR short setups: target = nearest SR_Level below price, stop = ATR-based distance above price.
|
||||
- 10.4 EACH Trade_Setup SHALL include: entry price, stop-loss, target, R:R ratio, direction (long/short), and Composite_Score.
|
||||
- 10.5 WHEN underlying SR_Levels or price data changes, THE RR_Scanner SHALL recalculate and remove setups that no longer meet the threshold.
|
||||
- 10.6 THE RR_Scanner SHALL be configurable for scan frequency via configuration.
|
||||
- 10.7 IF a Ticker lacks sufficient SR_Levels or ATR data, THE RR_Scanner SHALL skip it and log the reason.
|
||||
- 10.8 WHEN a user requests trade setups, results SHALL be sorted by R:R descending (secondary: Composite_Score descending), with optional direction filter.
|
||||
|
||||
### Requirement 11: Watchlist
|
||||
|
||||
**User Story:** As a user, I want a watchlist of top-ranked stocks that auto-populates from scoring, so that I always have a curated shortlist of the best opportunities.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 11.1 THE Watchlist SHALL auto-include the top-X tickers by Composite_Score (X configurable, default 10).
|
||||
- 11.2 WHEN requested, THE Watchlist SHALL return each entry with Composite_Score, Dimension_Scores, R:R ratio (if setup exists), and active SR_Levels.
|
||||
- 11.3 Users MAY manually add/remove tickers. Manual additions are tagged and not subject to auto-population rules.
|
||||
- 11.4 THE Watchlist SHALL enforce a max size of auto-populate count + 10 manual additions (default max 20).
|
||||
- 11.5 WHEN Composite_Scores are recomputed, auto-populated entries SHALL update to reflect new rankings.
|
||||
- 11.6 THE Watchlist SHALL be sortable by Composite_Score, any Dimension_Score, or R:R ratio.
|
||||
|
||||
### Requirement 12: User Authentication
|
||||
|
||||
**User Story:** As a system owner, I want user registration and login with role-based access, so that only authorized users can access signals and analysis.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 12.1 WHEN registration is enabled and valid credentials are submitted, THE Auth_Service SHALL create a User with no API access by default.
|
||||
- 12.2 WHEN registration is disabled, THE Auth_Service SHALL reject registration.
|
||||
- 12.3 WHEN valid login credentials are submitted, THE Auth_Service SHALL return an Access_Token (60-minute expiry).
|
||||
- 12.4 WHEN invalid credentials are submitted, THE Auth_Service SHALL return an error without revealing which field was wrong.
|
||||
- 12.5 Unauthenticated requests to protected endpoints SHALL receive 401. Authenticated users without granted access SHALL receive 403.
|
||||
- 12.6 WHEN a token expires, THE Backend_Service SHALL return 401 indicating expiration.
|
||||
|
||||
### Requirement 13: Admin Management
|
||||
|
||||
**User Story:** As an admin, I want to manage users, control system settings, and perform data maintenance.
|
||||
|
||||
#### Acceptance Criteria
|
||||
|
||||
- 13.1 WHEN the system initializes for the first time, a default admin account SHALL be created (username: "admin", password: "admin").
|
||||
- 13.2 Admins SHALL be able to: grant/revoke user access, toggle registration, list all users, reset user passwords, and create new user accounts.
|
||||
- 13.3 Admins SHALL be able to: enable/disable scheduled jobs, update system settings (frequencies, thresholds, weights, watchlist size), and trigger manual job runs.
|
||||
- 13.4 Admins SHALL be able to delete all data older than a specified number of days (OHLCV, sentiment, fundamentals). Ticker entries, user accounts, and latest scores SHALL be preserved.
|
||||
- 13.5 Admin endpoints SHALL be restricted to users with the admin role.
|
||||
|
||||
## Design Constraints
|
||||
|
||||
The following are engineering concerns to be addressed during design, not user-facing requirements:
|
||||
|
||||
- Consistent JSON API envelope (status, data, error fields) with appropriate HTTP status codes
|
||||
- OpenAPI/Swagger documentation endpoint
|
||||
- Versioned URL prefixes (/api/v1/)
|
||||
- Composite database index on (ticker, date) for range query performance
|
||||
- Date-only storage for OHLCV (no time component)
|
||||
- Database migrations for schema management
|
||||
- Structured JSON logging with configurable levels
|
||||
- Database connection pooling (default 5 connections)
|
||||
- Health check endpoint (unauthenticated)
|
||||
- Graceful shutdown (complete in-flight requests, stop jobs, close pool)
|
||||
- Market data provider behind an interface/protocol for swappability
|
||||
255
.kiro/specs/stock-data-backend/tasks.md
Normal file
255
.kiro/specs/stock-data-backend/tasks.md
Normal file
@@ -0,0 +1,255 @@
|
||||
# Implementation Plan: Stock Data Backend
|
||||
|
||||
## Overview
|
||||
|
||||
Incremental build of the investing-signal platform: foundation first (config, DB, models, auth), then domain services (tickers, OHLCV, ingestion, indicators, S/R, sentiment, fundamentals), then scoring/ranking (scoring engine, R:R scanner, watchlist), then scheduled jobs, deployment templates, and final wiring. Each step builds on the previous one and ends with the system fully integrated.
|
||||
|
||||
## Tasks
|
||||
|
||||
- [x] 1. Project scaffolding, configuration, and database foundation
|
||||
- [x] 1.1 Create project structure with `pyproject.toml`, `.env.example`, `alembic.ini`, and `app/` package
|
||||
- Create `pyproject.toml` with dependencies: fastapi, uvicorn, sqlalchemy[asyncio], asyncpg, alembic, pydantic-settings, python-jose, passlib[bcrypt], apscheduler, httpx, alpaca-py, google-genai, hypothesis
|
||||
- Create `.env.example` with all environment variables from design
|
||||
- Create `app/__init__.py`, `app/config.py` (pydantic-settings `Settings` class)
|
||||
- Create `app/database.py` (async SQLAlchemy engine, session factory, connection pooling)
|
||||
- _Requirements: Design Constraints (connection pooling, config)_
|
||||
|
||||
- [x] 1.2 Create all SQLAlchemy ORM models and Alembic initial migration
|
||||
- Create `app/models/__init__.py` and model files: `ticker.py`, `ohlcv.py`, `user.py`, `sentiment.py`, `fundamental.py`, `score.py`, `sr_level.py`, `trade_setup.py`, `watchlist.py`, `settings.py`
|
||||
- Implement all 12 entities from the ERD: User, Ticker, OHLCVRecord, SentimentScore, FundamentalData, SRLevel, DimensionScore, CompositeScore, TradeSetup, WatchlistEntry, SystemSetting, IngestionProgress
|
||||
- Include composite unique constraints, indexes, and cascade deletes per design
|
||||
- Initialize Alembic (`alembic/env.py`) and generate initial migration
|
||||
- _Requirements: 2.1, 2.2, Design Constraints (composite index on ticker+date)_
|
||||
|
||||
- [x] 1.3 Create shared schemas, exception hierarchy, and API envelope
|
||||
- Create `app/schemas/common.py` with `APIEnvelope` model (status, data, error)
|
||||
- Create `app/middleware.py` with global exception handler mapping `AppError` subclasses to JSON envelope responses
|
||||
- Create exception classes: `AppError`, `ValidationError`, `NotFoundError`, `DuplicateError`, `AuthenticationError`, `AuthorizationError`, `ProviderError`, `RateLimitError`
|
||||
- _Requirements: Design Constraints (JSON envelope, HTTP status codes)_
|
||||
|
||||
- [x] 1.4 Create FastAPI app entry point with lifespan, health check, and dependency injection
|
||||
- Create `app/main.py` with FastAPI app, lifespan handler (DB pool startup/shutdown, default admin creation)
|
||||
- Create `app/dependencies.py` with `Depends()` factories for DB session, current user, admin guard
|
||||
- Create `app/routers/health.py` with unauthenticated `/api/v1/health` endpoint
|
||||
- Wire health router into app
|
||||
- _Requirements: 13.1, Design Constraints (health check, graceful shutdown, versioned URLs)_
|
||||
|
||||
- [x] 2. Authentication and admin services
|
||||
- [x] 2.1 Implement Auth Service and auth router
|
||||
- Create `app/services/auth_service.py`: registration (configurable on/off, creates no-access user), login (bcrypt verify, JWT generation with 60-min expiry), token validation
|
||||
- Create `app/schemas/auth.py`: RegisterRequest, LoginRequest, TokenResponse
|
||||
- Create `app/routers/auth.py`: `POST /api/v1/auth/register`, `POST /api/v1/auth/login`
|
||||
- Implement JWT middleware in `app/dependencies.py` for `get_current_user` and `require_admin`
|
||||
- _Requirements: 12.1, 12.2, 12.3, 12.4, 12.5, 12.6_
|
||||
|
||||
- [ ]* 2.2 Write property tests for auth (Properties 34-38)
|
||||
- **Property 34: Registration creates no-access user** — _Validates: Requirements 12.1_
|
||||
- **Property 35: Registration disabled rejects all attempts** — _Validates: Requirements 12.2_
|
||||
- **Property 36: Login returns valid JWT** — _Validates: Requirements 12.3_
|
||||
- **Property 37: Invalid credentials return generic error** — _Validates: Requirements 12.4_
|
||||
- **Property 38: Access control enforcement** — _Validates: Requirements 12.5_
|
||||
|
||||
- [x] 2.3 Implement Admin Service and admin router
|
||||
- Create `app/services/admin_service.py`: grant/revoke access, toggle registration, list users, reset passwords, create accounts, system settings CRUD, data cleanup (delete old OHLCV/sentiment/fundamentals preserving tickers/users/scores), job control
|
||||
- Create `app/schemas/admin.py`: UserManagement, SystemSettingUpdate, DataCleanupRequest
|
||||
- Create `app/routers/admin.py`: admin-only endpoints under `/api/v1/admin/`
|
||||
- _Requirements: 13.1, 13.2, 13.3, 13.4, 13.5_
|
||||
|
||||
- [ ]* 2.4 Write property tests for admin (Properties 39-40)
|
||||
- **Property 39: Admin user management operations** — _Validates: Requirements 13.2_
|
||||
- **Property 40: Data cleanup preserves structure** — _Validates: Requirements 13.4_
|
||||
|
||||
- [x] 3. Checkpoint - Ensure all tests pass
|
||||
- Ensure all tests pass; ask the user if questions arise.
|
||||
|
||||
- [x] 4. Ticker management and OHLCV price storage
|
||||
- [x] 4.1 Implement Ticker Registry service and router
|
||||
- Create `app/services/ticker_service.py`: add (validate non-empty, uppercase, alphanumeric, check uniqueness), delete (cascade all associated data), list (sorted alphabetically)
|
||||
- Create `app/schemas/ticker.py`: TickerCreate, TickerResponse
|
||||
- Create `app/routers/tickers.py`: `POST /api/v1/tickers`, `GET /api/v1/tickers`, `DELETE /api/v1/tickers/{symbol}`
|
||||
- _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6_
|
||||
|
||||
- [ ]* 4.2 Write property tests for ticker management (Properties 1-4)
|
||||
- **Property 1: Ticker creation round-trip** — _Validates: Requirements 1.1_
|
||||
- **Property 2: Duplicate ticker rejection** — _Validates: Requirements 1.2_
|
||||
- **Property 3: Whitespace ticker rejection** — _Validates: Requirements 1.3_
|
||||
- **Property 4: Ticker deletion cascades** — _Validates: Requirements 1.5_
|
||||
|
||||
- [x] 4.3 Implement Price Store service and OHLCV router
|
||||
- Create `app/services/price_service.py`: upsert OHLCV (validate high >= low, prices >= 0, volume >= 0, date <= today, ticker exists), query by ticker + date range
|
||||
- Create `app/schemas/ohlcv.py`: OHLCVCreate, OHLCVResponse
|
||||
- Create `app/routers/ohlcv.py`: `POST /api/v1/ohlcv`, `GET /api/v1/ohlcv/{symbol}`
|
||||
- On upsert: invalidate LRU cache for ticker, mark composite score as stale
|
||||
- _Requirements: 2.1, 2.2, 2.3, 2.4_
|
||||
|
||||
- [ ]* 4.4 Write property tests for OHLCV (Properties 5-7)
|
||||
- **Property 5: OHLCV storage round-trip** — _Validates: Requirements 2.1, 2.2_
|
||||
- **Property 6: OHLCV validation rejects invalid records** — _Validates: Requirements 2.3_
|
||||
- **Property 7: OHLCV rejects unregistered tickers** — _Validates: Requirements 2.4_
|
||||
|
||||
- [x] 5. Market data provider and ingestion pipeline
|
||||
- [x] 5.1 Implement provider protocols and concrete implementations
|
||||
- Create `app/providers/protocol.py`: `MarketDataProvider` Protocol (fetch_ohlcv), `SentimentProvider` Protocol (fetch_sentiment), `FundamentalProvider` Protocol (fetch_fundamentals)
|
||||
- Create `app/providers/alpaca.py`: Alpaca OHLCV provider using `alpaca-py` SDK — fetches daily bars by ticker and date range
|
||||
- Create `app/providers/gemini_sentiment.py`: Gemini sentiment provider using `google-genai` with search grounding — sends structured prompt per ticker, parses JSON response (classification + confidence)
|
||||
- Create `app/providers/fmp.py`: Financial Modeling Prep fundamentals provider using `httpx` — fetches P/E, revenue growth, earnings surprise, market cap
|
||||
- _Requirements: Design Constraints (provider behind interface)_
|
||||
|
||||
- [x] 5.2 Implement Ingestion Pipeline service and router
|
||||
- Create `app/services/ingestion_service.py`: fetch + upsert with rate-limit handling (track `last_ingested_date`, return partial progress on rate limit, resume from last date + 1 day), provider error handling (descriptive error, no data modification)
|
||||
- Create `app/routers/ingestion.py`: `POST /api/v1/ingestion/fetch/{symbol}`
|
||||
- _Requirements: 3.1, 3.2, 3.3, 3.4_
|
||||
|
||||
- [ ]* 5.3 Write property tests for ingestion (Properties 8-9)
|
||||
- **Property 8: Provider error preserves existing data** — _Validates: Requirements 3.2, 7.3, 8.3_
|
||||
- **Property 9: Rate-limit resume continuity** — _Validates: Requirements 3.3, 3.4, 4.5_
|
||||
|
||||
- [x] 6. Checkpoint - Ensure all tests pass
|
||||
- Ensure all tests pass; ask the user if questions arise.
|
||||
|
||||
- [x] 7. Technical analysis and S/R detection
|
||||
- [x] 7.1 Implement LRU cache wrapper with invalidation
|
||||
- Create `app/cache.py`: LRU cache wrapper (max 1000 entries) keyed on ticker + date range + indicator type, with per-ticker invalidation method
|
||||
- _Requirements: Design Constraints (LRU cache)_
|
||||
|
||||
- [x] 7.2 Implement Technical Analysis service and indicators router
|
||||
- Create `app/services/indicator_service.py`: compute ADX (28+ bars), EMA (period+1 bars, default 20/50), RSI (15+ bars, 14-period), ATR (15+ bars, 14-period), Volume Profile (20+ bars, POC/Value Area/HVN/LVN), Pivot Points (5+ bars, swing highs/lows)
|
||||
- Each indicator returns raw values + normalized 0-100 score
|
||||
- Implement EMA cross signal (bullish/bearish/neutral based on short vs long EMA comparison)
|
||||
- Enforce minimum data requirements, return error if insufficient
|
||||
- Create `app/schemas/indicator.py`: IndicatorRequest, IndicatorResponse, EMACrossResponse
|
||||
- Create `app/routers/indicators.py`: `GET /api/v1/indicators/{symbol}/{indicator_type}`, `GET /api/v1/indicators/{symbol}/ema-cross`
|
||||
- _Requirements: 5.1, 5.2, 5.3, 5.4_
|
||||
|
||||
- [ ]* 7.3 Write property tests for indicators (Properties 11-14)
|
||||
- **Property 11: Score bounds invariant** — _Validates: Requirements 5.2, 6.2, 9.1_
|
||||
- **Property 12: Indicator minimum data enforcement** — _Validates: Requirements 5.4_
|
||||
- **Property 13: EMA cross directional bias** — _Validates: Requirements 5.3_
|
||||
- **Property 14: Indicator computation determinism** — _Validates: Requirements 5.1_
|
||||
|
||||
- [x] 7.4 Implement S/R Detector service and router
|
||||
- Create `app/services/sr_service.py`: detect SR levels from Volume Profile (HVN/LVN) and Pivot Points (swing highs/lows), assign strength scores (0-100 based on price respect count), merge levels within tolerance (default 0.5%), tag as support/resistance relative to current price, recalculate on new OHLCV data
|
||||
- Create `app/schemas/sr_level.py`: SRLevelResponse
|
||||
- Create `app/routers/sr_levels.py`: `GET /api/v1/sr-levels/{symbol}` (sorted by strength descending)
|
||||
- _Requirements: 6.1, 6.2, 6.3, 6.4, 6.5, 6.6_
|
||||
|
||||
- [ ]* 7.5 Write property tests for S/R detection (Properties 15-17)
|
||||
- **Property 15: SR level support/resistance tagging** — _Validates: Requirements 6.3_
|
||||
- **Property 16: SR level merging within tolerance** — _Validates: Requirements 6.5_
|
||||
- **Property 17: SR level detection from data** — _Validates: Requirements 6.1_
|
||||
|
||||
- [x] 8. Sentiment and fundamental data services
|
||||
- [x] 8.1 Implement Sentiment service and router
|
||||
- Create `app/services/sentiment_service.py`: store sentiment records (classification, confidence, source, timestamp), compute dimension score with time-decay weighted average over configurable lookback window (default 24h)
|
||||
- Create `app/schemas/sentiment.py`: SentimentResponse
|
||||
- Create `app/routers/sentiment.py`: `GET /api/v1/sentiment/{symbol}`
|
||||
- _Requirements: 7.1, 7.2, 7.3, 7.4_
|
||||
|
||||
- [ ]* 8.2 Write property tests for sentiment (Properties 18-19)
|
||||
- **Property 18: Sentiment score data shape** — _Validates: Requirements 7.2_
|
||||
- **Property 19: Sentiment dimension score uses time decay** — _Validates: Requirements 7.4_
|
||||
|
||||
- [x] 8.3 Implement Fundamental Data service and router
|
||||
- Create `app/services/fundamental_service.py`: store fundamental data (P/E, revenue growth, earnings surprise, market cap), mark fundamental dimension score as stale on new data
|
||||
- Create `app/schemas/fundamental.py`: FundamentalResponse
|
||||
- Create `app/routers/fundamentals.py`: `GET /api/v1/fundamentals/{symbol}`
|
||||
- _Requirements: 8.1, 8.2, 8.3, 8.4_
|
||||
|
||||
- [ ]* 8.4 Write property test for fundamentals (Property 20)
|
||||
- **Property 20: Fundamental data storage round-trip** — _Validates: Requirements 8.1_
|
||||
|
||||
- [x] 9. Checkpoint - Ensure all tests pass
|
||||
- Ensure all tests pass; ask the user if questions arise.
|
||||
|
||||
- [x] 10. Scoring engine, R:R scanner, and watchlist
|
||||
- [x] 10.1 Implement Scoring Engine service and router
|
||||
- Create `app/services/scoring_service.py`: compute dimension scores (technical, sr_quality, sentiment, fundamental, momentum) each 0-100, compute composite score as weighted average of available dimensions with re-normalized weights, staleness marking/recomputation on demand, weight update triggers full recomputation
|
||||
- Create `app/schemas/score.py`: ScoreResponse, WeightUpdateRequest, RankingResponse
|
||||
- Create `app/routers/scores.py`: `GET /api/v1/scores/{symbol}`, `GET /api/v1/rankings`, `PUT /api/v1/scores/weights`
|
||||
- _Requirements: 9.1, 9.2, 9.3, 9.4, 9.5, 9.6, 9.7_
|
||||
|
||||
- [ ]* 10.2 Write property tests for scoring (Properties 21-25)
|
||||
- **Property 21: Composite score is weighted average** — _Validates: Requirements 9.2_
|
||||
- **Property 22: Missing dimensions re-normalize weights** — _Validates: Requirements 9.3_
|
||||
- **Property 23: Staleness marking on data change** — _Validates: Requirements 9.4_
|
||||
- **Property 24: Stale score recomputation on demand** — _Validates: Requirements 9.5_
|
||||
- **Property 25: Weight update triggers full recomputation** — _Validates: Requirements 9.7_
|
||||
|
||||
- [x] 10.3 Implement R:R Scanner service and router
|
||||
- Create `app/services/rr_scanner_service.py`: scan tickers for trade setups (long: target = nearest SR above, stop = entry - ATR×multiplier; short: target = nearest SR below, stop = entry + ATR×multiplier), filter by R:R threshold (default 3:1), recalculate/prune on data change, skip tickers without sufficient SR/ATR data
|
||||
- Create `app/schemas/trade_setup.py`: TradeSetupResponse
|
||||
- Create `app/routers/trades.py`: `GET /api/v1/trades` (sorted by R:R desc, secondary composite desc, optional direction filter)
|
||||
- _Requirements: 10.1, 10.2, 10.3, 10.4, 10.5, 10.6, 10.7, 10.8_
|
||||
|
||||
- [ ]* 10.4 Write property tests for R:R scanner (Properties 26-29)
|
||||
- **Property 26: Trade setup R:R threshold filtering** — _Validates: Requirements 10.1_
|
||||
- **Property 27: Trade setup computation correctness** — _Validates: Requirements 10.2, 10.3_
|
||||
- **Property 28: Trade setup data completeness** — _Validates: Requirements 10.4_
|
||||
- **Property 29: Trade setup pruning on data change** — _Validates: Requirements 10.5_
|
||||
|
||||
- [x] 10.5 Implement Watchlist service and router
|
||||
- Create `app/services/watchlist_service.py`: auto-populate top-X by composite score (default 10), manual add/remove (tagged, not subject to auto-population), enforce cap (auto + 10 manual, default max 20), update auto entries on score recomputation
|
||||
- Create `app/schemas/watchlist.py`: WatchlistEntryResponse (includes composite score, dimension scores, R:R ratio, SR levels)
|
||||
- Create `app/routers/watchlist.py`: `GET /api/v1/watchlist`, `POST /api/v1/watchlist/{symbol}`, `DELETE /api/v1/watchlist/{symbol}` (sortable by composite, dimension, or R:R)
|
||||
- _Requirements: 11.1, 11.2, 11.3, 11.4, 11.5, 11.6_
|
||||
|
||||
- [ ]* 10.6 Write property tests for watchlist (Properties 30-33)
|
||||
- **Property 30: Watchlist auto-population** — _Validates: Requirements 11.1_
|
||||
- **Property 31: Watchlist entry data completeness** — _Validates: Requirements 11.2_
|
||||
- **Property 32: Manual watchlist entries persist through auto-population** — _Validates: Requirements 11.3_
|
||||
- **Property 33: Watchlist size cap enforcement** — _Validates: Requirements 11.4_
|
||||
|
||||
- [x] 11. Checkpoint - Ensure all tests pass
|
||||
- Ensure all tests pass, ask the user if questions arise.
|
||||
|
||||
- [x] 12. Scheduled jobs and sorting correctness
|
||||
- [x] 12.1 Implement APScheduler job definitions and scheduler integration
|
||||
- Create `app/scheduler.py`: define scheduled jobs for Data Collector (OHLCV fetch for all tickers, configurable frequency), Sentiment Collector (default 30 min), Fundamental Collector (default daily), R:R Scanner (configurable frequency)
|
||||
- Each job: process all tracked tickers independently (one failure doesn't stop others), log errors with structured JSON, handle rate limits (record last successful ticker, resume next run)
|
||||
- Wire scheduler into FastAPI lifespan (start on startup, shutdown gracefully)
|
||||
- _Requirements: 4.1, 4.2, 4.3, 4.4, 4.5, 7.1, 8.2, 10.6_
|
||||
|
||||
- [ ]* 12.2 Write property test for scheduled collection (Property 10)
|
||||
- **Property 10: Scheduled collection processes all tickers** — _Validates: Requirements 4.1, 4.3, 7.1, 8.2_
|
||||
|
||||
- [ ]* 12.3 Write property test for sorting correctness (Property 41)
|
||||
- **Property 41: Sorting correctness** — _Validates: Requirements 1.4, 6.6, 9.6, 10.8, 11.6_
|
||||
|
||||
- [x] 13. Test infrastructure and shared fixtures
|
||||
- [x] 13.1 Create test configuration and shared fixtures
|
||||
- Create `tests/conftest.py`: test DB session fixture (transaction rollback per test), FastAPI test client fixture, mock `MarketDataProvider`, hypothesis custom strategies (`valid_ticker_symbols`, `whitespace_strings`, `valid_ohlcv_records`, `invalid_ohlcv_records`, `dimension_scores`, `weight_configs`, `sr_levels`, `sentiment_scores`, `trade_setups`)
|
||||
- Create `tests/__init__.py`, `tests/unit/__init__.py`, `tests/property/__init__.py`
|
||||
- _Requirements: Design (Testing Strategy)_
|
||||
|
||||
- [x] 14. Deployment templates and CI/CD
|
||||
- [x] 14.1 Create deployment configuration files
|
||||
- Create `deploy/nginx.conf` (reverse proxy for signal.thiessen.io)
|
||||
- Create `deploy/stock-data-backend.service` (systemd unit file)
|
||||
- Create `deploy/setup_db.sh` (idempotent DB creation + migration script)
|
||||
- Create `.gitea/workflows/deploy.yml` (lint → test → deploy pipeline)
|
||||
- _Requirements: Design (Deployment and Infrastructure)_
|
||||
|
||||
- [x] 15. Final wiring and integration
|
||||
- [x] 15.1 Wire all routers into FastAPI app and verify OpenAPI docs
|
||||
- Register all routers in `app/main.py` under `/api/v1/` prefix
|
||||
- Verify Swagger/OpenAPI docs endpoint works at `/docs`
|
||||
- Ensure all middleware (logging, error handling, auth) is applied
|
||||
- _Requirements: Design Constraints (OpenAPI/Swagger, versioned URLs)_
|
||||
|
||||
- [ ]* 15.2 Write integration tests for key API flows
|
||||
- Test end-to-end: register → login → add ticker → fetch data → get indicators → get scores → get watchlist
|
||||
- Test auth enforcement: unauthenticated → 401, no-access user → 403, admin endpoints → 403 for non-admin
|
||||
- Test error flows: duplicate ticker → 409, invalid OHLCV → 400, missing ticker → 404
|
||||
- _Requirements: 1.1-1.6, 2.1-2.4, 12.1-12.6_
|
||||
|
||||
- [x] 16. Final checkpoint - Ensure all tests pass
|
||||
- Ensure all tests pass, ask the user if questions arise.
|
||||
|
||||
## Notes
|
||||
|
||||
- Tasks marked with `*` are optional and can be skipped for faster MVP
|
||||
- Each task references specific requirements for traceability
|
||||
- Checkpoints ensure incremental validation
|
||||
- Property tests validate the 41 correctness properties from the design document using `hypothesis`
|
||||
- Unit tests validate specific examples and edge cases
|
||||
- All code is Python 3.12+ with FastAPI, SQLAlchemy async, and PostgreSQL
|
||||
300
README.md
Normal file
300
README.md
Normal file
@@ -0,0 +1,300 @@
|
||||
# Signal Dashboard
|
||||
|
||||
Investing-signal platform for NASDAQ stocks. Surfaces the best trading opportunities through weighted multi-dimensional scoring — technical indicators, support/resistance quality, sentiment, fundamentals, and momentum — with asymmetric risk:reward scanning.
|
||||
|
||||
**Philosophy:** Don't predict price. Find the path of least resistance, key S/R zones, and asymmetric R:R setups.
|
||||
|
||||
## Stack
|
||||
|
||||
| Layer | Tech |
|
||||
|---|---|
|
||||
| Backend | Python 3.12+, FastAPI, Uvicorn, async SQLAlchemy, Alembic |
|
||||
| Database | PostgreSQL (asyncpg) |
|
||||
| Scheduler | APScheduler — OHLCV, sentiment, fundamentals, R:R scan |
|
||||
| Frontend | React 18, TypeScript, Vite 5 |
|
||||
| Styling | Tailwind CSS 3 with custom glassmorphism design system |
|
||||
| State | TanStack React Query v5 (server), Zustand (client/auth) |
|
||||
| Charts | Canvas 2D candlestick chart with S/R overlays |
|
||||
| Routing | React Router v6 (SPA) |
|
||||
| HTTP | Axios with JWT interceptor |
|
||||
| Data providers | Alpaca (OHLCV), Gemini 2.0 Flash (sentiment via search grounding), Financial Modeling Prep (fundamentals) |
|
||||
|
||||
## Features
|
||||
|
||||
### Backend
|
||||
- Ticker registry with full cascade delete
|
||||
- OHLCV price storage with upsert and validation
|
||||
- Technical indicators: ADX, EMA, RSI, ATR, Volume Profile, Pivot Points, EMA Cross
|
||||
- Support/Resistance detection with strength scoring and merge-within-tolerance
|
||||
- Sentiment analysis with time-decay weighted scoring
|
||||
- Fundamental data tracking (P/E, revenue growth, earnings surprise, market cap)
|
||||
- 5-dimension scoring engine (technical, S/R quality, sentiment, fundamental, momentum) with configurable weights
|
||||
- Risk:Reward scanner — long and short setups, ATR-based stops, configurable R:R threshold (default 3:1)
|
||||
- Auto-populated watchlist (top-10 by composite score) + manual entries (cap: 20)
|
||||
- JWT auth with admin role, configurable registration, user access control
|
||||
- Scheduled jobs with enable/disable control and status monitoring
|
||||
- Admin panel: user management, data cleanup, job control, system settings
|
||||
|
||||
### Frontend
|
||||
- Glassmorphism UI with frosted glass panels, gradient text, ambient glow effects, mesh gradient background
|
||||
- Interactive candlestick chart (Canvas 2D) with hover tooltips showing OHLCV values
|
||||
- Support/Resistance level overlays on chart (top 6 by strength, dashed lines with labels)
|
||||
- Data freshness bar showing availability and recency of each data source
|
||||
- Watchlist with composite scores, R:R ratios, and S/R summaries
|
||||
- Ticker detail page: chart, scores, sentiment breakdown, fundamentals, technical indicators, S/R table
|
||||
- Rankings table with configurable dimension weights
|
||||
- Trade scanner showing detected R:R setups
|
||||
- Admin page: user management, job status with live indicators, enable/disable toggles, data cleanup, system settings
|
||||
- Protected routes with JWT auth, admin-only sections
|
||||
- Responsive layout with mobile navigation
|
||||
- Toast notifications for async operations
|
||||
|
||||
## Pages
|
||||
|
||||
| Route | Page | Access |
|
||||
|---|---|---|
|
||||
| `/login` | Login | Public |
|
||||
| `/register` | Register | Public (when enabled) |
|
||||
| `/watchlist` | Watchlist (default) | Authenticated |
|
||||
| `/ticker/:symbol` | Ticker Detail | Authenticated |
|
||||
| `/scanner` | Trade Scanner | Authenticated |
|
||||
| `/rankings` | Rankings | Authenticated |
|
||||
| `/admin` | Admin Panel | Admin only |
|
||||
|
||||
## API Endpoints
|
||||
|
||||
All under `/api/v1/`. Interactive docs at `/docs` (Swagger) and `/redoc`.
|
||||
|
||||
| Group | Endpoints |
|
||||
|---|---|
|
||||
| Health | `GET /health` |
|
||||
| Auth | `POST /auth/register`, `POST /auth/login` |
|
||||
| Tickers | `POST /tickers`, `GET /tickers`, `DELETE /tickers/{symbol}` |
|
||||
| OHLCV | `POST /ohlcv`, `GET /ohlcv/{symbol}` |
|
||||
| Ingestion | `POST /ingestion/fetch/{symbol}` |
|
||||
| Indicators | `GET /indicators/{symbol}/{type}`, `GET /indicators/{symbol}/ema-cross` |
|
||||
| S/R Levels | `GET /sr-levels/{symbol}` |
|
||||
| Sentiment | `GET /sentiment/{symbol}` |
|
||||
| Fundamentals | `GET /fundamentals/{symbol}` |
|
||||
| Scores | `GET /scores/{symbol}`, `GET /rankings`, `PUT /scores/weights` |
|
||||
| Trades | `GET /trades` |
|
||||
| Watchlist | `GET /watchlist`, `POST /watchlist/{symbol}`, `DELETE /watchlist/{symbol}` |
|
||||
| Admin | `GET /admin/users`, `PUT /admin/users/{id}/role`, `PUT /admin/users/{id}/access`, `DELETE /admin/data/{symbol}`, `POST /admin/jobs/{name}/trigger`, `PUT /admin/jobs/{name}/toggle`, `GET /admin/jobs`, `GET /admin/settings`, `PUT /admin/settings` |
|
||||
|
||||
## Development Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.12+
|
||||
- PostgreSQL (via Homebrew on macOS: `brew install postgresql@17`)
|
||||
- Node.js 18+ and npm
|
||||
|
||||
### Backend Setup
|
||||
|
||||
```bash
|
||||
# Create and activate virtual environment
|
||||
python -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install -e ".[dev]"
|
||||
|
||||
# Configure environment
|
||||
cp .env.example .env
|
||||
# Edit .env with your values (see Environment Variables below)
|
||||
|
||||
# Start PostgreSQL and create database
|
||||
brew services start postgresql@17
|
||||
createdb stock_data_backend
|
||||
createuser stock_backend --pwprompt   # set the password to match DATABASE_URL in .env
|
||||
|
||||
# Run migrations
|
||||
alembic upgrade head
|
||||
|
||||
# Start the backend
|
||||
uvicorn app.main:app --reload --host 0.0.0.0 --port 8000
|
||||
```
|
||||
|
||||
A default `admin`/`admin` account is created on first startup. Open http://localhost:8000/docs for Swagger UI.
|
||||
|
||||
### Frontend Setup
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm install
|
||||
npm run dev
|
||||
```
|
||||
|
||||
Open http://localhost:5173 for the Signal Dashboard. The Vite dev server proxies `/api/v1/` requests to the backend at `http://127.0.0.1:8000`.
|
||||
|
||||
### Frontend Build
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm run build # TypeScript check + production build → frontend/dist/
|
||||
npm run preview # Preview the production build locally
|
||||
```
|
||||
|
||||
### Tests
|
||||
|
||||
```bash
|
||||
# Backend tests (in-memory SQLite — no PostgreSQL needed)
|
||||
pytest tests/ -v
|
||||
|
||||
# Frontend tests
|
||||
cd frontend
|
||||
npm test
|
||||
```
|
||||
|
||||
## Environment Variables
|
||||
|
||||
Configure in `.env` (copy from `.env.example`):
|
||||
|
||||
| Variable | Required | Default | Description |
|
||||
|---|---|---|---|
|
||||
| `DATABASE_URL` | Yes | — | PostgreSQL connection string (`postgresql+asyncpg://...`) |
|
||||
| `JWT_SECRET` | Yes | — | Random secret for JWT signing |
|
||||
| `JWT_EXPIRY_MINUTES` | No | `60` | JWT token expiry |
|
||||
| `ALPACA_API_KEY` | For OHLCV | — | Alpaca Markets API key |
|
||||
| `ALPACA_API_SECRET` | For OHLCV | — | Alpaca Markets API secret |
|
||||
| `GEMINI_API_KEY` | For sentiment | — | Google Gemini API key |
|
||||
| `GEMINI_MODEL` | No | `gemini-2.0-flash` | Gemini model name |
|
||||
| `FMP_API_KEY` | For fundamentals | — | Financial Modeling Prep API key |
|
||||
| `DATA_COLLECTOR_FREQUENCY` | No | `daily` | OHLCV collection schedule |
|
||||
| `SENTIMENT_POLL_INTERVAL_MINUTES` | No | `30` | Sentiment polling interval |
|
||||
| `FUNDAMENTAL_FETCH_FREQUENCY` | No | `daily` | Fundamentals fetch schedule |
|
||||
| `RR_SCAN_FREQUENCY` | No | `daily` | R:R scanner schedule |
|
||||
| `DEFAULT_WATCHLIST_AUTO_SIZE` | No | `10` | Auto-watchlist size |
|
||||
| `DEFAULT_RR_THRESHOLD` | No | `3.0` | Minimum R:R ratio for setups |
|
||||
| `DB_POOL_SIZE` | No | `5` | Database connection pool size |
| `DB_POOL_TIMEOUT` | No | `30` | Seconds to wait for a pooled connection |
|
||||
| `LOG_LEVEL` | No | `INFO` | Logging level |
|
||||
|
||||
## Production Deployment (Debian 12)
|
||||
|
||||
### 1. Install dependencies
|
||||
|
||||
```bash
|
||||
# NOTE: Debian 12 (bookworm) ships Python 3.11 in its default repositories;
# the python3.12 packages below require a newer suite or a pyenv/source
# install — verify availability before running this.
sudo apt update && sudo apt install -y python3.12 python3.12-venv postgresql nginx nodejs npm
|
||||
```
|
||||
|
||||
### 2. Create service user
|
||||
|
||||
```bash
|
||||
sudo useradd -r -s /usr/sbin/nologin stockdata
|
||||
```
|
||||
|
||||
### 3. Deploy application
|
||||
|
||||
```bash
|
||||
sudo mkdir -p /opt/stock-data-backend
|
||||
# Copy project files to /opt/stock-data-backend
|
||||
cd /opt/stock-data-backend
|
||||
python3.12 -m venv .venv
|
||||
source .venv/bin/activate
|
||||
pip install .
|
||||
```
|
||||
|
||||
### 4. Configure
|
||||
|
||||
```bash
|
||||
sudo cp .env.example /opt/stock-data-backend/.env
|
||||
sudo chown stockdata:stockdata /opt/stock-data-backend/.env
|
||||
# Edit .env with production values (strong JWT_SECRET, real API keys, etc.)
|
||||
```
|
||||
|
||||
### 5. Database
|
||||
|
||||
```bash
|
||||
DB_NAME=stock_data_backend DB_USER=stock_backend DB_PASS=strong_password ./deploy/setup_db.sh
|
||||
```
|
||||
|
||||
### 6. Build frontend
|
||||
|
||||
```bash
|
||||
cd frontend
|
||||
npm ci
|
||||
npm run build
|
||||
```
|
||||
|
||||
### 7. Systemd service
|
||||
|
||||
```bash
|
||||
sudo cp deploy/stock-data-backend.service /etc/systemd/system/
|
||||
sudo systemctl daemon-reload
|
||||
sudo systemctl enable --now stock-data-backend
|
||||
```
|
||||
|
||||
### 8. Nginx reverse proxy
|
||||
|
||||
```bash
|
||||
sudo cp deploy/nginx.conf /etc/nginx/sites-available/stock-data-backend
|
||||
sudo ln -s /etc/nginx/sites-available/stock-data-backend /etc/nginx/sites-enabled/
|
||||
sudo nginx -t && sudo systemctl reload nginx
|
||||
```
|
||||
|
||||
Nginx serves the frontend static files from `frontend/dist/` and proxies `/api/v1/` to the backend.
|
||||
|
||||
### 9. SSL (recommended)
|
||||
|
||||
```bash
|
||||
sudo apt install certbot python3-certbot-nginx
|
||||
sudo certbot --nginx -d signal.thiessen.io
|
||||
```
|
||||
|
||||
### Verify
|
||||
|
||||
```bash
|
||||
curl https://signal.thiessen.io/api/v1/health
|
||||
```
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
app/
|
||||
├── main.py # FastAPI app, lifespan, router wiring
|
||||
├── config.py # Pydantic settings from .env
|
||||
├── database.py # Async SQLAlchemy engine + session
|
||||
├── dependencies.py # DI: DB session, auth guards
|
||||
├── exceptions.py # Exception hierarchy
|
||||
├── middleware.py # Global error handler → JSON envelope
|
||||
├── cache.py # LRU cache with per-ticker invalidation
|
||||
├── scheduler.py # APScheduler job definitions
|
||||
├── models/ # SQLAlchemy ORM models
|
||||
├── schemas/ # Pydantic request/response schemas
|
||||
├── services/ # Business logic layer
|
||||
├── providers/ # External data provider integrations
|
||||
└── routers/ # FastAPI route handlers
|
||||
|
||||
frontend/
|
||||
├── index.html # SPA entry point
|
||||
├── vite.config.ts # Vite config with API proxy
|
||||
├── tailwind.config.ts # Tailwind + glassmorphism theme
|
||||
├── package.json
|
||||
└── src/
|
||||
├── App.tsx # Route definitions
|
||||
├── main.tsx # React entry point
|
||||
├── api/ # Axios API client modules (one per resource)
|
||||
├── components/
|
||||
│ ├── admin/ # User table, job controls, settings, data cleanup
|
||||
│ ├── auth/ # Protected route wrapper
|
||||
│ ├── charts/ # Canvas candlestick chart
|
||||
│ ├── layout/ # App shell, sidebar, mobile nav
|
||||
│ ├── rankings/ # Rankings table, weights form
|
||||
│ ├── scanner/ # Trade table
|
||||
│ ├── ticker/ # Sentiment panel, fundamentals, indicators, S/R overlay
|
||||
│ ├── ui/ # Badge, toast, skeleton, score card, confirm dialog
|
||||
│ └── watchlist/ # Watchlist table, add ticker form
|
||||
├── hooks/ # React Query hooks (one per resource)
|
||||
├── lib/ # Types, formatting utilities
|
||||
├── pages/ # Page components (7 pages)
|
||||
├── stores/ # Zustand auth store
|
||||
└── styles/ # Global CSS with glassmorphism classes
|
||||
|
||||
deploy/
|
||||
├── nginx.conf # Reverse proxy + static file serving
|
||||
├── setup_db.sh # Idempotent DB setup script
|
||||
└── stock-data-backend.service # systemd unit
|
||||
|
||||
tests/
|
||||
├── conftest.py # Fixtures, strategies, test DB
|
||||
├── unit/ # Unit tests
|
||||
└── property/ # Property-based tests (Hypothesis)
|
||||
```
|
||||
40
alembic.ini
Normal file
40
alembic.ini
Normal file
@@ -0,0 +1,40 @@
|
||||
[alembic]
|
||||
script_location = alembic
|
||||
prepend_sys_path = .
|
||||
# Placeholder only — alembic/env.py overrides this at runtime with the
# application's DATABASE_URL setting.
sqlalchemy.url = driver://user:pass@localhost/dbname
|
||||
|
||||
[post_write_hooks]
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
||||
67
alembic/env.py
Normal file
67
alembic/env.py
Normal file
@@ -0,0 +1,67 @@
|
||||
import asyncio
|
||||
from logging.config import fileConfig
|
||||
|
||||
from alembic import context
|
||||
from sqlalchemy import pool
|
||||
from sqlalchemy.ext.asyncio import async_engine_from_config
|
||||
|
||||
from app.config import settings
|
||||
from app.database import Base
|
||||
|
||||
# Import all models so they register with Base.metadata
|
||||
import app.models # noqa: F401
|
||||
|
||||
# Alembic's runtime configuration object, populated from alembic.ini.
config = context.config

# Configure Python logging from the [loggers]/[handlers]/[formatters]
# sections of alembic.ini, when a config file is present.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Override sqlalchemy.url with the app's database URL, so the placeholder
# value in alembic.ini is never used at runtime.
config.set_main_option("sqlalchemy.url", settings.database_url)

# All ORM models were imported above, so every table is registered on
# Base.metadata; migrations target this metadata.
target_metadata = Base.metadata
|
||||
|
||||
|
||||
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode: emit SQL without a DB connection.

    Reads the URL from the alembic config (overridden earlier with the
    app's DATABASE_URL) and renders statements with literal bind values.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
def do_run_migrations(connection):
    """Configure the migration context on a sync connection and run.

    Invoked via ``connection.run_sync`` from the async runner below.
    """
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()
|
||||
|
||||
|
||||
async def run_async_migrations() -> None:
    """Run migrations in 'online' mode against an async engine.

    Builds the engine from the alembic config section; NullPool is used
    because a migration run is a one-shot operation. The engine is
    disposed once the migrations have been applied.
    """
    engine = async_engine_from_config(
        config.get_section(config.config_ini_section, {}),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    async with engine.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await engine.dispose()
|
||||
|
||||
|
||||
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Wraps the async migration runner in ``asyncio.run`` so alembic's
    synchronous entry point can drive the async engine.
    """
    asyncio.run(run_async_migrations())


# Alembic executes this module directly: dispatch on the requested mode
# (``--sql`` invocations take the offline path, normal runs the online one).
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
|
||||
26
alembic/script.py.mako
Normal file
26
alembic/script.py.mako
Normal file
@@ -0,0 +1,26 @@
|
||||
"""${message}
|
||||
|
||||
Revision ID: ${up_revision}
|
||||
Revises: ${down_revision | comma,n}
|
||||
Create Date: ${create_date}
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
${imports if imports else ""}
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = ${repr(up_revision)}
|
||||
down_revision: Union[str, None] = ${repr(down_revision)}
|
||||
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
|
||||
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
${downgrades if downgrades else "pass"}
|
||||
180
alembic/versions/001_initial_schema.py
Normal file
180
alembic/versions/001_initial_schema.py
Normal file
@@ -0,0 +1,180 @@
|
||||
"""initial_schema
|
||||
|
||||
Revision ID: 001
|
||||
Revises:
|
||||
Create Date: 2025-01-01 00:00:00.000000
|
||||
|
||||
"""
|
||||
from typing import Sequence, Union
|
||||
|
||||
from alembic import op
|
||||
import sqlalchemy as sa
|
||||
|
||||
|
||||
# revision identifiers, used by Alembic.
|
||||
revision: str = "001"
|
||||
down_revision: Union[str, None] = None
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
    """Create the full initial schema.

    Creation order matters: tables without foreign keys first, then
    tables referencing ``tickers``, and finally ``watchlist_entries``,
    which references both ``users`` and ``tickers``. All FKs to
    ``tickers``/``users`` cascade on delete, so removing a ticker or
    user removes its dependent rows.
    """
    # Independent tables (no foreign keys)
    op.create_table(
        "system_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("key", sa.String(length=100), nullable=False),
        sa.Column("value", sa.Text(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("key"),
    )
    op.create_table(
        "tickers",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("symbol", sa.String(length=10), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("symbol"),
    )
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("username", sa.String(length=100), nullable=False),
        sa.Column("password_hash", sa.String(length=255), nullable=False),
        sa.Column("role", sa.String(length=20), nullable=False),
        sa.Column("has_access", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("username"),
    )

    # Tables with FK to tickers
    op.create_table(
        "composite_scores",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("score", sa.Float(), nullable=False),
        sa.Column("is_stale", sa.Boolean(), nullable=False),
        sa.Column("weights_json", sa.Text(), nullable=False),
        sa.Column("computed_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "dimension_scores",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("dimension", sa.String(length=50), nullable=False),
        sa.Column("score", sa.Float(), nullable=False),
        sa.Column("is_stale", sa.Boolean(), nullable=False),
        sa.Column("computed_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "fundamental_data",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        # Fundamental metrics are nullable: a provider may return only a subset.
        sa.Column("pe_ratio", sa.Float(), nullable=True),
        sa.Column("revenue_growth", sa.Float(), nullable=True),
        sa.Column("earnings_surprise", sa.Float(), nullable=True),
        sa.Column("market_cap", sa.Float(), nullable=True),
        sa.Column("fetched_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "ingestion_progress",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("last_ingested_date", sa.Date(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # One progress row per ticker.
        sa.UniqueConstraint("ticker_id", name="uq_ingestion_progress_ticker"),
    )
    op.create_table(
        "ohlcv_records",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("date", sa.Date(), nullable=False),
        sa.Column("open", sa.Float(), nullable=False),
        sa.Column("high", sa.Float(), nullable=False),
        sa.Column("low", sa.Float(), nullable=False),
        sa.Column("close", sa.Float(), nullable=False),
        sa.Column("volume", sa.BigInteger(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # One bar per ticker per day; upserts key on this constraint.
        sa.UniqueConstraint("ticker_id", "date", name="uq_ohlcv_ticker_date"),
    )
    # Composite index for range queries by ticker and date.
    op.create_index("ix_ohlcv_ticker_date", "ohlcv_records", ["ticker_id", "date"], unique=False)
    op.create_table(
        "sentiment_scores",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("classification", sa.String(length=20), nullable=False),
        sa.Column("confidence", sa.Integer(), nullable=False),
        sa.Column("source", sa.String(length=100), nullable=False),
        sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "sr_levels",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("price_level", sa.Float(), nullable=False),
        sa.Column("type", sa.String(length=20), nullable=False),
        sa.Column("strength", sa.Integer(), nullable=False),
        sa.Column("detection_method", sa.String(length=50), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "trade_setups",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("direction", sa.String(length=10), nullable=False),
        sa.Column("entry_price", sa.Float(), nullable=False),
        sa.Column("stop_loss", sa.Float(), nullable=False),
        sa.Column("target", sa.Float(), nullable=False),
        sa.Column("rr_ratio", sa.Float(), nullable=False),
        sa.Column("composite_score", sa.Float(), nullable=False),
        sa.Column("detected_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )

    # Table with FKs to both users and tickers
    op.create_table(
        "watchlist_entries",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("entry_type", sa.String(length=10), nullable=False),
        sa.Column("added_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # A ticker appears at most once per user's watchlist.
        sa.UniqueConstraint("user_id", "ticker_id", name="uq_watchlist_user_ticker"),
    )
|
||||
|
||||
|
||||
def downgrade() -> None:
    """Drop every table created by ``upgrade()``, in reverse dependency order.

    Child tables holding foreign keys are dropped before their parents
    (``tickers``, ``users``) so no FK violations occur.
    """
    op.drop_table("watchlist_entries")  # FKs to both users and tickers
    op.drop_table("trade_setups")
    op.drop_table("sr_levels")
    op.drop_table("sentiment_scores")
    # Explicit index drop mirrors the explicit create in upgrade().
    op.drop_index("ix_ohlcv_ticker_date", table_name="ohlcv_records")
    op.drop_table("ohlcv_records")
    op.drop_table("ingestion_progress")
    op.drop_table("fundamental_data")
    op.drop_table("dimension_scores")
    op.drop_table("composite_scores")
    # Parent tables last, once nothing references them any more.
    op.drop_table("users")
    op.drop_table("tickers")
    op.drop_table("system_settings")
|
||||
1
app/__init__.py
Normal file
1
app/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
86
app/cache.py
Normal file
86
app/cache.py
Normal file
@@ -0,0 +1,86 @@
|
||||
"""LRU cache wrapper with per-ticker invalidation.
|
||||
|
||||
Provides an in-memory cache (max 1000 entries) keyed on
|
||||
(ticker, start_date, end_date, indicator_type). Supports selective
|
||||
invalidation of all entries for a given ticker — needed when new
|
||||
OHLCV data is ingested.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import OrderedDict
|
||||
from typing import Any, Hashable
|
||||
|
||||
CacheKey = tuple[str, Any, Any, str] # (ticker, start_date, end_date, indicator_type)
|
||||
|
||||
_DEFAULT_MAX_SIZE = 1000
|
||||
|
||||
|
||||
class LRUCache:
    """Least-recently-used cache built on ``OrderedDict`` insertion order.

    Keys are ``CacheKey`` tuples whose first element is the ticker symbol;
    that is what ``invalidate_ticker`` matches on.

    Parameters
    ----------
    max_size:
        Capacity. Once exceeded, the least-recently-used entry is
        evicted. Defaults to 1000 (the module-level ``_DEFAULT_MAX_SIZE``).
    """

    def __init__(self, max_size: int = 1000) -> None:
        self._capacity = max_size
        self._entries: OrderedDict[Hashable, Any] = OrderedDict()

    # ------------------------------------------------------------------
    # Public API
    # ------------------------------------------------------------------

    def get(self, key: CacheKey) -> Any | None:
        """Return the cached value for *key*, or ``None`` on a miss.

        A hit promotes the entry to most-recently-used. Note a stored
        value of ``None`` is indistinguishable from a miss.
        """
        try:
            value = self._entries[key]
        except KeyError:
            return None
        self._entries.move_to_end(key)
        return value

    def set(self, key: CacheKey, value: Any) -> None:
        """Insert or overwrite *key*, evicting the LRU entry when full."""
        if key in self._entries:
            self._entries.move_to_end(key)
        elif len(self._entries) >= self._capacity:
            # Head of the OrderedDict is the least-recently-used entry.
            self._entries.popitem(last=False)
        self._entries[key] = value

    def invalidate_ticker(self, ticker: str) -> int:
        """Drop every entry whose key's first element equals *ticker*.

        Returns the number of entries removed. Used when freshly ingested
        OHLCV data makes cached indicator values stale.
        """
        removed = 0
        for stale_key in [k for k in self._entries if k[0] == ticker]:
            del self._entries[stale_key]
            removed += 1
        return removed

    def clear(self) -> None:
        """Drop every entry."""
        self._entries.clear()

    @property
    def size(self) -> int:
        """Number of entries currently cached."""
        return len(self._entries)

    @property
    def max_size(self) -> int:
        """Configured capacity."""
        return self._capacity
|
||||
|
||||
|
||||
# Module-level singleton used by the indicator service.
|
||||
indicator_cache = LRUCache()
|
||||
43
app/config.py
Normal file
43
app/config.py
Normal file
@@ -0,0 +1,43 @@
|
||||
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||
|
||||
|
||||
class Settings(BaseSettings):
    """Application configuration loaded from the environment / ``.env`` file.

    Field names map to upper-cased environment variables via the
    pydantic-settings default (e.g. ``database_url`` <- ``DATABASE_URL``).
    Secrets default to empty strings and must be supplied in deployment.
    """

    model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8")

    # Database
    database_url: str = "postgresql+asyncpg://stock_backend:changeme@localhost:5432/stock_data_backend"

    # Auth
    # NOTE(review): placeholder secret — must be overridden before production.
    jwt_secret: str = "change-this-to-a-random-secret"
    jwt_expiry_minutes: int = 60

    # OHLCV Provider — Alpaca Markets
    alpaca_api_key: str = ""
    alpaca_api_secret: str = ""

    # Sentiment Provider — Gemini with Search Grounding
    gemini_api_key: str = ""
    gemini_model: str = "gemini-2.0-flash"

    # Fundamentals Provider — Financial Modeling Prep
    fmp_api_key: str = ""

    # Scheduled Jobs
    data_collector_frequency: str = "daily"
    sentiment_poll_interval_minutes: int = 30
    fundamental_fetch_frequency: str = "daily"
    rr_scan_frequency: str = "daily"

    # Scoring Defaults
    default_watchlist_auto_size: int = 10
    default_rr_threshold: float = 3.0

    # Database Pool
    db_pool_size: int = 5
    db_pool_timeout: int = 30

    # Logging
    log_level: str = "INFO"
|
||||
|
||||
|
||||
settings = Settings()
|
||||
33
app/database.py
Normal file
33
app/database.py
Normal file
@@ -0,0 +1,33 @@
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
from sqlalchemy.ext.asyncio import (
|
||||
AsyncSession,
|
||||
async_sessionmaker,
|
||||
create_async_engine,
|
||||
)
|
||||
from sqlalchemy.orm import DeclarativeBase
|
||||
|
||||
from app.config import settings
|
||||
|
||||
# Single async engine shared by the application; pool sizing from settings.
engine = create_async_engine(
    settings.database_url,
    pool_size=settings.db_pool_size,
    pool_timeout=settings.db_pool_timeout,
    pool_pre_ping=True,  # validate connections before use; drops stale ones
    echo=False,
)

# Session factory; expire_on_commit=False keeps ORM objects readable after commit.
async_session_factory = async_sessionmaker(
    engine,
    class_=AsyncSession,
    expire_on_commit=False,
)


class Base(DeclarativeBase):
    """Declarative base shared by all ORM models."""

    pass


async def get_session() -> AsyncGenerator[AsyncSession, None]:
    """Yield one ``AsyncSession``; the context manager closes it on exit."""
    async with async_session_factory() as session:
        yield session
|
||||
82
app/dependencies.py
Normal file
82
app/dependencies.py
Normal file
@@ -0,0 +1,82 @@
|
||||
"""FastAPI dependency injection factories.
|
||||
|
||||
Provides DB session, current user extraction from JWT, and role/access guards.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
from fastapi import Depends
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
from jose import JWTError, jwt
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import settings
|
||||
from app.database import get_session
|
||||
from app.exceptions import AuthenticationError, AuthorizationError
|
||||
from app.models.user import User
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_bearer_scheme = HTTPBearer(auto_error=False)
|
||||
|
||||
JWT_ALGORITHM = "HS256"
|
||||
|
||||
|
||||
async def get_db() -> AsyncGenerator[AsyncSession, None]:
    """Yield an async DB session.

    Thin FastAPI dependency wrapper around ``app.database.get_session`` so
    routers depend on this module rather than on the database module.
    """
    async for session in get_session():
        yield session
|
||||
|
||||
|
||||
async def get_current_user(
    credentials: HTTPAuthorizationCredentials | None = Depends(_bearer_scheme),
    db: AsyncSession = Depends(get_db),
) -> User:
    """Extract and validate JWT from Authorization header, return the User.

    Raises ``AuthenticationError`` (401) when the header is missing, the
    token is invalid or expired, the ``sub`` claim is absent or not an
    integer string, or no matching user row exists.
    """
    if credentials is None:
        # HTTPBearer(auto_error=False) yields None instead of raising, so a
        # missing header maps to the app's own 401 envelope.
        raise AuthenticationError("Authentication required")

    token = credentials.credentials
    try:
        payload = jwt.decode(
            token,
            settings.jwt_secret,
            algorithms=[JWT_ALGORITHM],
        )
        user_id_str: str | None = payload.get("sub")
        if user_id_str is None:
            raise AuthenticationError("Invalid token: missing subject")
        user_id = int(user_id_str)
    except JWTError as exc:
        # NOTE(review): expiry detected by message substring; catching
        # jose's ExpiredSignatureError explicitly would be more robust — confirm.
        if "expired" in str(exc).lower():
            raise AuthenticationError("Token expired") from exc
        raise AuthenticationError("Invalid token") from exc
    except (ValueError, TypeError) as exc:
        # "sub" was present but not convertible to int.
        raise AuthenticationError("Invalid token: bad subject") from exc

    result = await db.execute(select(User).where(User.id == user_id))
    user = result.scalar_one_or_none()
    if user is None:
        raise AuthenticationError("User not found")

    return user
|
||||
|
||||
|
||||
async def require_admin(
    user: User = Depends(get_current_user),
) -> User:
    """Dependency guard: allow only users whose role is ``admin``.

    Raises ``AuthorizationError`` (403) for everyone else.
    """
    if user.role == "admin":
        return user
    raise AuthorizationError("Insufficient permissions")
|
||||
|
||||
|
||||
async def require_access(
    user: User = Depends(get_current_user),
) -> User:
    """Dependency guard: allow only users whose API access flag is set.

    Raises ``AuthorizationError`` (403) when ``has_access`` is false.
    """
    if user.has_access:
        return user
    raise AuthorizationError("Insufficient permissions")
|
||||
52
app/exceptions.py
Normal file
52
app/exceptions.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""Application exception hierarchy.
|
||||
|
||||
All custom exceptions inherit from AppError. The global exception handler
|
||||
in middleware.py catches these and returns the appropriate JSON envelope.
|
||||
"""
|
||||
|
||||
|
||||
class AppError(Exception):
    """Root of the application's exception hierarchy.

    Subclasses override ``status_code`` and ``message``; the global handler
    in middleware.py turns any AppError into a JSON error envelope with
    that status.
    """

    status_code: int = 500
    message: str = "Internal server error"

    def __init__(self, message: str | None = None):
        if message is None:
            # Fall back to the class-level default message.
            message = self.message
        else:
            self.message = message
        super().__init__(message)
|
||||
|
||||
|
||||
class ValidationError(AppError):
    """Request was syntactically valid but semantically wrong."""

    status_code = 400
    message = "Validation error"


class NotFoundError(AppError):
    """Requested resource does not exist."""

    status_code = 404
    message = "Resource not found"


class DuplicateError(AppError):
    """Attempt to create a resource that already exists."""

    status_code = 409
    message = "Resource already exists"


class AuthenticationError(AppError):
    """Missing or invalid credentials."""

    status_code = 401
    message = "Authentication required"


class AuthorizationError(AppError):
    """Authenticated, but not permitted to perform the action."""

    status_code = 403
    message = "Insufficient permissions"


class ProviderError(AppError):
    """Upstream market-data provider failed or is unreachable."""

    status_code = 502
    message = "Market data provider unavailable"


class RateLimitError(AppError):
    """A rate limit was hit (ours, or an upstream provider's)."""

    status_code = 429
    message = "Rate limited"
|
||||
106
app/main.py
Normal file
106
app/main.py
Normal file
@@ -0,0 +1,106 @@
|
||||
"""FastAPI application entry point with lifespan management."""
|
||||
|
||||
import logging
|
||||
import sys
|
||||
from contextlib import asynccontextmanager
|
||||
from collections.abc import AsyncGenerator
|
||||
|
||||
from fastapi import FastAPI
|
||||
from passlib.hash import bcrypt
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import settings
|
||||
from app.database import async_session_factory, engine
|
||||
from app.middleware import register_exception_handlers
|
||||
from app.models.user import User
|
||||
from app.scheduler import configure_scheduler, scheduler
|
||||
from app.routers.admin import router as admin_router
|
||||
from app.routers.auth import router as auth_router
|
||||
from app.routers.health import router as health_router
|
||||
from app.routers.ingestion import router as ingestion_router
|
||||
from app.routers.ohlcv import router as ohlcv_router
|
||||
from app.routers.indicators import router as indicators_router
|
||||
from app.routers.fundamentals import router as fundamentals_router
|
||||
from app.routers.scores import router as scores_router
|
||||
from app.routers.trades import router as trades_router
|
||||
from app.routers.watchlist import router as watchlist_router
|
||||
from app.routers.sentiment import router as sentiment_router
|
||||
from app.routers.sr_levels import router as sr_levels_router
|
||||
from app.routers.tickers import router as tickers_router
|
||||
|
||||
|
||||
def _configure_logging() -> None:
    """Route root-logger output to stdout as one JSON object per line.

    Fix: the previous %-style template produced malformed JSON whenever a
    log message contained a double quote or newline; serialising with
    ``json.dumps`` escapes everything correctly. Level comes from
    ``settings.log_level``.
    """
    import json  # local import: only this function needs it

    class _JsonFormatter(logging.Formatter):
        """Serialise each record via json.dumps so output is always valid JSON."""

        def format(self, record: logging.LogRecord) -> str:
            return json.dumps(
                {
                    "time": self.formatTime(record),
                    "level": record.levelname,
                    "logger": record.name,
                    "message": record.getMessage(),
                }
            )

    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(_JsonFormatter())
    root = logging.getLogger()
    root.handlers.clear()  # drop pre-existing handlers (e.g. server defaults)
    root.addHandler(handler)
    root.setLevel(settings.log_level.upper())
|
||||
|
||||
|
||||
async def _create_default_admin(session: AsyncSession) -> None:
    """Create the default admin account if no admin user exists.

    Runs once at startup. SECURITY NOTE(review): bootstrap credentials are
    admin/admin — they should be rotated immediately after first login (or
    sourced from configuration) before any production deployment.
    """
    result = await session.execute(
        select(User).where(User.role == "admin")
    )
    if result.scalar_one_or_none() is None:
        admin = User(
            username="admin",
            password_hash=bcrypt.hash("admin"),
            role="admin",
            has_access=True,
        )
        session.add(admin)
        await session.commit()
        logging.getLogger(__name__).info("Default admin account created")
|
||||
|
||||
|
||||
@asynccontextmanager
async def lifespan(_app: FastAPI) -> AsyncGenerator[None, None]:
    """Manage startup and shutdown lifecycle.

    Startup order: configure logging, seed the default admin account,
    then configure and start the background scheduler. Shutdown reverses:
    stop the scheduler, then dispose the engine's connection pool.
    """
    logger = logging.getLogger(__name__)
    _configure_logging()
    logger.info("Starting Stock Data Backend")

    # Seed the admin account inside a short-lived session.
    async with async_session_factory() as session:
        await _create_default_admin(session)

    configure_scheduler()
    scheduler.start()
    logger.info("Scheduler started")

    yield  # application serves requests while suspended here

    scheduler.shutdown(wait=False)  # don't block shutdown on running jobs
    logger.info("Scheduler stopped")
    await engine.dispose()
    logger.info("Shutting down")
|
||||
|
||||
|
||||
# Application instance; lifespan handles logging, admin seeding and scheduler.
app = FastAPI(
    title="Stock Data Backend",
    version="0.1.0",
    lifespan=lifespan,
)

# Uniform JSON error envelope for AppError / validation / unhandled errors.
register_exception_handlers(app)
# Every feature router is mounted under the /api/v1 prefix.
app.include_router(health_router, prefix="/api/v1")
app.include_router(auth_router, prefix="/api/v1")
app.include_router(admin_router, prefix="/api/v1")
app.include_router(tickers_router, prefix="/api/v1")
app.include_router(ohlcv_router, prefix="/api/v1")
app.include_router(ingestion_router, prefix="/api/v1")
app.include_router(indicators_router, prefix="/api/v1")
app.include_router(sr_levels_router, prefix="/api/v1")
app.include_router(sentiment_router, prefix="/api/v1")
app.include_router(fundamentals_router, prefix="/api/v1")
app.include_router(scores_router, prefix="/api/v1")
app.include_router(trades_router, prefix="/api/v1")
app.include_router(watchlist_router, prefix="/api/v1")
|
||||
61
app/middleware.py
Normal file
61
app/middleware.py
Normal file
@@ -0,0 +1,61 @@
|
||||
"""Global exception handlers for the FastAPI application.
|
||||
|
||||
Maps AppError subclasses and other exceptions to JSON envelope responses.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import traceback
|
||||
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from app.exceptions import AppError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def register_exception_handlers(app: FastAPI) -> None:
    """Register all global exception handlers on the FastAPI app.

    Every response uses the same JSON envelope:
    ``{"status": "error", "data": None, "error": <message>}``.
    """

    @app.exception_handler(AppError)
    async def app_error_handler(_request: Request, exc: AppError) -> JSONResponse:
        # Known application errors carry their own HTTP status and message.
        return JSONResponse(
            status_code=exc.status_code,
            content={
                "status": "error",
                "data": None,
                "error": exc.message,
            },
        )

    @app.exception_handler(RequestValidationError)
    async def validation_error_handler(
        _request: Request, exc: RequestValidationError
    ) -> JSONResponse:
        # Flatten the validation error list into "loc.path: msg; ..." form.
        details = "; ".join(
            f"{'.'.join(str(loc) for loc in e['loc'])}: {e['msg']}"
            for e in exc.errors()
        )
        # NOTE(review): returns 400 rather than FastAPI's default 422 —
        # presumably a deliberate API convention; confirm against API docs.
        return JSONResponse(
            status_code=400,
            content={
                "status": "error",
                "data": None,
                "error": f"Validation error: {details}",
            },
        )

    @app.exception_handler(Exception)
    async def unhandled_error_handler(
        _request: Request, exc: Exception
    ) -> JSONResponse:
        # Full traceback stays server-side; client sees a generic message.
        logger.error("Unhandled exception:\n%s", traceback.format_exc())
        return JSONResponse(
            status_code=500,
            content={
                "status": "error",
                "data": None,
                "error": "Internal server error",
            },
        )
|
||||
25
app/models/__init__.py
Normal file
25
app/models/__init__.py
Normal file
@@ -0,0 +1,25 @@
|
||||
from app.models.ticker import Ticker
|
||||
from app.models.ohlcv import OHLCVRecord
|
||||
from app.models.user import User
|
||||
from app.models.sentiment import SentimentScore
|
||||
from app.models.fundamental import FundamentalData
|
||||
from app.models.score import DimensionScore, CompositeScore
|
||||
from app.models.sr_level import SRLevel
|
||||
from app.models.trade_setup import TradeSetup
|
||||
from app.models.watchlist import WatchlistEntry
|
||||
from app.models.settings import SystemSetting, IngestionProgress
|
||||
|
||||
__all__ = [
|
||||
"Ticker",
|
||||
"OHLCVRecord",
|
||||
"User",
|
||||
"SentimentScore",
|
||||
"FundamentalData",
|
||||
"DimensionScore",
|
||||
"CompositeScore",
|
||||
"SRLevel",
|
||||
"TradeSetup",
|
||||
"WatchlistEntry",
|
||||
"SystemSetting",
|
||||
"IngestionProgress",
|
||||
]
|
||||
24
app/models/fundamental.py
Normal file
24
app/models/fundamental.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import DateTime, Float, ForeignKey
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class FundamentalData(Base):
    """Point-in-time fundamentals snapshot for a ticker."""

    __tablename__ = "fundamental_data"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # All metrics nullable: the provider may not report every field.
    pe_ratio: Mapped[float | None] = mapped_column(Float, nullable=True)
    revenue_growth: Mapped[float | None] = mapped_column(Float, nullable=True)
    earnings_surprise: Mapped[float | None] = mapped_column(Float, nullable=True)
    market_cap: Mapped[float | None] = mapped_column(Float, nullable=True)
    # No default: callers supply the fetch timestamp explicitly.
    fetched_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="fundamental_data")
|
||||
30
app/models/ohlcv.py
Normal file
30
app/models/ohlcv.py
Normal file
@@ -0,0 +1,30 @@
|
||||
from datetime import date, datetime
|
||||
|
||||
from sqlalchemy import BigInteger, Date, DateTime, Float, ForeignKey, Index, UniqueConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class OHLCVRecord(Base):
    """One daily OHLCV bar for a ticker.

    ``(ticker_id, date)`` is unique; the matching composite index backs
    date-range queries over a ticker's history.
    """

    __tablename__ = "ohlcv_records"
    __table_args__ = (
        UniqueConstraint("ticker_id", "date", name="uq_ohlcv_ticker_date"),
        Index("ix_ohlcv_ticker_date", "ticker_id", "date"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    date: Mapped[date] = mapped_column(Date, nullable=False)
    open: Mapped[float] = mapped_column(Float, nullable=False)
    high: Mapped[float] = mapped_column(Float, nullable=False)
    low: Mapped[float] = mapped_column(Float, nullable=False)
    close: Mapped[float] = mapped_column(Float, nullable=False)
    # BigInteger: daily share volume can exceed 32-bit range.
    volume: Mapped[int] = mapped_column(BigInteger, nullable=False)
    # NOTE(review): datetime.utcnow is naive for a timezone=True column and
    # deprecated since Python 3.12; consider lambda: datetime.now(timezone.utc).
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )

    ticker = relationship("Ticker", back_populates="ohlcv_records")
|
||||
40
app/models/score.py
Normal file
40
app/models/score.py
Normal file
@@ -0,0 +1,40 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import Boolean, DateTime, Float, ForeignKey, String, Text
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class DimensionScore(Base):
    """Score of one scoring dimension for a ticker at a point in time."""

    __tablename__ = "dimension_scores"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # Which dimension this row scores (string identifier, up to 50 chars).
    dimension: Mapped[str] = mapped_column(String(50), nullable=False)
    score: Mapped[float] = mapped_column(Float, nullable=False)
    # presumably flipped to True when inputs change after computation — confirm
    is_stale: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    computed_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="dimension_scores")
|
||||
|
||||
|
||||
class CompositeScore(Base):
    """Combined score for a ticker.

    ``weights_json`` stores text (presumably the per-dimension weights used,
    serialised as JSON — confirm against the scoring service).
    """

    __tablename__ = "composite_scores"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    score: Mapped[float] = mapped_column(Float, nullable=False)
    is_stale: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    weights_json: Mapped[str] = mapped_column(Text, nullable=False)
    computed_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="composite_scores")
|
||||
23
app/models/sentiment.py
Normal file
23
app/models/sentiment.py
Normal file
@@ -0,0 +1,23 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class SentimentScore(Base):
    """One sentiment observation for a ticker."""

    __tablename__ = "sentiment_scores"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # e.g. "positive"/"negative"/"neutral" — exact vocabulary set by the
    # sentiment provider; confirm against its output schema.
    classification: Mapped[str] = mapped_column(String(20), nullable=False)
    # Integer confidence; range not visible here (likely 0-100 — confirm).
    confidence: Mapped[int] = mapped_column(Integer, nullable=False)
    source: Mapped[str] = mapped_column(String(100), nullable=False)
    timestamp: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="sentiment_scores")
|
||||
35
app/models/settings.py
Normal file
35
app/models/settings.py
Normal file
@@ -0,0 +1,35 @@
|
||||
from datetime import date, datetime
|
||||
|
||||
from sqlalchemy import Date, DateTime, ForeignKey, String, Text, UniqueConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class SystemSetting(Base):
    """Simple key/value store for runtime-tunable settings."""

    __tablename__ = "system_settings"

    id: Mapped[int] = mapped_column(primary_key=True)
    key: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    value: Mapped[str] = mapped_column(Text, nullable=False)
    # NOTE(review): datetime.utcnow is naive for a timezone=True column and
    # deprecated since Python 3.12; consider lambda: datetime.now(timezone.utc).
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
|
||||
|
||||
|
||||
class IngestionProgress(Base):
    """High-water mark of OHLCV ingestion per ticker (one row per ticker)."""

    __tablename__ = "ingestion_progress"
    __table_args__ = (
        UniqueConstraint("ticker_id", name="uq_ingestion_progress_ticker"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # Latest trading date whose bars have been stored for this ticker.
    last_ingested_date: Mapped[date] = mapped_column(Date, nullable=False)
    # NOTE(review): datetime.utcnow is naive for a timezone=True column and
    # deprecated since Python 3.12; consider lambda: datetime.now(timezone.utc).
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )

    ticker = relationship("Ticker", back_populates="ingestion_progress")
|
||||
24
app/models/sr_level.py
Normal file
24
app/models/sr_level.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import DateTime, Float, ForeignKey, Integer, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class SRLevel(Base):
    """Detected support/resistance price level for a ticker."""

    __tablename__ = "sr_levels"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    price_level: Mapped[float] = mapped_column(Float, nullable=False)
    # Level category (e.g. support vs resistance) — values set by the
    # detection service; confirm its vocabulary.
    type: Mapped[str] = mapped_column(String(20), nullable=False)
    strength: Mapped[int] = mapped_column(Integer, nullable=False)
    detection_method: Mapped[str] = mapped_column(String(50), nullable=False)
    # NOTE(review): datetime.utcnow is naive for a timezone=True column and
    # deprecated since Python 3.12; consider lambda: datetime.now(timezone.utc).
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )

    ticker = relationship("Ticker", back_populates="sr_levels")
|
||||
27
app/models/ticker.py
Normal file
27
app/models/ticker.py
Normal file
@@ -0,0 +1,27 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import String, DateTime
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class Ticker(Base):
    """Master record for a stock symbol; parent of all per-ticker data."""

    __tablename__ = "tickers"

    id: Mapped[int] = mapped_column(primary_key=True)
    symbol: Mapped[str] = mapped_column(String(10), unique=True, nullable=False)
    # NOTE(review): datetime.utcnow is naive for a timezone=True column and
    # deprecated since Python 3.12; consider lambda: datetime.now(timezone.utc).
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )

    # Relationships (cascade deletes): deleting a Ticker removes all rows below.
    ohlcv_records = relationship("OHLCVRecord", back_populates="ticker", cascade="all, delete-orphan")
    sentiment_scores = relationship("SentimentScore", back_populates="ticker", cascade="all, delete-orphan")
    fundamental_data = relationship("FundamentalData", back_populates="ticker", cascade="all, delete-orphan")
    sr_levels = relationship("SRLevel", back_populates="ticker", cascade="all, delete-orphan")
    dimension_scores = relationship("DimensionScore", back_populates="ticker", cascade="all, delete-orphan")
    composite_scores = relationship("CompositeScore", back_populates="ticker", cascade="all, delete-orphan")
    trade_setups = relationship("TradeSetup", back_populates="ticker", cascade="all, delete-orphan")
    watchlist_entries = relationship("WatchlistEntry", back_populates="ticker", cascade="all, delete-orphan")
    # uselist=False: at most one ingestion-progress row per ticker.
    ingestion_progress = relationship("IngestionProgress", back_populates="ticker", cascade="all, delete-orphan", uselist=False)
|
||||
26
app/models/trade_setup.py
Normal file
26
app/models/trade_setup.py
Normal file
@@ -0,0 +1,26 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import DateTime, Float, ForeignKey, String
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class TradeSetup(Base):
    """A detected trade opportunity with entry/stop/target and R:R ratio."""

    __tablename__ = "trade_setups"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # Trade direction (short string, e.g. long/short — confirm vocabulary).
    direction: Mapped[str] = mapped_column(String(10), nullable=False)
    entry_price: Mapped[float] = mapped_column(Float, nullable=False)
    stop_loss: Mapped[float] = mapped_column(Float, nullable=False)
    target: Mapped[float] = mapped_column(Float, nullable=False)
    rr_ratio: Mapped[float] = mapped_column(Float, nullable=False)
    # Snapshot of the ticker's composite score at detection time.
    composite_score: Mapped[float] = mapped_column(Float, nullable=False)
    detected_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="trade_setups")
|
||||
24
app/models/user.py
Normal file
24
app/models/user.py
Normal file
@@ -0,0 +1,24 @@
|
||||
from datetime import datetime, timezone

from sqlalchemy import Boolean, DateTime, String
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base
|
||||
|
||||
|
||||
class User(Base):
    """Account row: credentials, role, and API-access flag."""

    __tablename__ = "users"

    id: Mapped[int] = mapped_column(primary_key=True)
    username: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    # "admin" role gates admin-only endpoints; has_access gates the rest.
    role: Mapped[str] = mapped_column(String(20), nullable=False, default="user")
    has_access: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    # Fix: datetime.utcnow returns a *naive* datetime, which a timezone=True
    # (timestamptz) column interprets in the session's timezone — wrong
    # instants whenever the DB session isn't UTC — and utcnow is deprecated
    # since Python 3.12. Use timezone-aware UTC timestamps instead.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=lambda: datetime.now(timezone.utc), nullable=False
    )
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True),
        default=lambda: datetime.now(timezone.utc),
        onupdate=lambda: datetime.now(timezone.utc),
        nullable=False,
    )

    watchlist_entries = relationship("WatchlistEntry", back_populates="user", cascade="all, delete-orphan")
|
||||
28
app/models/watchlist.py
Normal file
28
app/models/watchlist.py
Normal file
@@ -0,0 +1,28 @@
|
||||
from datetime import datetime
|
||||
|
||||
from sqlalchemy import DateTime, ForeignKey, String, UniqueConstraint
|
||||
from sqlalchemy.orm import Mapped, mapped_column, relationship
|
||||
|
||||
from app.database import Base
|
||||
|
||||
|
||||
class WatchlistEntry(Base):
    """Membership of one ticker on one user's watchlist.

    A user can hold each ticker at most once (unique on user_id+ticker_id).
    """

    __tablename__ = "watchlist_entries"
    __table_args__ = (
        UniqueConstraint("user_id", "ticker_id", name="uq_watchlist_user_ticker"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    user_id: Mapped[int] = mapped_column(
        ForeignKey("users.id", ondelete="CASCADE"), nullable=False
    )
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # How the entry was added (short code — confirm vocabulary, e.g. auto/manual).
    entry_type: Mapped[str] = mapped_column(String(10), nullable=False)
    # NOTE(review): datetime.utcnow is naive for a timezone=True column and
    # deprecated since Python 3.12; consider lambda: datetime.now(timezone.utc).
    added_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )

    user = relationship("User", back_populates="watchlist_entries")
    ticker = relationship("Ticker", back_populates="watchlist_entries")
|
||||
1
app/providers/__init__.py
Normal file
1
app/providers/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
63
app/providers/alpaca.py
Normal file
63
app/providers/alpaca.py
Normal file
@@ -0,0 +1,63 @@
|
||||
"""Alpaca Markets OHLCV provider using the alpaca-py SDK."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from datetime import date
|
||||
|
||||
from alpaca.data.historical import StockHistoricalDataClient
|
||||
from alpaca.data.requests import StockBarsRequest
|
||||
from alpaca.data.timeframe import TimeFrame
|
||||
|
||||
from app.exceptions import ProviderError, RateLimitError
|
||||
from app.providers.protocol import OHLCVData
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AlpacaOHLCVProvider:
    """Fetches daily OHLCV bars from Alpaca Markets Data API."""

    def __init__(self, api_key: str, api_secret: str) -> None:
        # Fail fast on missing credentials instead of erroring per request.
        if not api_key or not api_secret:
            raise ProviderError("Alpaca API key and secret are required")
        self._client = StockHistoricalDataClient(api_key, api_secret)

    async def fetch_ohlcv(
        self, ticker: str, start_date: date, end_date: date
    ) -> list[OHLCVData]:
        """Fetch daily OHLCV bars for *ticker* between *start_date* and *end_date*.

        Raises RateLimitError when the provider reports throttling and
        ProviderError for any other failure.
        """
        try:
            request = StockBarsRequest(
                symbol_or_symbols=ticker,
                timeframe=TimeFrame.Day,
                start=start_date,
                end=end_date,
            )

            # alpaca-py's client is synchronous — run in a thread
            bars = await asyncio.to_thread(self._client.get_stock_bars, request)

            results: list[OHLCVData] = []
            # NOTE(review): response shape differs across alpaca-py versions;
            # mapping-style access is tried first, then the .data attribute —
            # confirm against the pinned SDK version.
            bar_set = bars.get(ticker, []) if hasattr(bars, "get") else getattr(bars, "data", {}).get(ticker, [])
            for bar in bar_set:
                results.append(
                    OHLCVData(
                        ticker=ticker,
                        date=bar.timestamp.date(),
                        open=float(bar.open),
                        high=float(bar.high),
                        low=float(bar.low),
                        close=float(bar.close),
                        volume=int(bar.volume),
                    )
                )
            return results

        except Exception as exc:
            # Rate limiting is classified by message text since no dedicated
            # exception type is inspected here; everything else -> ProviderError.
            msg = str(exc).lower()
            if "rate" in msg and "limit" in msg:
                raise RateLimitError(f"Alpaca rate limit hit for {ticker}") from exc
            logger.error("Alpaca provider error for %s: %s", ticker, exc)
            raise ProviderError(f"Alpaca provider error for {ticker}: {exc}") from exc
|
||||
94
app/providers/fmp.py
Normal file
94
app/providers/fmp.py
Normal file
@@ -0,0 +1,94 @@
|
||||
"""Financial Modeling Prep (FMP) fundamentals provider using httpx."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
import httpx
|
||||
|
||||
from app.exceptions import ProviderError, RateLimitError
|
||||
from app.providers.protocol import FundamentalData
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_FMP_BASE_URL = "https://financialmodelingprep.com/api/v3"
|
||||
|
||||
|
||||
class FMPFundamentalProvider:
    """Fetches fundamental data from Financial Modeling Prep REST API."""

    def __init__(self, api_key: str) -> None:
        """Store the API key.

        Raises:
            ProviderError: when *api_key* is empty/missing.
        """
        if not api_key:
            raise ProviderError("FMP API key is required")
        self._api_key = api_key

    async def fetch_fundamentals(self, ticker: str) -> FundamentalData:
        """Fetch P/E, revenue growth, earnings surprise, and market cap.

        Raises:
            RateLimitError: when FMP answers HTTP 429.
            ProviderError: on any other failure (bad status, network error,
                malformed payload).
        """
        try:
            async with httpx.AsyncClient(timeout=30.0) as client:
                profile = await self._fetch_profile(client, ticker)
                earnings_surprise = await self._fetch_earnings_surprise(
                    client, ticker
                )

                # NOTE(review): "pe" and "mktCap" come from the profile
                # payload; whether "revenueGrowth" is present on this
                # endpoint should be verified against an actual FMP
                # response — if absent it simply stays None.
                pe_ratio = self._safe_float(profile.get("pe"))
                revenue_growth = self._safe_float(profile.get("revenueGrowth"))
                market_cap = self._safe_float(profile.get("mktCap"))

                return FundamentalData(
                    ticker=ticker,
                    pe_ratio=pe_ratio,
                    revenue_growth=revenue_growth,
                    earnings_surprise=earnings_surprise,
                    market_cap=market_cap,
                    fetched_at=datetime.now(timezone.utc),
                )

        except (ProviderError, RateLimitError):
            raise
        except Exception as exc:
            logger.error("FMP provider error for %s: %s", ticker, exc)
            raise ProviderError(f"FMP provider error for {ticker}: {exc}") from exc

    async def _fetch_profile(self, client: httpx.AsyncClient, ticker: str) -> dict:
        """Fetch the company profile (P/E, revenue growth, market cap).

        The endpoint returns a one-element list; unwrap it to a dict and
        fall back to an empty dict on unexpected shapes.
        """
        url = f"{_FMP_BASE_URL}/profile/{ticker}"
        resp = await client.get(url, params={"apikey": self._api_key})
        self._check_response(resp, ticker, "profile")
        data = resp.json()
        if isinstance(data, list) and data:
            return data[0]
        return data if isinstance(data, dict) else {}

    async def _fetch_earnings_surprise(
        self, client: httpx.AsyncClient, ticker: str
    ) -> float | None:
        """Fetch the most recent earnings surprise as a percentage.

        BUGFIX: this previously returned the raw "actualEarningResult"
        (the actual EPS), not a surprise figure, despite the docstring.
        It now computes (actual - estimate) / |estimate| * 100 from the
        latest entry, returning None when either figure is missing or
        the estimate is zero.
        """
        url = f"{_FMP_BASE_URL}/earnings-surprises/{ticker}"
        resp = await client.get(url, params={"apikey": self._api_key})
        self._check_response(resp, ticker, "earnings-surprises")
        data = resp.json()
        if not (isinstance(data, list) and data):
            return None
        latest = data[0]
        actual = self._safe_float(latest.get("actualEarningResult"))
        estimate = self._safe_float(latest.get("estimatedEarning"))
        if actual is None or estimate is None or estimate == 0:
            return None
        return (actual - estimate) / abs(estimate) * 100.0

    def _check_response(
        self, resp: httpx.Response, ticker: str, endpoint: str
    ) -> None:
        """Raise appropriate errors for non-200 responses."""
        if resp.status_code == 429:
            raise RateLimitError(f"FMP rate limit hit for {ticker} ({endpoint})")
        if resp.status_code != 200:
            raise ProviderError(
                f"FMP {endpoint} error for {ticker}: HTTP {resp.status_code}"
            )

    @staticmethod
    def _safe_float(value: object) -> float | None:
        """Convert a value to float, returning None on failure."""
        if value is None:
            return None
        try:
            return float(value)
        except (TypeError, ValueError):
            return None
|
||||
90
app/providers/gemini_sentiment.py
Normal file
90
app/providers/gemini_sentiment.py
Normal file
@@ -0,0 +1,90 @@
|
||||
"""Gemini sentiment provider using google-genai with search grounding."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from google import genai
|
||||
from google.genai import types
|
||||
|
||||
from app.exceptions import ProviderError, RateLimitError
|
||||
from app.providers.protocol import SentimentData
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_SENTIMENT_PROMPT = """\
|
||||
Analyze the current market sentiment for the stock ticker {ticker}.
|
||||
Search the web for recent news articles, social media mentions, and analyst opinions.
|
||||
|
||||
Respond ONLY with a JSON object in this exact format (no markdown, no extra text):
|
||||
{{"classification": "<bullish|bearish|neutral>", "confidence": <0-100>, "reasoning": "<brief explanation>"}}
|
||||
|
||||
Rules:
|
||||
- classification must be exactly one of: bullish, bearish, neutral
|
||||
- confidence must be an integer from 0 to 100
|
||||
- reasoning should be a brief one-sentence explanation
|
||||
"""
|
||||
|
||||
VALID_CLASSIFICATIONS = {"bullish", "bearish", "neutral"}
|
||||
|
||||
|
||||
class GeminiSentimentProvider:
    """Fetches sentiment analysis from Gemini with search grounding."""

    def __init__(self, api_key: str, model: str = "gemini-2.0-flash") -> None:
        """Create the Gemini client.

        Raises:
            ProviderError: when *api_key* is empty/missing.
        """
        if not api_key:
            raise ProviderError("Gemini API key is required")
        self._client = genai.Client(api_key=api_key)
        self._model = model

    async def fetch_sentiment(self, ticker: str) -> SentimentData:
        """Send a structured prompt to Gemini and parse the JSON response.

        Raises:
            ProviderError: on unparseable JSON or an unknown classification.
            RateLimitError: when the error message mentions rate/quota/429.
        """
        try:
            config = types.GenerateContentConfig(
                tools=[types.Tool(google_search=types.GoogleSearch())],
                response_mime_type="application/json",
            )
            response = await self._client.aio.models.generate_content(
                model=self._model,
                contents=_SENTIMENT_PROMPT.format(ticker=ticker),
                config=config,
            )

            raw_text = response.text.strip()
            logger.debug("Gemini raw response for %s: %s", ticker, raw_text)
            parsed = json.loads(raw_text)

            # Reject anything outside the documented label set.
            classification = parsed.get("classification", "").lower()
            if classification not in VALID_CLASSIFICATIONS:
                raise ProviderError(
                    f"Invalid classification '{classification}' from Gemini for {ticker}"
                )

            # Clamp confidence into the documented 0-100 range.
            confidence = min(100, max(0, int(parsed.get("confidence", 50))))

            reasoning = parsed.get("reasoning", "")
            if reasoning:
                logger.info("Gemini sentiment for %s: %s (confidence=%d) — %s",
                            ticker, classification, confidence, reasoning)

            return SentimentData(
                ticker=ticker,
                classification=classification,
                confidence=confidence,
                source="gemini",
                timestamp=datetime.now(timezone.utc),
            )

        except json.JSONDecodeError as exc:
            logger.error("Failed to parse Gemini JSON for %s: %s", ticker, exc)
            raise ProviderError(f"Invalid JSON from Gemini for {ticker}") from exc
        except ProviderError:
            raise
        except Exception as exc:
            lowered = str(exc).lower()
            if "rate" in lowered or "quota" in lowered or "429" in lowered:
                raise RateLimitError(f"Gemini rate limit hit for {ticker}") from exc
            logger.error("Gemini provider error for %s: %s", ticker, exc)
            raise ProviderError(f"Gemini provider error for {ticker}: {exc}") from exc
|
||||
84
app/providers/protocol.py
Normal file
84
app/providers/protocol.py
Normal file
@@ -0,0 +1,84 @@
|
||||
"""Provider protocols and lightweight data transfer objects.
|
||||
|
||||
Protocols define the interface for external data providers.
|
||||
DTOs are simple dataclasses — NOT SQLAlchemy models — used to
|
||||
transfer data between providers and the service layer.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from dataclasses import dataclass
|
||||
from datetime import date, datetime
|
||||
from typing import Protocol
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Data Transfer Objects
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class OHLCVData:
|
||||
"""Lightweight OHLCV record returned by market data providers."""
|
||||
|
||||
ticker: str
|
||||
date: date
|
||||
open: float
|
||||
high: float
|
||||
low: float
|
||||
close: float
|
||||
volume: int
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class SentimentData:
|
||||
"""Sentiment analysis result returned by sentiment providers."""
|
||||
|
||||
ticker: str
|
||||
classification: str # "bullish" | "bearish" | "neutral"
|
||||
confidence: int # 0-100
|
||||
source: str
|
||||
timestamp: datetime
|
||||
|
||||
|
||||
@dataclass(frozen=True, slots=True)
|
||||
class FundamentalData:
|
||||
"""Fundamental metrics returned by fundamental providers."""
|
||||
|
||||
ticker: str
|
||||
pe_ratio: float | None
|
||||
revenue_growth: float | None
|
||||
earnings_surprise: float | None
|
||||
market_cap: float | None
|
||||
fetched_at: datetime
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Provider Protocols
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class MarketDataProvider(Protocol):
    """Structural interface for OHLCV market data providers.

    Implementations (e.g. the Alpaca provider) satisfy this protocol
    without inheriting from it.
    """

    async def fetch_ohlcv(
        self, ticker: str, start_date: date, end_date: date
    ) -> list[OHLCVData]:
        """Fetch OHLCV data for a ticker in a date range."""
        ...


class SentimentProvider(Protocol):
    """Structural interface for sentiment analysis providers."""

    async def fetch_sentiment(self, ticker: str) -> SentimentData:
        """Fetch current sentiment analysis for a ticker."""
        ...


class FundamentalProvider(Protocol):
    """Structural interface for fundamental data providers."""

    async def fetch_fundamentals(self, ticker: str) -> FundamentalData:
        """Fetch fundamental data for a ticker."""
        ...
|
||||
1
app/routers/__init__.py
Normal file
1
app/routers/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
193
app/routers/admin.py
Normal file
193
app/routers/admin.py
Normal file
@@ -0,0 +1,193 @@
|
||||
"""Admin router: user management, system settings, data cleanup, job control.
|
||||
|
||||
All endpoints require admin role.
|
||||
"""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_admin
|
||||
from app.models.user import User
|
||||
from app.schemas.admin import (
|
||||
CreateUserRequest,
|
||||
DataCleanupRequest,
|
||||
JobToggle,
|
||||
PasswordReset,
|
||||
RegistrationToggle,
|
||||
SystemSettingUpdate,
|
||||
UserManagement,
|
||||
)
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.services import admin_service
|
||||
|
||||
router = APIRouter(tags=["admin"])
|
||||
|
||||
|
||||
def _user_dict(user: User) -> dict:
    """Serialize a User ORM row into a JSON-safe dict for admin responses."""

    def _iso(ts) -> str | None:
        # Timestamps may be NULL; emit null rather than crash on isoformat.
        return ts.isoformat() if ts else None

    return {
        "id": user.id,
        "username": user.username,
        "role": user.role,
        "has_access": user.has_access,
        "created_at": _iso(user.created_at),
        "updated_at": _iso(user.updated_at),
    }


# ---------------------------------------------------------------------------
# User management
# ---------------------------------------------------------------------------

@router.get("/admin/users", response_model=APIEnvelope)
async def list_users(
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """List all user accounts."""
    users = await admin_service.list_users(db)
    payload = [_user_dict(u) for u in users]
    return APIEnvelope(status="success", data=payload)


@router.post("/admin/users", response_model=APIEnvelope, status_code=201)
async def create_user(
    body: CreateUserRequest,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Create a new user account."""
    created = await admin_service.create_user(
        db, body.username, body.password, body.role, body.has_access
    )
    return APIEnvelope(status="success", data=_user_dict(created))


@router.put("/admin/users/{user_id}/access", response_model=APIEnvelope)
async def set_user_access(
    user_id: int,
    body: UserManagement,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Grant or revoke API access for a user."""
    updated = await admin_service.set_user_access(db, user_id, body.has_access)
    return APIEnvelope(status="success", data=_user_dict(updated))


@router.put("/admin/users/{user_id}/password", response_model=APIEnvelope)
async def reset_password(
    user_id: int,
    body: PasswordReset,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Reset a user's password."""
    updated = await admin_service.reset_password(db, user_id, body.new_password)
    return APIEnvelope(status="success", data=_user_dict(updated))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Registration toggle
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.put("/admin/settings/registration", response_model=APIEnvelope)
async def toggle_registration(
    body: RegistrationToggle,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Enable or disable user registration."""
    setting = await admin_service.toggle_registration(db, body.enabled)
    payload = {"key": setting.key, "value": setting.value}
    return APIEnvelope(status="success", data=payload)


# ---------------------------------------------------------------------------
# System settings
# ---------------------------------------------------------------------------

@router.get("/admin/settings", response_model=APIEnvelope)
async def list_settings(
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """List all system settings."""
    settings_list = await admin_service.list_settings(db)
    payload = []
    for s in settings_list:
        payload.append(
            {
                "key": s.key,
                "value": s.value,
                "updated_at": s.updated_at.isoformat() if s.updated_at else None,
            }
        )
    return APIEnvelope(status="success", data=payload)


@router.put("/admin/settings/{key}", response_model=APIEnvelope)
async def update_setting(
    key: str,
    body: SystemSettingUpdate,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Create or update a system setting."""
    setting = await admin_service.update_setting(db, key, body.value)
    payload = {
        "key": setting.key,
        "value": setting.value,
        "updated_at": setting.updated_at.isoformat() if setting.updated_at else None,
    }
    return APIEnvelope(status="success", data=payload)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Data cleanup
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@router.post("/admin/data/cleanup", response_model=APIEnvelope)
async def cleanup_data(
    body: DataCleanupRequest,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Delete OHLCV, sentiment, and fundamental data older than N days."""
    counts = await admin_service.cleanup_data(db, body.older_than_days)
    return APIEnvelope(status="success", data=counts)


# ---------------------------------------------------------------------------
# Job control
# ---------------------------------------------------------------------------

@router.get("/admin/jobs", response_model=APIEnvelope)
async def list_jobs(
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """List all scheduled jobs with their current status."""
    jobs = await admin_service.list_jobs(db)
    return APIEnvelope(status="success", data=jobs)


@router.post("/admin/jobs/{job_name}/trigger", response_model=APIEnvelope)
async def trigger_job(
    job_name: str,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Trigger a manual job run (placeholder)."""
    result = await admin_service.trigger_job(db, job_name)
    return APIEnvelope(status="success", data=result)


@router.put("/admin/jobs/{job_name}/toggle", response_model=APIEnvelope)
async def toggle_job(
    job_name: str,
    body: JobToggle,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Enable or disable a scheduled job (placeholder)."""
    setting = await admin_service.toggle_job(db, job_name, body.enabled)
    payload = {"key": setting.key, "value": setting.value}
    return APIEnvelope(status="success", data=payload)
|
||||
34
app/routers/auth.py
Normal file
34
app/routers/auth.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""Auth router: registration and login endpoints."""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db
|
||||
from app.schemas.auth import LoginRequest, RegisterRequest, TokenResponse
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.services import auth_service
|
||||
|
||||
router = APIRouter(tags=["auth"])
|
||||
|
||||
|
||||
@router.post("/auth/register", response_model=APIEnvelope)
async def register(body: RegisterRequest, db: AsyncSession = Depends(get_db)):
    """Public endpoint — register a new user."""
    user = await auth_service.register(db, body.username, body.password)
    payload = {
        "id": user.id,
        "username": user.username,
        "role": user.role,
        "has_access": user.has_access,
    }
    return APIEnvelope(status="success", data=payload)


@router.post("/auth/login", response_model=APIEnvelope)
async def login(body: LoginRequest, db: AsyncSession = Depends(get_db)):
    """Public endpoint — login and receive a JWT."""
    token = await auth_service.login(db, body.username, body.password)
    return APIEnvelope(
        status="success",
        data=TokenResponse(access_token=token).model_dump(),
    )
|
||||
35
app/routers/fundamentals.py
Normal file
35
app/routers/fundamentals.py
Normal file
@@ -0,0 +1,35 @@
|
||||
"""Fundamentals router — fundamental data endpoints."""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.fundamental import FundamentalResponse
|
||||
from app.services.fundamental_service import get_fundamental
|
||||
|
||||
router = APIRouter(tags=["fundamentals"])
|
||||
|
||||
|
||||
@router.get("/fundamentals/{symbol}", response_model=APIEnvelope)
async def read_fundamentals(
    symbol: str,
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get latest fundamental data for a symbol.

    When no record exists, an empty FundamentalResponse (metric fields
    defaulted) is returned instead of a 404.
    """
    normalized = symbol.strip().upper()
    record = await get_fundamental(db, symbol)

    if record is None:
        data = FundamentalResponse(symbol=normalized)
    else:
        data = FundamentalResponse(
            symbol=normalized,
            pe_ratio=record.pe_ratio,
            revenue_growth=record.revenue_growth,
            earnings_surprise=record.earnings_surprise,
            market_cap=record.market_cap,
            fetched_at=record.fetched_at,
        )

    return APIEnvelope(status="success", data=data.model_dump())
|
||||
36
app/routers/health.py
Normal file
36
app/routers/health.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Health check endpoint — unauthenticated."""
|
||||
|
||||
import logging
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from fastapi.responses import JSONResponse
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db
|
||||
from app.schemas.common import APIEnvelope
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(tags=["health"])
|
||||
|
||||
|
||||
@router.get("/health")
async def health_check(db: AsyncSession = Depends(get_db)) -> APIEnvelope:
    """Return service health including database connectivity.

    Answers 200 with a healthy payload when a trivial query succeeds,
    otherwise 503 with the standard envelope shape as a raw JSONResponse.
    """
    try:
        await db.execute(text("SELECT 1"))
    except Exception:
        logger.exception("Health check: database unreachable")
        return JSONResponse(
            status_code=503,
            content={
                "status": "error",
                "data": None,
                "error": "Database unreachable",
            },
        )

    return APIEnvelope(
        status="success",
        data={"status": "healthy", "database": "connected"},
    )
|
||||
64
app/routers/indicators.py
Normal file
64
app/routers/indicators.py
Normal file
@@ -0,0 +1,64 @@
|
||||
"""Indicators router — technical analysis endpoints."""
|
||||
|
||||
from datetime import date
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.indicator import (
|
||||
EMACrossResponse,
|
||||
EMACrossResult,
|
||||
IndicatorResponse,
|
||||
IndicatorResult,
|
||||
)
|
||||
from app.services.indicator_service import get_ema_cross, get_indicator
|
||||
|
||||
router = APIRouter(tags=["indicators"])
|
||||
|
||||
|
||||
# NOTE: ema-cross must be registered BEFORE {indicator_type} to avoid
|
||||
# FastAPI matching "ema-cross" as an indicator_type path parameter.
|
||||
|
||||
|
||||
@router.get("/indicators/{symbol}/ema-cross", response_model=APIEnvelope)
async def read_ema_cross(
    symbol: str,
    start_date: date | None = Query(None),
    end_date: date | None = Query(None),
    short_period: int = Query(20),
    long_period: int = Query(50),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Compute EMA cross signal for a symbol."""
    cross = await get_ema_cross(
        db, symbol, start_date, end_date, short_period, long_period
    )
    payload = EMACrossResponse(
        symbol=symbol.upper(),
        ema_cross=EMACrossResult(**cross),
    )
    return APIEnvelope(status="success", data=payload.model_dump())


@router.get("/indicators/{symbol}/{indicator_type}", response_model=APIEnvelope)
async def read_indicator(
    symbol: str,
    indicator_type: str,
    start_date: date | None = Query(None),
    end_date: date | None = Query(None),
    period: int | None = Query(None),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Compute a technical indicator for a symbol."""
    computed = await get_indicator(
        db, symbol, indicator_type, start_date, end_date, period
    )
    payload = IndicatorResponse(
        symbol=symbol.upper(),
        indicator=IndicatorResult(**computed),
    )
    return APIEnvelope(status="success", data=payload.model_dump())
|
||||
127
app/routers/ingestion.py
Normal file
127
app/routers/ingestion.py
Normal file
@@ -0,0 +1,127 @@
|
||||
"""Ingestion router: trigger data fetches from the market data provider.
|
||||
|
||||
Provides both a single-source OHLCV endpoint and a comprehensive
|
||||
fetch-all endpoint that collects OHLCV + sentiment + fundamentals
|
||||
in one call with per-source status reporting.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from datetime import date
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import settings
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.exceptions import ProviderError
|
||||
from app.models.user import User
|
||||
from app.providers.alpaca import AlpacaOHLCVProvider
|
||||
from app.providers.fmp import FMPFundamentalProvider
|
||||
from app.providers.gemini_sentiment import GeminiSentimentProvider
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.services import fundamental_service, ingestion_service, sentiment_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(tags=["ingestion"])
|
||||
|
||||
|
||||
def _get_provider() -> AlpacaOHLCVProvider:
    """Build the OHLCV provider from current settings.

    Raises:
        ProviderError: when either Alpaca credential is unset.
    """
    key, secret = settings.alpaca_api_key, settings.alpaca_api_secret
    if not key or not secret:
        raise ProviderError("Alpaca API credentials not configured")
    return AlpacaOHLCVProvider(key, secret)
|
||||
|
||||
|
||||
async def _collect_ohlcv(
    db: AsyncSession,
    symbol: str,
    start_date: date | None,
    end_date: date | None,
) -> dict:
    """Fetch and ingest OHLCV bars for *symbol*; never raises.

    Returns a status dict: {"status", "records", "message"}.
    """
    try:
        provider = _get_provider()
        result = await ingestion_service.fetch_and_ingest(
            db, provider, symbol, start_date, end_date
        )
        return {
            "status": "ok" if result.status in ("complete", "partial") else "error",
            "records": result.records_ingested,
            "message": result.message,
        }
    except Exception as exc:
        logger.error("OHLCV fetch failed for %s: %s", symbol, exc)
        return {"status": "error", "records": 0, "message": str(exc)}


async def _collect_sentiment(db: AsyncSession, symbol: str) -> dict:
    """Fetch and store Gemini sentiment for *symbol*; never raises.

    Reports status "skipped" when no Gemini API key is configured.
    """
    if not settings.gemini_api_key:
        return {
            "status": "skipped",
            "message": "Gemini API key not configured",
        }
    try:
        sent_provider = GeminiSentimentProvider(
            settings.gemini_api_key, settings.gemini_model
        )
        data = await sent_provider.fetch_sentiment(symbol)
        await sentiment_service.store_sentiment(
            db,
            symbol=symbol,
            classification=data.classification,
            confidence=data.confidence,
            source=data.source,
            timestamp=data.timestamp,
        )
        return {
            "status": "ok",
            "classification": data.classification,
            "confidence": data.confidence,
            "message": None,
        }
    except Exception as exc:
        logger.error("Sentiment fetch failed for %s: %s", symbol, exc)
        return {"status": "error", "message": str(exc)}


async def _collect_fundamentals(db: AsyncSession, symbol: str) -> dict:
    """Fetch and store FMP fundamentals for *symbol*; never raises.

    Reports status "skipped" when no FMP API key is configured.
    """
    if not settings.fmp_api_key:
        return {
            "status": "skipped",
            "message": "FMP API key not configured",
        }
    try:
        fmp_provider = FMPFundamentalProvider(settings.fmp_api_key)
        fdata = await fmp_provider.fetch_fundamentals(symbol)
        await fundamental_service.store_fundamental(
            db,
            symbol=symbol,
            pe_ratio=fdata.pe_ratio,
            revenue_growth=fdata.revenue_growth,
            earnings_surprise=fdata.earnings_surprise,
            market_cap=fdata.market_cap,
        )
        return {"status": "ok", "message": None}
    except Exception as exc:
        logger.error("Fundamentals fetch failed for %s: %s", symbol, exc)
        return {"status": "error", "message": str(exc)}


@router.post("/ingestion/fetch/{symbol}", response_model=APIEnvelope)
async def fetch_symbol(
    symbol: str,
    start_date: date | None = Query(None, description="Start date (YYYY-MM-DD)"),
    end_date: date | None = Query(None, description="End date (YYYY-MM-DD)"),
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """Fetch all data sources for a ticker: OHLCV, sentiment, and fundamentals.

    Returns a per-source breakdown so the frontend can show exactly what
    succeeded and what failed.  The endpoint always answers "success";
    individual failures appear in the per-source dicts.
    """
    symbol_upper = symbol.strip().upper()

    # Each collector swallows its own errors into a status dict, so one
    # failing source never blocks the others.
    sources: dict[str, dict] = {
        "ohlcv": await _collect_ohlcv(db, symbol_upper, start_date, end_date),
        "sentiment": await _collect_sentiment(db, symbol_upper),
        "fundamentals": await _collect_fundamentals(db, symbol_upper),
    }

    # Always return success — per-source breakdown tells the full story
    return APIEnvelope(
        status="success",
        data={"symbol": symbol_upper, "sources": sources},
        error=None,
    )
|
||||
56
app/routers/ohlcv.py
Normal file
56
app/routers/ohlcv.py
Normal file
@@ -0,0 +1,56 @@
|
||||
"""OHLCV router: endpoints for storing and querying price data."""
|
||||
|
||||
from datetime import date
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.models.user import User
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.ohlcv import OHLCVCreate, OHLCVResponse
|
||||
from app.services import price_service
|
||||
|
||||
router = APIRouter(tags=["ohlcv"])
|
||||
|
||||
|
||||
@router.post("/ohlcv", response_model=APIEnvelope)
async def create_ohlcv(
    body: OHLCVCreate,
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """Upsert an OHLCV record for a ticker and date."""
    record = await price_service.upsert_ohlcv(
        db,
        symbol=body.symbol,
        record_date=body.date,
        open_=body.open,
        high=body.high,
        low=body.low,
        close=body.close,
        volume=body.volume,
    )
    payload = OHLCVResponse.model_validate(record).model_dump(mode="json")
    return APIEnvelope(status="success", data=payload)


@router.get("/ohlcv/{symbol}", response_model=APIEnvelope)
async def get_ohlcv(
    symbol: str,
    start_date: date | None = Query(None, description="Start date (YYYY-MM-DD)"),
    end_date: date | None = Query(None, description="End date (YYYY-MM-DD)"),
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """Query OHLCV records for a ticker, optionally filtered by date range."""
    records = await price_service.query_ohlcv(db, symbol, start_date, end_date)
    payload = [
        OHLCVResponse.model_validate(r).model_dump(mode="json") for r in records
    ]
    return APIEnvelope(status="success", data=payload)
|
||||
75
app/routers/scores.py
Normal file
75
app/routers/scores.py
Normal file
@@ -0,0 +1,75 @@
|
||||
"""Scores router — scoring engine endpoints."""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.score import (
|
||||
DimensionScoreResponse,
|
||||
RankingEntry,
|
||||
RankingResponse,
|
||||
ScoreResponse,
|
||||
WeightUpdateRequest,
|
||||
)
|
||||
from app.services.scoring_service import get_rankings, get_score, update_weights
|
||||
|
||||
router = APIRouter(tags=["scores"])
|
||||
|
||||
|
||||
@router.get("/scores/{symbol}", response_model=APIEnvelope)
async def read_score(
    symbol: str,
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get composite + dimension scores for a symbol. Recomputes stale scores."""
    result = await get_score(db, symbol)

    # Build per-dimension models first, then assemble the full response.
    dimensions = [DimensionScoreResponse(**entry) for entry in result["dimensions"]]
    response = ScoreResponse(
        symbol=result["symbol"],
        composite_score=result["composite_score"],
        composite_stale=result["composite_stale"],
        weights=result["weights"],
        dimensions=dimensions,
        missing_dimensions=result["missing_dimensions"],
        computed_at=result["computed_at"],
    )
    return APIEnvelope(status="success", data=response.model_dump(mode="json"))
|
||||
|
||||
|
||||
@router.get("/rankings", response_model=APIEnvelope)
async def read_rankings(
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get all tickers ranked by composite score descending."""
    result = await get_rankings(db)

    entries = []
    for row in result["rankings"]:
        dims = [DimensionScoreResponse(**d) for d in row["dimensions"]]
        entries.append(
            RankingEntry(
                symbol=row["symbol"],
                composite_score=row["composite_score"],
                dimensions=dims,
            )
        )

    payload = RankingResponse(rankings=entries, weights=result["weights"])
    return APIEnvelope(status="success", data=payload.model_dump(mode="json"))
|
||||
|
||||
|
||||
@router.put("/scores/weights", response_model=APIEnvelope)
async def update_score_weights(
    body: WeightUpdateRequest,
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Update dimension weights and recompute all composite scores."""
    applied = await update_weights(db, body.weights)
    return APIEnvelope(status="success", data={"weights": applied})
|
||||
46
app/routers/sentiment.py
Normal file
46
app/routers/sentiment.py
Normal file
@@ -0,0 +1,46 @@
|
||||
"""Sentiment router — sentiment data endpoints."""
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.sentiment import SentimentResponse, SentimentScoreResult
|
||||
from app.services.sentiment_service import (
|
||||
compute_sentiment_dimension_score,
|
||||
get_sentiment_scores,
|
||||
)
|
||||
|
||||
router = APIRouter(tags=["sentiment"])
|
||||
|
||||
|
||||
@router.get("/sentiment/{symbol}", response_model=APIEnvelope)
async def read_sentiment(
    symbol: str,
    lookback_hours: float = Query(24, gt=0, description="Lookback window in hours"),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get recent sentiment scores and computed dimension score for a symbol.

    Args:
        symbol: Ticker symbol; normalized (stripped/uppercased) in the response.
        lookback_hours: Window, in hours, for stored sentiment scores.

    Returns:
        APIEnvelope wrapping a SentimentResponse payload.
    """
    scores = await get_sentiment_scores(db, symbol, lookback_hours)
    dimension_score = await compute_sentiment_dimension_score(
        db, symbol, lookback_hours
    )

    data = SentimentResponse(
        symbol=symbol.strip().upper(),
        scores=[
            SentimentScoreResult(
                id=s.id,
                classification=s.classification,
                confidence=s.confidence,
                source=s.source,
                timestamp=s.timestamp,
            )
            for s in scores
        ],
        count=len(scores),
        # Round only when a score exists; None means "no data in window".
        dimension_score=round(dimension_score, 2) if dimension_score is not None else None,
        lookback_hours=lookback_hours,
    )
    # mode="json" serializes timestamps to ISO strings, matching the
    # envelope payloads produced by the other routers in this package.
    return APIEnvelope(status="success", data=data.model_dump(mode="json"))
|
||||
38
app/routers/sr_levels.py
Normal file
38
app/routers/sr_levels.py
Normal file
@@ -0,0 +1,38 @@
|
||||
"""S/R Levels router — support/resistance detection endpoints."""
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.sr_level import SRLevelResponse, SRLevelResult
|
||||
from app.services.sr_service import get_sr_levels
|
||||
|
||||
router = APIRouter(tags=["sr-levels"])
|
||||
|
||||
|
||||
@router.get("/sr-levels/{symbol}", response_model=APIEnvelope)
async def read_sr_levels(
    symbol: str,
    tolerance: float = Query(0.005, ge=0, le=0.1, description="Merge tolerance (default 0.5%)"),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get support/resistance levels for a symbol, sorted by strength descending.

    Args:
        symbol: Ticker symbol; normalized (stripped/uppercased) in the response.
        tolerance: Relative price tolerance used when merging nearby levels.

    Returns:
        APIEnvelope wrapping an SRLevelResponse payload.
    """
    levels = await get_sr_levels(db, symbol, tolerance)
    data = SRLevelResponse(
        # .strip() added for consistency with the sentiment router's
        # symbol normalization.
        symbol=symbol.strip().upper(),
        levels=[
            SRLevelResult(
                id=lvl.id,
                price_level=lvl.price_level,
                type=lvl.type,
                strength=lvl.strength,
                detection_method=lvl.detection_method,
                created_at=lvl.created_at,
            )
            for lvl in levels
        ],
        count=len(levels),
    )
    # mode="json" serializes created_at to an ISO string, matching the
    # envelope payloads produced by the other routers in this package.
    return APIEnvelope(status="success", data=data.model_dump(mode="json"))
|
||||
53
app/routers/tickers.py
Normal file
53
app/routers/tickers.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""Tickers router: CRUD endpoints for the Ticker Registry."""
|
||||
|
||||
from fastapi import APIRouter, Depends
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.models.user import User
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.ticker import TickerCreate, TickerResponse
|
||||
from app.services import ticker_service
|
||||
|
||||
router = APIRouter(tags=["tickers"])
|
||||
|
||||
|
||||
@router.post("/tickers", response_model=APIEnvelope)
async def create_ticker(
    body: TickerCreate,
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """Add a new ticker to the registry."""
    created = await ticker_service.add_ticker(db, body.symbol)
    payload = TickerResponse.model_validate(created).model_dump(mode="json")
    return APIEnvelope(status="success", data=payload)
|
||||
|
||||
|
||||
@router.get("/tickers", response_model=APIEnvelope)
async def list_tickers(
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """List all tracked tickers sorted alphabetically."""
    rows = await ticker_service.list_tickers(db)
    serialized = [
        TickerResponse.model_validate(row).model_dump(mode="json") for row in rows
    ]
    return APIEnvelope(status="success", data=serialized)
|
||||
|
||||
|
||||
@router.delete("/tickers/{symbol}", response_model=APIEnvelope)
async def delete_ticker(
    symbol: str,
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """Delete a ticker and all associated data.

    Cascade behavior is handled inside the ticker service; the response
    carries no payload beyond the success status.
    """
    await ticker_service.delete_ticker(db, symbol)
    return APIEnvelope(status="success", data=None)
|
||||
28
app/routers/trades.py
Normal file
28
app/routers/trades.py
Normal file
@@ -0,0 +1,28 @@
|
||||
"""Trades router — R:R scanner trade setup endpoints."""
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.trade_setup import TradeSetupResponse
|
||||
from app.services.rr_scanner_service import get_trade_setups
|
||||
|
||||
router = APIRouter(tags=["trades"])
|
||||
|
||||
|
||||
@router.get("/trades", response_model=APIEnvelope)
async def list_trade_setups(
    direction: str | None = Query(
        None,
        description="Filter by direction: long or short",
        # Validate at the boundary: anything other than long/short is
        # rejected with a 422 instead of being passed to the service.
        pattern="^(long|short)$",
    ),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get all trade setups sorted by R:R desc, secondary composite desc.

    Optional direction filter (long/short); omitting it returns setups
    for both directions.
    """
    rows = await get_trade_setups(db, direction=direction)
    data = [TradeSetupResponse(**r).model_dump(mode="json") for r in rows]
    return APIEnvelope(status="success", data=data)
|
||||
59
app/routers/watchlist.py
Normal file
59
app/routers/watchlist.py
Normal file
@@ -0,0 +1,59 @@
|
||||
"""Watchlist router — manage user's curated watchlist."""
|
||||
|
||||
from fastapi import APIRouter, Depends, Query
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.dependencies import get_db, require_access
|
||||
from app.models.user import User
|
||||
from app.schemas.common import APIEnvelope
|
||||
from app.schemas.watchlist import WatchlistEntryResponse
|
||||
from app.services.watchlist_service import (
|
||||
add_manual_entry,
|
||||
get_watchlist,
|
||||
remove_entry,
|
||||
)
|
||||
|
||||
router = APIRouter(tags=["watchlist"])
|
||||
|
||||
|
||||
@router.get("/watchlist", response_model=APIEnvelope)
async def list_watchlist(
    sort_by: str = Query(
        "composite",
        description=(
            "Sort by: composite, rr, or a dimension name "
            "(technical, sr_quality, sentiment, fundamental, momentum)"
        ),
    ),
    user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get current user's watchlist with enriched data."""
    entries = await get_watchlist(db, user.id, sort_by=sort_by)
    serialized = [
        WatchlistEntryResponse(**entry).model_dump(mode="json")
        for entry in entries
    ]
    return APIEnvelope(status="success", data=serialized)
|
||||
|
||||
|
||||
@router.post("/watchlist/{symbol}", response_model=APIEnvelope)
async def add_to_watchlist(
    symbol: str,
    user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Add a manual entry to the watchlist."""
    entry = await add_manual_entry(db, user.id, symbol)
    normalized = symbol.strip().upper()
    return APIEnvelope(
        status="success",
        data={"symbol": normalized, "entry_type": entry.entry_type},
    )
|
||||
|
||||
|
||||
@router.delete("/watchlist/{symbol}", response_model=APIEnvelope)
async def remove_from_watchlist(
    symbol: str,
    user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Remove an entry from the watchlist.

    Deletion is scoped to the authenticated user's own entries; the
    response carries no payload beyond the success status.
    """
    await remove_entry(db, user.id, symbol)
    return APIEnvelope(status="success", data=None)
|
||||
437
app/scheduler.py
Normal file
437
app/scheduler.py
Normal file
@@ -0,0 +1,437 @@
|
||||
"""APScheduler job definitions and FastAPI lifespan integration.
|
||||
|
||||
Defines four scheduled jobs:
|
||||
- Data Collector (OHLCV fetch for all tickers)
|
||||
- Sentiment Collector (sentiment for all tickers)
|
||||
- Fundamental Collector (fundamentals for all tickers)
|
||||
- R:R Scanner (trade setup scan for all tickers)
|
||||
|
||||
Each job processes tickers independently, logs errors as structured JSON,
|
||||
handles rate limits by recording the last successful ticker, and checks
|
||||
SystemSetting for enabled/disabled state.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import date, timedelta
|
||||
|
||||
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import settings
|
||||
from app.database import async_session_factory
|
||||
from app.models.settings import SystemSetting
|
||||
from app.models.ticker import Ticker
|
||||
from app.providers.alpaca import AlpacaOHLCVProvider
|
||||
from app.providers.fmp import FMPFundamentalProvider
|
||||
from app.providers.gemini_sentiment import GeminiSentimentProvider
|
||||
from app.services import fundamental_service, ingestion_service, sentiment_service
|
||||
from app.services.rr_scanner_service import scan_all_tickers
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Module-level scheduler instance, shared by configure_scheduler() and the
# application's startup/shutdown hooks.
scheduler = AsyncIOScheduler()

# Track last successful ticker per job for rate-limit resume: when a run is
# cut short by a provider rate limit, the next run starts from the ticker
# after the one recorded here (see _resume_tickers). Reset to None when a
# run completes fully. Per-process, in-memory state only — not persisted.
_last_successful: dict[str, str | None] = {
    "data_collector": None,
    "sentiment_collector": None,
    "fundamental_collector": None,
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def _log_job_error(job_name: str, ticker: str, error: Exception) -> None:
    """Log a per-ticker job failure as a structured JSON line."""
    payload = {
        "event": "job_error",
        "job": job_name,
        "ticker": ticker,
        "error_type": type(error).__name__,
        "message": str(error),
    }
    logger.error(json.dumps(payload))
|
||||
|
||||
|
||||
async def _is_job_enabled(db: AsyncSession, job_name: str) -> bool:
    """Check SystemSetting for job enabled state. Defaults to True when unset."""
    row = await db.execute(
        select(SystemSetting).where(SystemSetting.key == f"job_{job_name}_enabled")
    )
    setting = row.scalar_one_or_none()
    # Missing row means the job has never been toggled — treat as enabled.
    return True if setting is None else setting.value.lower() == "true"
|
||||
|
||||
|
||||
async def _get_all_tickers(db: AsyncSession) -> list[str]:
    """Return all tracked ticker symbols sorted alphabetically."""
    rows = await db.execute(select(Ticker.symbol).order_by(Ticker.symbol))
    return [symbol for symbol in rows.scalars()]
|
||||
|
||||
|
||||
def _resume_tickers(symbols: list[str], job_name: str) -> list[str]:
    """Reorder tickers to resume after the last successful one.

    If a previous run was rate-limited, start from the ticker after the
    last successful one and wrap around so every symbol is still visited.
    Otherwise return the list unchanged.
    """
    checkpoint = _last_successful.get(job_name)
    if checkpoint is None or checkpoint not in symbols:
        return symbols
    pivot = symbols.index(checkpoint) + 1
    return symbols[pivot:] + symbols[:pivot]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Job: Data Collector (OHLCV)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
async def collect_ohlcv() -> None:
    """Fetch latest daily OHLCV for all tracked tickers.

    Uses AlpacaOHLCVProvider. Processes each ticker independently so one
    failure cannot abort the whole run. On rate limit (a "partial" ingest
    result), records the last successful ticker so the next run resumes
    from there via _resume_tickers.
    """
    job_name = "data_collector"
    logger.info(json.dumps({"event": "job_start", "job": job_name}))

    # Short-lived session just for the enabled check and ticker listing.
    async with async_session_factory() as db:
        if not await _is_job_enabled(db, job_name):
            logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"}))
            return

        symbols = await _get_all_tickers(db)
        if not symbols:
            logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": 0}))
            return

    # Reorder for rate-limit resume
    symbols = _resume_tickers(symbols, job_name)

    # Build provider (skip if keys not configured)
    if not settings.alpaca_api_key or not settings.alpaca_api_secret:
        logger.warning(json.dumps({"event": "job_skipped", "job": job_name, "reason": "alpaca keys not configured"}))
        return

    try:
        provider = AlpacaOHLCVProvider(settings.alpaca_api_key, settings.alpaca_api_secret)
    except Exception as exc:
        logger.error(json.dumps({"event": "job_error", "job": job_name, "error_type": type(exc).__name__, "message": str(exc)}))
        return

    end_date = date.today()
    start_date = end_date - timedelta(days=5)  # Fetch last 5 days to catch up
    processed = 0

    # One fresh session per ticker keeps each ingest transactionally isolated.
    for symbol in symbols:
        async with async_session_factory() as db:
            try:
                result = await ingestion_service.fetch_and_ingest(
                    db, provider, symbol, start_date=start_date, end_date=end_date,
                )
                _last_successful[job_name] = symbol
                processed += 1
                logger.info(json.dumps({
                    "event": "ticker_collected",
                    "job": job_name,
                    "ticker": symbol,
                    "status": result.status,
                    "records": result.records_ingested,
                }))
                if result.status == "partial":
                    # Rate limited — stop and resume next run
                    logger.warning(json.dumps({
                        "event": "rate_limited",
                        "job": job_name,
                        "ticker": symbol,
                        "processed": processed,
                    }))
                    return
            except Exception as exc:
                # Non-rate-limit failure: log and continue with the next ticker.
                _log_job_error(job_name, symbol, exc)

    # Reset resume pointer on full completion
    _last_successful[job_name] = None
    logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": processed}))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Job: Sentiment Collector
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
async def collect_sentiment() -> None:
    """Fetch sentiment for all tracked tickers via Gemini.

    Processes each ticker independently. Rate limits are detected
    heuristically by substring match on the exception message ("rate",
    "quota", "429"); on a hit the run stops and the last successful
    ticker is recorded so the next run resumes from there.
    """
    job_name = "sentiment_collector"
    logger.info(json.dumps({"event": "job_start", "job": job_name}))

    # Short-lived session just for the enabled check and ticker listing.
    async with async_session_factory() as db:
        if not await _is_job_enabled(db, job_name):
            logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"}))
            return

        symbols = await _get_all_tickers(db)
        if not symbols:
            logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": 0}))
            return

    symbols = _resume_tickers(symbols, job_name)

    if not settings.gemini_api_key:
        logger.warning(json.dumps({"event": "job_skipped", "job": job_name, "reason": "gemini key not configured"}))
        return

    try:
        provider = GeminiSentimentProvider(settings.gemini_api_key, settings.gemini_model)
    except Exception as exc:
        logger.error(json.dumps({"event": "job_error", "job": job_name, "error_type": type(exc).__name__, "message": str(exc)}))
        return

    processed = 0

    # One fresh session per ticker keeps each store transactionally isolated.
    for symbol in symbols:
        async with async_session_factory() as db:
            try:
                data = await provider.fetch_sentiment(symbol)
                await sentiment_service.store_sentiment(
                    db,
                    symbol=symbol,
                    classification=data.classification,
                    confidence=data.confidence,
                    source=data.source,
                    timestamp=data.timestamp,
                )
                _last_successful[job_name] = symbol
                processed += 1
                logger.info(json.dumps({
                    "event": "ticker_collected",
                    "job": job_name,
                    "ticker": symbol,
                    "classification": data.classification,
                    "confidence": data.confidence,
                }))
            except Exception as exc:
                # Heuristic rate-limit detection — message substring match.
                msg = str(exc).lower()
                if "rate" in msg or "quota" in msg or "429" in msg:
                    logger.warning(json.dumps({
                        "event": "rate_limited",
                        "job": job_name,
                        "ticker": symbol,
                        "processed": processed,
                    }))
                    return
                _log_job_error(job_name, symbol, exc)

    # Reset resume pointer on full completion.
    _last_successful[job_name] = None
    logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": processed}))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Job: Fundamental Collector
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
async def collect_fundamentals() -> None:
    """Fetch fundamentals for all tracked tickers via FMP.

    Processes each ticker independently. Rate limits are detected
    heuristically by substring match on the exception message; on a hit
    the run stops and the last successful ticker is recorded so the next
    run resumes from there.
    """
    job_name = "fundamental_collector"
    logger.info(json.dumps({"event": "job_start", "job": job_name}))

    # Short-lived session just for the enabled check and ticker listing.
    async with async_session_factory() as db:
        if not await _is_job_enabled(db, job_name):
            logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"}))
            return

        symbols = await _get_all_tickers(db)
        if not symbols:
            logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": 0}))
            return

    symbols = _resume_tickers(symbols, job_name)

    if not settings.fmp_api_key:
        logger.warning(json.dumps({"event": "job_skipped", "job": job_name, "reason": "fmp key not configured"}))
        return

    try:
        provider = FMPFundamentalProvider(settings.fmp_api_key)
    except Exception as exc:
        logger.error(json.dumps({"event": "job_error", "job": job_name, "error_type": type(exc).__name__, "message": str(exc)}))
        return

    processed = 0

    # One fresh session per ticker keeps each store transactionally isolated.
    for symbol in symbols:
        async with async_session_factory() as db:
            try:
                data = await provider.fetch_fundamentals(symbol)
                await fundamental_service.store_fundamental(
                    db,
                    symbol=symbol,
                    pe_ratio=data.pe_ratio,
                    revenue_growth=data.revenue_growth,
                    earnings_surprise=data.earnings_surprise,
                    market_cap=data.market_cap,
                )
                _last_successful[job_name] = symbol
                processed += 1
                logger.info(json.dumps({
                    "event": "ticker_collected",
                    "job": job_name,
                    "ticker": symbol,
                }))
            except Exception as exc:
                # Heuristic rate-limit detection. "quota" added to match the
                # sentiment collector's check — previously only "rate"/"429"
                # were recognized, so quota-style messages fell through to
                # per-ticker error logging instead of stopping the run.
                msg = str(exc).lower()
                if "rate" in msg or "quota" in msg or "429" in msg:
                    logger.warning(json.dumps({
                        "event": "rate_limited",
                        "job": job_name,
                        "ticker": symbol,
                        "processed": processed,
                    }))
                    return
                _log_job_error(job_name, symbol, exc)

    # Reset resume pointer on full completion.
    _last_successful[job_name] = None
    logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": processed}))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Job: R:R Scanner
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
async def scan_rr() -> None:
    """Scan all tickers for trade setups meeting the R:R threshold.

    Uses rr_scanner_service.scan_all_tickers which already handles
    per-ticker error isolation internally.
    """
    job_name = "rr_scanner"
    logger.info(json.dumps({"event": "job_start", "job": job_name}))

    async with async_session_factory() as db:
        if not await _is_job_enabled(db, job_name):
            logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"}))
            return

        try:
            setups = await scan_all_tickers(
                db, rr_threshold=settings.default_rr_threshold,
            )
            summary = {
                "event": "job_complete",
                "job": job_name,
                "setups_found": len(setups),
            }
            logger.info(json.dumps(summary))
        except Exception as exc:
            failure = {
                "event": "job_error",
                "job": job_name,
                "error_type": type(exc).__name__,
                "message": str(exc),
            }
            logger.error(json.dumps(failure))
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Frequency helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
_FREQUENCY_MAP: dict[str, dict[str, int]] = {
|
||||
"hourly": {"hours": 1},
|
||||
"daily": {"hours": 24},
|
||||
}
|
||||
|
||||
|
||||
def _parse_frequency(freq: str) -> dict[str, int]:
|
||||
"""Convert a frequency string to APScheduler interval kwargs."""
|
||||
return _FREQUENCY_MAP.get(freq.lower(), {"hours": 24})
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Scheduler setup
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def configure_scheduler() -> None:
    """Add all jobs to the scheduler with configured intervals.

    Call this once before scheduler.start(). Removes any existing jobs first
    to ensure idempotency. Intervals come from application settings; jobs
    not matched by _FREQUENCY_MAP fall back to every 24 hours.
    """
    scheduler.remove_all_jobs()

    # Data Collector — interval from DATA_COLLECTOR_FREQUENCY
    ohlcv_interval = _parse_frequency(settings.data_collector_frequency)
    scheduler.add_job(
        collect_ohlcv,
        "interval",
        **ohlcv_interval,
        id="data_collector",
        name="Data Collector (OHLCV)",
        replace_existing=True,
    )

    # Sentiment Collector — interval from SENTIMENT_POLL_INTERVAL_MINUTES
    scheduler.add_job(
        collect_sentiment,
        "interval",
        minutes=settings.sentiment_poll_interval_minutes,
        id="sentiment_collector",
        name="Sentiment Collector",
        replace_existing=True,
    )

    # Fundamental Collector — interval from FUNDAMENTAL_FETCH_FREQUENCY
    fund_interval = _parse_frequency(settings.fundamental_fetch_frequency)
    scheduler.add_job(
        collect_fundamentals,
        "interval",
        **fund_interval,
        id="fundamental_collector",
        name="Fundamental Collector",
        replace_existing=True,
    )

    # R:R Scanner — interval from RR_SCAN_FREQUENCY
    rr_interval = _parse_frequency(settings.rr_scan_frequency)
    scheduler.add_job(
        scan_rr,
        "interval",
        **rr_interval,
        id="rr_scanner",
        name="R:R Scanner",
        replace_existing=True,
    )

    # Emit the resolved schedule so operators can confirm intervals at startup.
    logger.info(
        json.dumps({
            "event": "scheduler_configured",
            "jobs": {
                "data_collector": ohlcv_interval,
                "sentiment_collector": {"minutes": settings.sentiment_poll_interval_minutes},
                "fundamental_collector": fund_interval,
                "rr_scanner": rr_interval,
            },
        })
    )
|
||||
1
app/schemas/__init__.py
Normal file
1
app/schemas/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
41
app/schemas/admin.py
Normal file
41
app/schemas/admin.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""Admin request/response schemas."""
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class UserManagement(BaseModel):
    """Schema for user access management (grant/revoke app access)."""
    # Whether the target user may use the application.
    has_access: bool
|
||||
|
||||
|
||||
class PasswordReset(BaseModel):
    """Schema for resetting a user's password."""
    # Minimum length matches the registration password rule.
    new_password: str = Field(..., min_length=6)
|
||||
|
||||
|
||||
class CreateUserRequest(BaseModel):
    """Schema for admin-created user accounts."""
    username: str = Field(..., min_length=1)
    password: str = Field(..., min_length=6)
    # Only "user" and "admin" are accepted; anything else fails validation.
    role: str = Field(default="user", pattern=r"^(user|admin)$")
    # New accounts start without access unless explicitly granted.
    has_access: bool = False
|
||||
|
||||
|
||||
class RegistrationToggle(BaseModel):
    """Schema for toggling self-service registration on/off."""
    enabled: bool
|
||||
|
||||
|
||||
class SystemSettingUpdate(BaseModel):
    """Schema for updating a system setting."""
    # Settings are stored as strings; the caller supplies the raw value.
    value: str = Field(..., min_length=1)
|
||||
|
||||
|
||||
class DataCleanupRequest(BaseModel):
    """Schema for data cleanup — delete records older than N days."""
    # Must be strictly positive; 0 or negative would wipe current data.
    older_than_days: int = Field(..., gt=0)
|
||||
|
||||
|
||||
class JobToggle(BaseModel):
    """Schema for enabling/disabling a scheduled job."""
    enabled: bool
|
||||
18
app/schemas/auth.py
Normal file
18
app/schemas/auth.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Auth request/response schemas."""
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class RegisterRequest(BaseModel):
    """Request body for account registration."""

    username: str = Field(..., min_length=1)
    password: str = Field(..., min_length=6)
|
||||
|
||||
|
||||
class LoginRequest(BaseModel):
    """Request body for login; no length constraints so existing
    credentials are never rejected at the schema layer."""

    username: str
    password: str
|
||||
|
||||
|
||||
class TokenResponse(BaseModel):
    """Response body carrying a bearer access token after login."""

    access_token: str
    token_type: str = "bearer"
|
||||
13
app/schemas/common.py
Normal file
13
app/schemas/common.py
Normal file
@@ -0,0 +1,13 @@
|
||||
"""Shared API schemas used across all endpoints."""
|
||||
|
||||
from typing import Any, Literal
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class APIEnvelope(BaseModel):
    """Standard JSON envelope for all API responses.

    Every endpoint wraps its payload in this shape: ``status`` is
    "success" or "error", ``data`` carries the endpoint-specific payload
    (or None), and ``error`` holds a message when status is "error".
    """

    status: Literal["success", "error"]
    data: Any | None = None
    error: str | None = None
||||
18
app/schemas/fundamental.py
Normal file
18
app/schemas/fundamental.py
Normal file
@@ -0,0 +1,18 @@
|
||||
"""Pydantic schemas for fundamental data endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class FundamentalResponse(BaseModel):
    """Envelope-ready fundamental data response.

    All metrics are optional — a provider may return only a subset, and
    None signals "not available" rather than zero.
    """

    symbol: str
    pe_ratio: float | None = None
    revenue_growth: float | None = None
    earnings_surprise: float | None = None
    market_cap: float | None = None
    # When the fundamentals were last fetched; None if never fetched.
    fetched_at: datetime | None = None
|
||||
49
app/schemas/indicator.py
Normal file
49
app/schemas/indicator.py
Normal file
@@ -0,0 +1,49 @@
|
||||
"""Pydantic schemas for technical indicator endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date
|
||||
from typing import Any, Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class IndicatorRequest(BaseModel):
    """Query parameters for indicator computation.

    All fields optional; the service applies its own defaults when a
    field is omitted.
    """

    start_date: date | None = None
    end_date: date | None = None
    # Indicator lookback period (e.g. number of bars); service default if None.
    period: int | None = None
|
||||
|
||||
|
||||
class IndicatorResult(BaseModel):
    """Raw indicator values plus normalized score."""

    indicator_type: str
    # Indicator-specific raw output (keys vary per indicator type).
    values: dict[str, Any]
    # Normalized to the 0–100 range, enforced by the field constraints.
    score: float = Field(ge=0, le=100)
    # How many OHLCV bars fed the computation.
    bars_used: int
|
||||
|
||||
|
||||
class IndicatorResponse(BaseModel):
    """Envelope-ready indicator response for a single symbol."""

    symbol: str
    indicator: IndicatorResult
|
||||
|
||||
|
||||
class EMACrossResult(BaseModel):
    """EMA cross signal details."""

    # Latest value of each EMA.
    short_ema: float
    long_ema: float
    # Periods used to compute each EMA.
    short_period: int
    long_period: int
    # bullish: short above long; bearish: short below; neutral: within tolerance.
    signal: Literal["bullish", "bearish", "neutral"]
|
||||
|
||||
|
||||
class EMACrossResponse(BaseModel):
    """Envelope-ready EMA cross response."""

    # Ticker symbol the signal was computed for.
    symbol: str
    # The EMA cross signal payload.
    ema_cross: EMACrossResult
|
||||
31
app/schemas/ohlcv.py
Normal file
31
app/schemas/ohlcv.py
Normal file
@@ -0,0 +1,31 @@
|
||||
"""OHLCV request/response schemas."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import datetime as _dt
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class OHLCVCreate(BaseModel):
    """Payload for inserting a single daily OHLCV bar."""

    symbol: str = Field(..., description="Ticker symbol (e.g. AAPL)")
    date: _dt.date = Field(..., description="Trading date (YYYY-MM-DD)")
    # All prices are constrained to be non-negative; no high>=low cross-field
    # check is enforced here.
    open: float = Field(..., ge=0, description="Opening price")
    high: float = Field(..., ge=0, description="High price")
    low: float = Field(..., ge=0, description="Low price")
    close: float = Field(..., ge=0, description="Closing price")
    volume: int = Field(..., ge=0, description="Trading volume")
|
||||
|
||||
|
||||
class OHLCVResponse(BaseModel):
    """A stored OHLCV bar as returned by the API."""

    # Database primary key of the bar.
    id: int
    # Foreign key to the owning ticker row.
    ticker_id: int
    date: _dt.date
    open: float
    high: float
    low: float
    close: float
    volume: int
    # Row creation timestamp.
    created_at: _dt.datetime

    # Allow constructing this model directly from ORM row objects.
    model_config = {"from_attributes": True}
|
||||
52
app/schemas/score.py
Normal file
52
app/schemas/score.py
Normal file
@@ -0,0 +1,52 @@
|
||||
"""Pydantic schemas for scoring endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class DimensionScoreResponse(BaseModel):
    """A single dimension score."""

    # Dimension name (e.g. "fundamental"); full set not established here.
    dimension: str
    score: float
    # True when underlying data changed since the score was last computed.
    is_stale: bool
    # When the score was computed; None if never.
    computed_at: datetime | None = None
|
||||
|
||||
|
||||
class ScoreResponse(BaseModel):
    """Full score response for a ticker: composite + all dimensions."""

    symbol: str
    # Weighted composite; None when not yet computable.
    composite_score: float | None = None
    composite_stale: bool = False
    # Dimension name → weight used for the composite.
    weights: dict[str, float] = {}
    # Per-dimension scores that were available.
    dimensions: list[DimensionScoreResponse] = []
    # Dimensions with no score yet.
    missing_dimensions: list[str] = []
    computed_at: datetime | None = None
|
||||
|
||||
|
||||
class WeightUpdateRequest(BaseModel):
    """Request to update dimension weights."""

    # Positivity is documented but not enforced by this schema — the
    # service layer presumably validates it; TODO confirm.
    weights: dict[str, float] = Field(
        ...,
        description="Dimension name → weight mapping. All weights must be positive.",
    )
|
||||
|
||||
|
||||
class RankingEntry(BaseModel):
    """A single entry in the rankings list."""

    symbol: str
    composite_score: float
    # Per-dimension breakdown backing the composite.
    dimensions: list[DimensionScoreResponse] = []
|
||||
|
||||
|
||||
class RankingResponse(BaseModel):
    """Rankings response: tickers sorted by composite score descending."""

    rankings: list[RankingEntry] = []
    # Weights in effect when the ranking was produced.
    weights: dict[str, float] = {}
|
||||
30
app/schemas/sentiment.py
Normal file
30
app/schemas/sentiment.py
Normal file
@@ -0,0 +1,30 @@
|
||||
"""Pydantic schemas for sentiment endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SentimentScoreResult(BaseModel):
    """A single sentiment score record."""

    id: int
    classification: Literal["bullish", "bearish", "neutral"]
    # Provider confidence in the classification, 0-100.
    confidence: int = Field(ge=0, le=100)
    # Origin of the signal (e.g. provider/article identifier — confirm).
    source: str
    timestamp: datetime
|
||||
|
||||
|
||||
class SentimentResponse(BaseModel):
    """Envelope-ready sentiment response."""

    symbol: str
    # Individual sentiment records inside the lookback window.
    scores: list[SentimentScoreResult]
    # Number of records returned.
    count: int
    dimension_score: float | None = Field(
        None, ge=0, le=100, description="Time-decay weighted sentiment dimension score"
    )
    # Size of the lookback window used, in hours.
    lookback_hours: float
|
||||
27
app/schemas/sr_level.py
Normal file
27
app/schemas/sr_level.py
Normal file
@@ -0,0 +1,27 @@
|
||||
"""Pydantic schemas for S/R level endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SRLevelResult(BaseModel):
    """A single support/resistance level."""

    id: int
    price_level: float
    type: Literal["support", "resistance"]
    # Level strength, 0-100.
    strength: int = Field(ge=0, le=100)
    # "merged" indicates a level combined from multiple detection methods.
    detection_method: Literal["volume_profile", "pivot_point", "merged"]
    created_at: datetime
|
||||
|
||||
|
||||
class SRLevelResponse(BaseModel):
    """Envelope-ready S/R levels response."""

    symbol: str
    levels: list[SRLevelResult]
    # Number of levels returned.
    count: int
|
||||
17
app/schemas/ticker.py
Normal file
17
app/schemas/ticker.py
Normal file
@@ -0,0 +1,17 @@
|
||||
"""Ticker request/response schemas."""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class TickerCreate(BaseModel):
    """Payload for registering a new ticker."""

    symbol: str = Field(..., description="NASDAQ ticker symbol (e.g. AAPL)")
|
||||
|
||||
|
||||
class TickerResponse(BaseModel):
    """A stored ticker as returned by the API."""

    id: int
    symbol: str
    created_at: datetime

    # Allow constructing this model directly from ORM row objects.
    model_config = {"from_attributes": True}
|
||||
21
app/schemas/trade_setup.py
Normal file
21
app/schemas/trade_setup.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Pydantic schemas for trade setup endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class TradeSetupResponse(BaseModel):
    """A single trade setup detected by the R:R scanner."""

    id: int
    symbol: str
    # Trade direction; values not constrained here (presumably "long"/"short"
    # — confirm against the scanner).
    direction: str
    entry_price: float
    stop_loss: float
    target: float
    # Reward-to-risk ratio of the setup.
    rr_ratio: float
    # Composite score of the ticker at detection time.
    composite_score: float
    detected_at: datetime
|
||||
36
app/schemas/watchlist.py
Normal file
36
app/schemas/watchlist.py
Normal file
@@ -0,0 +1,36 @@
|
||||
"""Pydantic schemas for watchlist endpoints."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import datetime
|
||||
from typing import Literal
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
class SRLevelSummary(BaseModel):
    """Compact SR level for watchlist entry."""

    price_level: float
    type: Literal["support", "resistance"]
    # Level strength, 0-100.
    strength: int = Field(ge=0, le=100)
|
||||
|
||||
|
||||
class DimensionScoreSummary(BaseModel):
    """Compact dimension score for watchlist entry."""

    # Dimension name (e.g. "fundamental").
    dimension: str
    score: float
|
||||
|
||||
|
||||
class WatchlistEntryResponse(BaseModel):
    """A single watchlist entry with enriched data."""

    symbol: str
    # "auto": added by the scanner; "manual": added by a user.
    entry_type: Literal["auto", "manual"]
    composite_score: float | None = None
    dimensions: list[DimensionScoreSummary] = []
    # Best reward-to-risk setup, when one exists.
    rr_ratio: float | None = None
    rr_direction: str | None = None
    sr_levels: list[SRLevelSummary] = []
    added_at: datetime
|
||||
1
app/services/__init__.py
Normal file
1
app/services/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
|
||||
238
app/services/admin_service.py
Normal file
238
app/services/admin_service.py
Normal file
@@ -0,0 +1,238 @@
|
||||
"""Admin service: user management, system settings, data cleanup, job control."""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from passlib.hash import bcrypt
|
||||
from sqlalchemy import delete, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import DuplicateError, NotFoundError, ValidationError
|
||||
from app.models.fundamental import FundamentalData
|
||||
from app.models.ohlcv import OHLCVRecord
|
||||
from app.models.sentiment import SentimentScore
|
||||
from app.models.settings import SystemSetting
|
||||
from app.models.user import User
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# User management
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def list_users(db: AsyncSession) -> list[User]:
    """Return every user account, ordered by primary key."""
    rows = await db.execute(select(User).order_by(User.id))
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
async def create_user(
    db: AsyncSession,
    username: str,
    password: str,
    role: str = "user",
    has_access: bool = False,
) -> User:
    """Create a new user account (admin action).

    Raises:
        DuplicateError: if the username is already taken.
    """
    existing = (
        await db.execute(select(User).where(User.username == username))
    ).scalar_one_or_none()
    if existing is not None:
        raise DuplicateError(f"Username already exists: {username}")

    # Only the bcrypt hash of the password is persisted.
    account = User(
        username=username,
        password_hash=bcrypt.hash(password),
        role=role,
        has_access=has_access,
    )
    db.add(account)
    await db.commit()
    await db.refresh(account)
    return account
|
||||
|
||||
|
||||
async def set_user_access(db: AsyncSession, user_id: int, has_access: bool) -> User:
    """Grant or revoke API access for a user.

    Raises:
        NotFoundError: if no user with *user_id* exists.
    """
    account = (
        await db.execute(select(User).where(User.id == user_id))
    ).scalar_one_or_none()
    if account is None:
        raise NotFoundError(f"User not found: {user_id}")

    account.has_access = has_access
    await db.commit()
    await db.refresh(account)
    return account
|
||||
|
||||
|
||||
async def reset_password(db: AsyncSession, user_id: int, new_password: str) -> User:
    """Reset a user's password (admin action).

    Raises:
        NotFoundError: if no user with *user_id* exists.
    """
    account = (
        await db.execute(select(User).where(User.id == user_id))
    ).scalar_one_or_none()
    if account is None:
        raise NotFoundError(f"User not found: {user_id}")

    # Store only the bcrypt hash of the new password.
    account.password_hash = bcrypt.hash(new_password)
    await db.commit()
    await db.refresh(account)
    return account
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Registration toggle
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def toggle_registration(db: AsyncSession, enabled: bool) -> SystemSetting:
    """Enable or disable user registration via SystemSetting.

    Delegates to :func:`update_setting` so the key/value upsert logic lives
    in exactly one place (the previous implementation duplicated it inline).
    Stores "true"/"false" under the "registration_enabled" key, commits, and
    returns the refreshed setting row.
    """
    return await update_setting(db, "registration_enabled", str(enabled).lower())
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# System settings CRUD
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def list_settings(db: AsyncSession) -> list[SystemSetting]:
    """Return every system setting, ordered alphabetically by key."""
    rows = await db.execute(select(SystemSetting).order_by(SystemSetting.key))
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
async def update_setting(db: AsyncSession, key: str, value: str) -> SystemSetting:
    """Create or update a system setting (upsert keyed on *key*)."""
    found = (
        await db.execute(select(SystemSetting).where(SystemSetting.key == key))
    ).scalar_one_or_none()

    if found is None:
        # No row yet — insert a fresh one.
        found = SystemSetting(key=key, value=value)
        db.add(found)
    else:
        found.value = value

    await db.commit()
    await db.refresh(found)
    return found
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Data cleanup
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def cleanup_data(db: AsyncSession, older_than_days: int) -> dict[str, int]:
    """Delete OHLCV, sentiment, and fundamental records older than N days.

    Preserves tickers, users, and latest scores.
    Returns a dict with counts of deleted records per table.

    Args:
        db: Open async session; this function commits on success.
        older_than_days: Age threshold relative to "now" (UTC).
    """
    cutoff = datetime.now(timezone.utc) - timedelta(days=older_than_days)
    counts: dict[str, int] = {}

    # OHLCV — date column is a date, compare with cutoff date
    result = await db.execute(
        delete(OHLCVRecord).where(OHLCVRecord.date < cutoff.date())
    )
    counts["ohlcv"] = result.rowcount  # type: ignore[assignment]

    # Sentiment — timestamp is datetime
    # NOTE(review): cutoff is timezone-aware; confirm the column is stored
    # aware (or both naive) so this comparison is valid on the backend.
    result = await db.execute(
        delete(SentimentScore).where(SentimentScore.timestamp < cutoff)
    )
    counts["sentiment"] = result.rowcount  # type: ignore[assignment]

    # Fundamentals — fetched_at is datetime
    result = await db.execute(
        delete(FundamentalData).where(FundamentalData.fetched_at < cutoff)
    )
    counts["fundamentals"] = result.rowcount  # type: ignore[assignment]

    # Single commit covers all three deletes (all-or-nothing).
    await db.commit()
    return counts
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Job control (placeholder — scheduler is Task 12.1)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
VALID_JOB_NAMES = {"data_collector", "sentiment_collector", "fundamental_collector", "rr_scanner"}
|
||||
|
||||
JOB_LABELS = {
|
||||
"data_collector": "Data Collector (OHLCV)",
|
||||
"sentiment_collector": "Sentiment Collector",
|
||||
"fundamental_collector": "Fundamental Collector",
|
||||
"rr_scanner": "R:R Scanner",
|
||||
}
|
||||
|
||||
|
||||
async def list_jobs(db: AsyncSession) -> list[dict]:
    """Return status of all scheduled jobs.

    Each entry carries: ``name``, human-readable ``label``, ``enabled``
    (from SystemSetting, defaulting to enabled when no setting exists),
    ``next_run_at`` (ISO string or None), and ``registered`` (whether the
    scheduler currently knows the job).
    """
    # Imported lazily — presumably to avoid a circular import at module
    # load time; TODO confirm.
    from app.scheduler import scheduler

    jobs_out = []
    for name in sorted(VALID_JOB_NAMES):
        # Check enabled setting
        key = f"job_{name}_enabled"
        result = await db.execute(
            select(SystemSetting).where(SystemSetting.key == key)
        )
        setting = result.scalar_one_or_none()
        enabled = setting.value == "true" if setting else True  # default enabled

        # Get scheduler job info
        job = scheduler.get_job(name)
        next_run = None
        if job and job.next_run_time:
            next_run = job.next_run_time.isoformat()

        jobs_out.append({
            "name": name,
            "label": JOB_LABELS.get(name, name),
            "enabled": enabled,
            "next_run_at": next_run,
            "registered": job is not None,
        })

    return jobs_out
|
||||
|
||||
|
||||
async def trigger_job(db: AsyncSession, job_name: str) -> dict[str, str]:
    """Trigger a manual job run via the scheduler.

    Runs the job immediately (in addition to its regular schedule).

    Raises:
        ValidationError: if *job_name* is not a known job.
    """
    if job_name not in VALID_JOB_NAMES:
        raise ValidationError(f"Unknown job: {job_name}. Valid jobs: {', '.join(sorted(VALID_JOB_NAMES))}")

    from app.scheduler import scheduler

    job = scheduler.get_job(job_name)
    if job is None:
        return {"job": job_name, "status": "not_found", "message": f"Job '{job_name}' is not registered in the scheduler"}

    # A single modify() suffices: setting next_run_time to "now" makes the
    # scheduler fire the job on its next wakeup. The previous implementation
    # first set next_run_time=None — which *pauses* the job — creating a
    # brief window where a scheduler tick could skip it; it also re-imported
    # datetime/timezone, shadowing the module-level import.
    job.modify(next_run_time=datetime.now(timezone.utc))

    return {"job": job_name, "status": "triggered", "message": f"Job '{job_name}' triggered for immediate execution"}
|
||||
|
||||
|
||||
async def toggle_job(db: AsyncSession, job_name: str, enabled: bool) -> SystemSetting:
    """Enable or disable a scheduled job by storing state in SystemSetting.

    Actual scheduler integration happens in Task 12.1.

    Raises:
        ValidationError: if *job_name* is not a known job.
    """
    if job_name not in VALID_JOB_NAMES:
        raise ValidationError(f"Unknown job: {job_name}. Valid jobs: {', '.join(sorted(VALID_JOB_NAMES))}")

    return await update_setting(db, f"job_{job_name}_enabled", str(enabled).lower())
|
||||
66
app/services/auth_service.py
Normal file
66
app/services/auth_service.py
Normal file
@@ -0,0 +1,66 @@
|
||||
"""Auth service: registration, login, and JWT token generation."""
|
||||
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from jose import jwt
|
||||
from passlib.hash import bcrypt
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.config import settings
|
||||
from app.dependencies import JWT_ALGORITHM
|
||||
from app.exceptions import AuthenticationError, AuthorizationError, DuplicateError
|
||||
from app.models.settings import SystemSetting
|
||||
from app.models.user import User
|
||||
|
||||
|
||||
async def register(db: AsyncSession, username: str, password: str) -> User:
    """Register a new user.

    Checks if registration is enabled via SystemSetting, rejects duplicates,
    and creates a user with role='user' and has_access=False.

    Raises:
        AuthorizationError: if the "registration_enabled" setting is "false".
        DuplicateError: if the username is already taken.
    """
    # Check registration toggle — a missing setting means registration is open.
    result = await db.execute(
        select(SystemSetting).where(SystemSetting.key == "registration_enabled")
    )
    setting = result.scalar_one_or_none()
    if setting is not None and setting.value.lower() == "false":
        raise AuthorizationError("Registration is closed")

    # Check duplicate username
    # NOTE(review): select-then-insert is racy under concurrent registration;
    # a unique constraint on User.username should back this up — confirm.
    result = await db.execute(select(User).where(User.username == username))
    if result.scalar_one_or_none() is not None:
        raise DuplicateError(f"Username already exists: {username}")

    # New accounts start without API access; an admin grants it later.
    user = User(
        username=username,
        password_hash=bcrypt.hash(password),
        role="user",
        has_access=False,
    )
    db.add(user)
    await db.commit()
    await db.refresh(user)
    return user
|
||||
|
||||
|
||||
async def login(db: AsyncSession, username: str, password: str) -> str:
    """Authenticate user and return a JWT access token.

    Returns the same error message for wrong username or wrong password
    to avoid leaking which field is incorrect, and burns a bcrypt round
    even when the username is unknown so that response *timing* does not
    reveal whether the account exists either.

    Raises:
        AuthenticationError: on unknown username or wrong password.
    """
    result = await db.execute(select(User).where(User.username == username))
    user = result.scalar_one_or_none()

    if user is None:
        # Equalize timing with the found-user path: previously this branch
        # skipped the bcrypt work entirely, making unknown usernames
        # distinguishable by response time.
        bcrypt.hash(password)
        raise AuthenticationError("Invalid credentials")

    if not bcrypt.verify(password, user.password_hash):
        raise AuthenticationError("Invalid credentials")

    payload = {
        "sub": str(user.id),
        "role": user.role,
        "exp": datetime.now(timezone.utc) + timedelta(minutes=settings.jwt_expiry_minutes),
    }
    return jwt.encode(payload, settings.jwt_secret, algorithm=JWT_ALGORITHM)
|
||||
101
app/services/fundamental_service.py
Normal file
101
app/services/fundamental_service.py
Normal file
@@ -0,0 +1,101 @@
|
||||
"""Fundamental data service.
|
||||
|
||||
Stores fundamental data (P/E, revenue growth, earnings surprise, market cap)
|
||||
and marks the fundamental dimension score as stale on new data.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import NotFoundError
|
||||
from app.models.fundamental import FundamentalData
|
||||
from app.models.score import DimensionScore
|
||||
from app.models.ticker import Ticker
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve *symbol* (trimmed, upper-cased) to its Ticker row.

    Raises:
        NotFoundError: if no such ticker exists.
    """
    sym = symbol.strip().upper()
    ticker = (
        await db.execute(select(Ticker).where(Ticker.symbol == sym))
    ).scalar_one_or_none()
    if ticker is None:
        raise NotFoundError(f"Ticker not found: {sym}")
    return ticker
|
||||
|
||||
|
||||
async def store_fundamental(
    db: AsyncSession,
    symbol: str,
    pe_ratio: float | None = None,
    revenue_growth: float | None = None,
    earnings_surprise: float | None = None,
    market_cap: float | None = None,
) -> FundamentalData:
    """Store or update fundamental data for a ticker.

    Keeps a single latest snapshot per ticker. On new data, marks the
    fundamental dimension score as stale (if one exists).

    Raises:
        NotFoundError: if the ticker symbol is unknown (via _get_ticker).
    """
    ticker = await _get_ticker(db, symbol)

    # Check for existing record — at most one snapshot per ticker is kept.
    result = await db.execute(
        select(FundamentalData).where(FundamentalData.ticker_id == ticker.id)
    )
    existing = result.scalar_one_or_none()

    now = datetime.now(timezone.utc)

    if existing is not None:
        # Overwrite the snapshot in place; note that None inputs clear
        # previously stored values rather than preserving them.
        existing.pe_ratio = pe_ratio
        existing.revenue_growth = revenue_growth
        existing.earnings_surprise = earnings_surprise
        existing.market_cap = market_cap
        existing.fetched_at = now
        record = existing
    else:
        record = FundamentalData(
            ticker_id=ticker.id,
            pe_ratio=pe_ratio,
            revenue_growth=revenue_growth,
            earnings_surprise=earnings_surprise,
            market_cap=market_cap,
            fetched_at=now,
        )
        db.add(record)

    # Mark fundamental dimension score as stale if it exists
    # TODO: Use DimensionScore service when built
    dim_result = await db.execute(
        select(DimensionScore).where(
            DimensionScore.ticker_id == ticker.id,
            DimensionScore.dimension == "fundamental",
        )
    )
    dim_score = dim_result.scalar_one_or_none()
    if dim_score is not None:
        dim_score.is_stale = True

    # Single commit persists both the snapshot and the staleness flag.
    await db.commit()
    await db.refresh(record)
    return record
|
||||
|
||||
|
||||
async def get_fundamental(
    db: AsyncSession,
    symbol: str,
) -> FundamentalData | None:
    """Return the latest fundamental snapshot for a ticker, or None.

    Raises:
        NotFoundError: if the ticker symbol itself is unknown.
    """
    ticker = await _get_ticker(db, symbol)
    rows = await db.execute(
        select(FundamentalData).where(FundamentalData.ticker_id == ticker.id)
    )
    return rows.scalar_one_or_none()
|
||||
509
app/services/indicator_service.py
Normal file
509
app/services/indicator_service.py
Normal file
@@ -0,0 +1,509 @@
|
||||
"""Technical Analysis service.
|
||||
|
||||
Computes indicators from OHLCV data. Each indicator function is a pure
|
||||
function that takes a list of OHLCV-like records and returns raw values
|
||||
plus a normalized 0-100 score. The service layer handles DB fetching,
|
||||
caching, and minimum-data validation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import date
|
||||
from typing import Any
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.cache import indicator_cache
|
||||
from app.exceptions import ValidationError
|
||||
from app.services.price_service import query_ohlcv
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Minimum data requirements per indicator
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
MIN_BARS: dict[str, int] = {
|
||||
"adx": 28,
|
||||
"ema": 0, # dynamic: period + 1
|
||||
"rsi": 15,
|
||||
"atr": 15,
|
||||
"volume_profile": 20,
|
||||
"pivot_points": 5,
|
||||
}
|
||||
|
||||
DEFAULT_PERIODS: dict[str, int] = {
|
||||
"adx": 14,
|
||||
"ema": 20,
|
||||
"rsi": 14,
|
||||
"atr": 14,
|
||||
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Pure computation helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _ema(values: list[float], period: int) -> list[float]:
|
||||
"""Compute EMA series. Returns list same length as *values*."""
|
||||
if len(values) < period:
|
||||
return []
|
||||
k = 2.0 / (period + 1)
|
||||
ema_vals: list[float] = [sum(values[:period]) / period]
|
||||
for v in values[period:]:
|
||||
ema_vals.append(v * k + ema_vals[-1] * (1 - k))
|
||||
return ema_vals
|
||||
|
||||
|
||||
def compute_adx(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    period: int = 14,
) -> dict[str, Any]:
    """Compute ADX from high/low/close arrays.

    Returns dict with ``adx``, ``plus_di``, ``minus_di``, ``score``.
    ``score`` is the ADX value clamped to [0, 100].

    Raises:
        ValidationError: if fewer than ``2 * period`` bars are supplied.
    """
    n = len(closes)
    if n < 2 * period:
        raise ValidationError(
            f"ADX requires at least {2 * period} bars, got {n}"
        )

    # True Range, +DM, -DM — one value per bar transition.
    tr_list: list[float] = []
    plus_dm: list[float] = []
    minus_dm: list[float] = []
    for i in range(1, n):
        h, l, pc = highs[i], lows[i], closes[i - 1]
        tr_list.append(max(h - l, abs(h - pc), abs(l - pc)))
        up = highs[i] - highs[i - 1]
        down = lows[i - 1] - lows[i]
        plus_dm.append(up if up > down and up > 0 else 0.0)
        minus_dm.append(down if down > up and down > 0 else 0.0)

    # Wilder *running sums* — the scale cancels in the DI ratios below.
    def _smooth(vals: list[float], p: int) -> list[float]:
        s = [sum(vals[:p])]
        for v in vals[p:]:
            s.append(s[-1] - s[-1] / p + v)
        return s

    s_tr = _smooth(tr_list, period)
    s_plus = _smooth(plus_dm, period)
    s_minus = _smooth(minus_dm, period)

    # +DI, -DI, DX series; remember the latest DI pair for the result.
    dx_list: list[float] = []
    plus_di_last = 0.0
    minus_di_last = 0.0
    for i in range(len(s_tr)):
        tr_v = s_tr[i] if s_tr[i] != 0 else 1e-10  # guard divide-by-zero
        pdi = 100.0 * s_plus[i] / tr_v
        mdi = 100.0 * s_minus[i] / tr_v
        denom = pdi + mdi if (pdi + mdi) != 0 else 1e-10
        dx_list.append(100.0 * abs(pdi - mdi) / denom)
        plus_di_last = pdi
        minus_di_last = mdi

    # ADX = Wilder moving *average* of DX.
    # BUGFIX: the previous version fed dx_list through _smooth(), which
    # returns running sums (~period x too large), so adx/score pinned at
    # 100 for virtually any input.
    if len(dx_list) < period:
        adx_val = sum(dx_list) / len(dx_list) if dx_list else 0.0
    else:
        adx_val = sum(dx_list[:period]) / period
        for dx in dx_list[period:]:
            adx_val = (adx_val * (period - 1) + dx) / period

    score = max(0.0, min(100.0, adx_val))

    return {
        "adx": round(adx_val, 4),
        "plus_di": round(plus_di_last, 4),
        "minus_di": round(minus_di_last, 4),
        "score": round(score, 4),
    }
|
||||
|
||||
|
||||
def compute_ema(
    closes: list[float],
    period: int = 20,
) -> dict[str, Any]:
    """Compute EMA for *closes* with given *period*.

    Score: normalized position of latest close relative to EMA —
    50 means at the EMA, 100 means 5%+ above, 0 means 5%+ below.

    Raises:
        ValidationError: if fewer than ``period + 1`` bars are supplied.
    """
    min_bars = period + 1
    if len(closes) < min_bars:
        raise ValidationError(
            f"EMA({period}) requires at least {min_bars} bars, got {len(closes)}"
        )

    # EMA recurrence, seeded with the SMA of the first `period` closes.
    weight = 2.0 / (period + 1)
    running = sum(closes[:period]) / period
    for price in closes[period:]:
        running = price * weight + running * (1 - weight)

    last_close = closes[-1]
    offset_pct = 0.0 if running == 0 else (last_close - running) / running * 100.0
    score = max(0.0, min(100.0, 50.0 + offset_pct * 10.0))

    return {
        "ema": round(running, 4),
        "period": period,
        "latest_close": round(last_close, 4),
        "score": round(score, 4),
    }
|
||||
|
||||
|
||||
def compute_rsi(
    closes: list[float],
    period: int = 14,
) -> dict[str, Any]:
    """Relative Strength Index via Wilder smoothing.

    Score equals the RSI itself (already on a 0-100 scale).

    Raises:
        ValidationError: if fewer than ``period + 1`` bars are supplied.
    """
    count = len(closes)
    if count < period + 1:
        raise ValidationError(
            f"RSI requires at least {period + 1} bars, got {count}"
        )

    changes = [b - a for a, b in zip(closes, closes[1:])]
    gains = [c if c > 0 else 0.0 for c in changes]
    losses = [-c if c < 0 else 0.0 for c in changes]

    # Seed with simple averages, then apply Wilder smoothing.
    avg_gain = sum(gains[:period]) / period
    avg_loss = sum(losses[:period]) / period
    for g, l in zip(gains[period:], losses[period:]):
        avg_gain = (avg_gain * (period - 1) + g) / period
        avg_loss = (avg_loss * (period - 1) + l) / period

    if avg_loss == 0:
        rsi = 100.0  # no down-moves in the window
    else:
        rsi = 100.0 - 100.0 / (1.0 + avg_gain / avg_loss)

    score = max(0.0, min(100.0, rsi))

    return {
        "rsi": round(rsi, 4),
        "period": period,
        "score": round(score, 4),
    }
|
||||
|
||||
|
||||
def compute_atr(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    period: int = 14,
) -> dict[str, Any]:
    """Average True Range with Wilder smoothing.

    Score is the inverse of ATR as a percentage of the latest close:
    0% ATR scores 100, 10%+ ATR scores 0 (lower volatility = higher score).

    Raises:
        ValidationError: if fewer than ``period + 1`` bars are supplied.
    """
    count = len(closes)
    if count < period + 1:
        raise ValidationError(
            f"ATR requires at least {period + 1} bars, got {count}"
        )

    true_ranges: list[float] = []
    for i in range(1, count):
        prev_close = closes[i - 1]
        true_ranges.append(
            max(highs[i] - lows[i], abs(highs[i] - prev_close), abs(lows[i] - prev_close))
        )

    # Wilder smoothing, seeded with the simple average of the first period.
    atr = sum(true_ranges[:period]) / period
    for tr in true_ranges[period:]:
        atr = (atr * (period - 1) + tr) / period

    last = closes[-1]
    atr_pct = 0.0 if last == 0 else atr / last * 100.0
    score = max(0.0, min(100.0, 100.0 - atr_pct * 10.0))

    return {
        "atr": round(atr, 4),
        "period": period,
        "atr_percent": round(atr_pct, 4),
        "score": round(score, 4),
    }
|
||||
|
||||
|
||||
def compute_volume_profile(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    volumes: list[int],
    num_bins: int = 20,
) -> dict[str, Any]:
    """Compute Volume Profile: POC, Value Area, HVN, LVN.

    Score: proximity of latest close to POC (closer = higher).

    Raises:
        ValidationError: if fewer than 20 bars are supplied.
    """
    n = len(closes)
    if n < 20:
        raise ValidationError(
            f"Volume Profile requires at least 20 bars, got {n}"
        )

    price_min = min(lows)
    price_max = max(highs)
    if price_max == price_min:
        price_max = price_min + 1.0  # avoid zero-width range

    # Partition [price_min, price_max] into num_bins equal price buckets;
    # bin_prices holds each bucket's midpoint.
    bin_width = (price_max - price_min) / num_bins
    bins: list[float] = [0.0] * num_bins
    bin_prices: list[float] = [
        price_min + (i + 0.5) * bin_width for i in range(num_bins)
    ]

    for i in range(n):
        # Distribute volume across bins the bar spans.
        # NOTE: the FULL bar volume is added to every overlapped bin rather
        # than split proportionally — volume is effectively double-counted
        # for wide bars; presumably a deliberate simplification.
        bar_low, bar_high = lows[i], highs[i]
        for b in range(num_bins):
            bl = price_min + b * bin_width
            bh = bl + bin_width
            if bar_high >= bl and bar_low <= bh:
                bins[b] += volumes[i]

    total_vol = sum(bins)
    if total_vol == 0:
        total_vol = 1.0  # keeps the 70% threshold below well-defined

    # POC = bin with highest volume
    poc_idx = bins.index(max(bins))
    poc = round(bin_prices[poc_idx], 4)

    # Value Area: 70% of total volume around POC
    # (greedy: take highest-volume bins until 70% of volume is covered).
    sorted_bins = sorted(range(num_bins), key=lambda i: bins[i], reverse=True)
    va_vol = 0.0
    va_indices: list[int] = []
    for idx in sorted_bins:
        va_vol += bins[idx]
        va_indices.append(idx)
        if va_vol >= total_vol * 0.7:
            break
    va_low = round(price_min + min(va_indices) * bin_width, 4)
    va_high = round(price_min + (max(va_indices) + 1) * bin_width, 4)

    # HVN / LVN: bins above/below average volume
    avg_vol = total_vol / num_bins
    hvn = [round(bin_prices[i], 4) for i in range(num_bins) if bins[i] > avg_vol]
    lvn = [round(bin_prices[i], 4) for i in range(num_bins) if bins[i] < avg_vol]

    # Score: proximity of latest close to POC
    latest = closes[-1]
    price_range = price_max - price_min
    if price_range == 0:
        # Unreachable after the zero-width guard above, kept defensively.
        score = 100.0
    else:
        dist_pct = abs(latest - poc) / price_range
        score = max(0.0, min(100.0, 100.0 * (1.0 - dist_pct)))

    return {
        "poc": poc,
        "value_area_low": va_low,
        "value_area_high": va_high,
        "hvn": hvn,
        "lvn": lvn,
        "score": round(score, 4),
    }
|
||||
|
||||
|
||||
def compute_pivot_points(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    window: int = 2,
) -> dict[str, Any]:
    """Detect swing highs/lows as pivot points.

    A bar is a swing high when its high is >= every high within *window*
    bars on either side (and symmetrically for swing lows). The score is
    the percentage of detected pivots lying within 2% of the latest close.
    """
    n = len(closes)
    if n < 5:
        raise ValidationError(
            f"Pivot Points requires at least 5 bars, got {n}"
        )

    peaks: list[float] = []
    troughs: list[float] = []

    for centre in range(window, n - window):
        neighbourhood = range(centre - window, centre + window + 1)
        # Swing high: local maximum over the neighbourhood
        if all(highs[centre] >= highs[k] for k in neighbourhood):
            peaks.append(round(highs[centre], 4))
        # Swing low: local minimum over the neighbourhood
        if all(lows[centre] <= lows[k] for k in neighbourhood):
            troughs.append(round(lows[centre], 4))

    pivots = peaks + troughs
    last_close = closes[-1]

    # Score: share of pivots within 2% of the latest close, scaled to 0-100
    if not pivots or last_close == 0:
        score = 0.0
    else:
        close_by = sum(
            1 for p in pivots if abs(p - last_close) / last_close <= 0.02
        )
        score = min(100.0, (close_by / max(len(pivots), 1)) * 100.0)

    return {
        "swing_highs": peaks,
        "swing_lows": troughs,
        "pivot_count": len(pivots),
        "score": round(score, 4),
    }
|
||||
|
||||
|
||||
def compute_ema_cross(
    closes: list[float],
    short_period: int = 20,
    long_period: int = 50,
    tolerance: float = 1e-6,
) -> dict[str, Any]:
    """Compare the short EMA against the long EMA.

    The signal is "bullish" when the short EMA sits above the long EMA,
    "bearish" when below, and "neutral" when the gap is within *tolerance*.
    """
    min_bars = long_period + 1
    if len(closes) < min_bars:
        raise ValidationError(
            f"EMA Cross requires at least {min_bars} bars, got {len(closes)}"
        )

    # Only the most recent value of each EMA series matters here.
    fast = _ema(closes, short_period)[-1]
    slow = _ema(closes, long_period)[-1]

    gap = fast - slow
    if abs(gap) <= tolerance:
        verdict = "neutral"
    else:
        verdict = "bullish" if gap > 0 else "bearish"

    return {
        "short_ema": round(fast, 4),
        "long_ema": round(slow, 4),
        "short_period": short_period,
        "long_period": long_period,
        "signal": verdict,
    }
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Supported indicator types
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Indicator names accepted by get_indicator(). "ema_cross" is not listed
# here — it is served by its own entry point, get_ema_cross().
INDICATOR_TYPES = {"adx", "ema", "rsi", "atr", "volume_profile", "pivot_points"}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Service-layer functions (DB + cache + validation)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _extract_ohlcv(records: list) -> tuple[
|
||||
list[float], list[float], list[float], list[float], list[int]
|
||||
]:
|
||||
"""Extract parallel arrays from OHLCVRecord list."""
|
||||
opens = [float(r.open) for r in records]
|
||||
highs = [float(r.high) for r in records]
|
||||
lows = [float(r.low) for r in records]
|
||||
closes = [float(r.close) for r in records]
|
||||
volumes = [int(r.volume) for r in records]
|
||||
return opens, highs, lows, closes, volumes
|
||||
|
||||
|
||||
async def get_indicator(
    db: AsyncSession,
    symbol: str,
    indicator_type: str,
    start_date: date | None = None,
    end_date: date | None = None,
    period: int | None = None,
) -> dict[str, Any]:
    """Compute a single indicator for *symbol*.

    Checks the in-process cache first; stores the response after computing.

    Args:
        db: Async database session used to load OHLCV bars.
        symbol: Ticker symbol (upper-cased for the cache key).
        indicator_type: One of INDICATOR_TYPES (case-insensitive).
        start_date / end_date: Optional inclusive date bounds for the bars.
        period: Optional override of the indicator's default period; ignored
            by volume_profile and pivot_points.

    Raises:
        ValidationError: unknown indicator type, or too few bars (raised by
            the individual compute_* functions).
        NotFoundError: from query_ohlcv when the ticker does not exist.
    """
    indicator_type = indicator_type.lower()
    if indicator_type not in INDICATOR_TYPES:
        raise ValidationError(
            f"Unknown indicator type: {indicator_type}. "
            f"Supported: {', '.join(sorted(INDICATOR_TYPES))}"
        )

    # Bug fix: the cache key must include the requested period — previously
    # e.g. rsi(period=7) and rsi(period=14) shared one slot, so the second
    # request silently returned the first request's values.
    cache_key = (
        symbol.upper(), str(start_date), str(end_date), indicator_type, period
    )
    cached = indicator_cache.get(cache_key)
    if cached is not None:
        return cached

    records = await query_ohlcv(db, symbol, start_date, end_date)
    _, highs, lows, closes, volumes = _extract_ohlcv(records)
    n = len(records)

    if indicator_type == "adx":
        result = compute_adx(
            highs, lows, closes, period=period or DEFAULT_PERIODS["adx"]
        )
    elif indicator_type == "ema":
        result = compute_ema(closes, period=period or DEFAULT_PERIODS["ema"])
    elif indicator_type == "rsi":
        result = compute_rsi(closes, period=period or DEFAULT_PERIODS["rsi"])
    elif indicator_type == "atr":
        result = compute_atr(
            highs, lows, closes, period=period or DEFAULT_PERIODS["atr"]
        )
    elif indicator_type == "volume_profile":
        result = compute_volume_profile(highs, lows, closes, volumes)
    else:
        # Membership was validated above, so this must be pivot_points.
        result = compute_pivot_points(highs, lows, closes)

    response = {
        "indicator_type": indicator_type,
        # Every compute_* result carries a "score" key; expose it separately
        # from the remaining indicator-specific values.
        "values": {k: v for k, v in result.items() if k != "score"},
        "score": result["score"],
        "bars_used": n,
    }

    indicator_cache.set(cache_key, response)
    return response
|
||||
|
||||
|
||||
async def get_ema_cross(
    db: AsyncSession,
    symbol: str,
    start_date: date | None = None,
    end_date: date | None = None,
    short_period: int = 20,
    long_period: int = 50,
) -> dict[str, Any]:
    """Compute the EMA cross signal for *symbol*, with caching.

    The cache key embeds both periods, so different period pairs do not
    collide.
    """
    key = (
        symbol.upper(),
        str(start_date),
        str(end_date),
        f"ema_cross_{short_period}_{long_period}",
    )
    hit = indicator_cache.get(key)
    if hit is not None:
        return hit

    rows = await query_ohlcv(db, symbol, start_date, end_date)
    # Only the close series is needed for the EMA comparison.
    closes = _extract_ohlcv(rows)[3]

    signal = compute_ema_cross(closes, short_period, long_period)
    indicator_cache.set(key, signal)
    return signal
|
||||
172
app/services/ingestion_service.py
Normal file
172
app/services/ingestion_service.py
Normal file
@@ -0,0 +1,172 @@
|
||||
"""Ingestion Pipeline service: fetch from provider, validate, upsert into Price Store.
|
||||
|
||||
Handles rate-limit resume via IngestionProgress and provider error isolation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass
|
||||
from datetime import date, timedelta
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import NotFoundError, ProviderError, RateLimitError
|
||||
from app.models.settings import IngestionProgress
|
||||
from app.models.ticker import Ticker
|
||||
from app.providers.protocol import MarketDataProvider
|
||||
from app.services import price_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class IngestionResult:
    """Result of an ingestion run.

    Returned by fetch_and_ingest() so callers can tell apart a finished
    run, a rate-limited partial run, and a provider failure without
    inspecting logs.
    """

    symbol: str  # normalised ticker symbol that was ingested
    records_ingested: int  # number of OHLCV rows upserted during this run
    last_date: date | None  # date of the last successfully upserted record, if any
    status: str  # "complete" | "partial" | "error"
    message: str | None = None  # human-readable detail (error text, resume hint)
|
||||
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve *symbol* (whitespace/case-insensitive) to its Ticker row.

    Raises NotFoundError when the symbol is not tracked.
    """
    wanted = symbol.strip().upper()
    row = await db.execute(select(Ticker).where(Ticker.symbol == wanted))
    found = row.scalar_one_or_none()
    if found is None:
        raise NotFoundError(f"Ticker not found: {wanted}")
    return found
|
||||
|
||||
|
||||
async def _get_progress(db: AsyncSession, ticker_id: int) -> IngestionProgress | None:
    """Return the ticker's IngestionProgress row, or None if it has none."""
    query = select(IngestionProgress).where(
        IngestionProgress.ticker_id == ticker_id
    )
    return (await db.execute(query)).scalar_one_or_none()
|
||||
|
||||
|
||||
async def _update_progress(
    db: AsyncSession, ticker_id: int, last_date: date
) -> None:
    """Record *last_date* as the ticker's last successfully ingested date.

    Creates the IngestionProgress row on first use, then commits so the
    checkpoint survives a later failure in the same run.
    """
    existing = await _get_progress(db, ticker_id)
    if existing is not None:
        existing.last_ingested_date = last_date
    else:
        db.add(
            IngestionProgress(ticker_id=ticker_id, last_ingested_date=last_date)
        )
    await db.commit()
|
||||
|
||||
|
||||
async def fetch_and_ingest(
    db: AsyncSession,
    provider: MarketDataProvider,
    symbol: str,
    start_date: date | None = None,
    end_date: date | None = None,
) -> IngestionResult:
    """Fetch OHLCV data from provider and upsert into Price Store.

    - Resolves start_date from IngestionProgress if not provided (resume).
    - Defaults end_date to today.
    - Tracks last_ingested_date after each successful upsert.
    - On RateLimitError from provider: returns partial progress.
    - On ProviderError: returns error, no data modification.

    Args:
        db: Async session; commits happen per record (inside
            price_service.upsert_ohlcv and _update_progress) so a
            mid-run failure loses at most the in-flight record.
        provider: Market-data source implementing MarketDataProvider.
        symbol: Ticker symbol; NotFoundError is raised if not tracked.
        start_date: First date to fetch (inclusive). When omitted, the
            day after the stored resume point, else end_date - 365 days.
        end_date: Last date to fetch (inclusive); today when omitted.

    Returns:
        IngestionResult with status "complete", "partial" (rate
        limited) or "error" (provider failure).
    """
    ticker = await _get_ticker(db, symbol)

    # Resolve end_date
    if end_date is None:
        end_date = date.today()

    # Resolve start_date: use progress resume or default to 1 year ago
    if start_date is None:
        progress = await _get_progress(db, ticker.id)
        if progress is not None:
            start_date = progress.last_ingested_date + timedelta(days=1)
        else:
            start_date = end_date - timedelta(days=365)

    # If start > end, nothing to fetch
    if start_date > end_date:
        return IngestionResult(
            symbol=ticker.symbol,
            records_ingested=0,
            last_date=None,
            status="complete",
            message="Already up to date",
        )

    # Fetch from provider
    try:
        records = await provider.fetch_ohlcv(ticker.symbol, start_date, end_date)
    except RateLimitError:
        # No data fetched at all — return partial with 0 records
        return IngestionResult(
            symbol=ticker.symbol,
            records_ingested=0,
            last_date=None,
            status="partial",
            message="Rate limited before any records fetched. Resume available.",
        )
    except ProviderError as exc:
        logger.error("Provider error for %s: %s", ticker.symbol, exc)
        return IngestionResult(
            symbol=ticker.symbol,
            records_ingested=0,
            last_date=None,
            status="error",
            message=str(exc),
        )

    # Sort records by date to ensure ordered ingestion
    records.sort(key=lambda r: r.date)

    ingested_count = 0
    last_ingested: date | None = None

    for record in records:
        try:
            await price_service.upsert_ohlcv(
                db,
                symbol=ticker.symbol,
                record_date=record.date,
                open_=record.open,
                high=record.high,
                low=record.low,
                close=record.close,
                volume=record.volume,
            )
            ingested_count += 1
            last_ingested = record.date

            # Update progress after each successful upsert
            await _update_progress(db, ticker.id, record.date)

        except RateLimitError:
            # Mid-ingestion rate limit — return partial progress
            # NOTE(review): nothing in this loop body obviously calls the
            # provider, so it is unclear which call can raise RateLimitError
            # here — confirm whether upsert_ohlcv can propagate it or whether
            # this handler is dead code.
            logger.warning(
                "Rate limited during ingestion for %s after %d records",
                ticker.symbol,
                ingested_count,
            )
            return IngestionResult(
                symbol=ticker.symbol,
                records_ingested=ingested_count,
                last_date=last_ingested,
                status="partial",
                message=f"Rate limited. Ingested {ingested_count} records. Resume available.",
            )

    return IngestionResult(
        symbol=ticker.symbol,
        records_ingested=ingested_count,
        last_date=last_ingested,
        status="complete",
        message=f"Successfully ingested {ingested_count} records",
    )
|
||||
110
app/services/price_service.py
Normal file
110
app/services/price_service.py
Normal file
@@ -0,0 +1,110 @@
|
||||
"""Price Store service: upsert and query OHLCV records."""
|
||||
|
||||
from datetime import date, datetime
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.dialects.postgresql import insert as pg_insert
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import NotFoundError, ValidationError
|
||||
from app.models.ohlcv import OHLCVRecord
|
||||
from app.models.ticker import Ticker
|
||||
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Fetch the Ticker row for *symbol* (normalised to upper case).

    Raises NotFoundError if the symbol is not tracked.
    """
    key = symbol.strip().upper()
    match = (
        await db.execute(select(Ticker).where(Ticker.symbol == key))
    ).scalar_one_or_none()
    if match is None:
        raise NotFoundError(f"Ticker not found: {key}")
    return match
|
||||
|
||||
|
||||
def _validate_ohlcv(
|
||||
high: float, low: float, open_: float, close: float, volume: int, record_date: date
|
||||
) -> None:
|
||||
"""Business-rule validation for an OHLCV record."""
|
||||
if high < low:
|
||||
raise ValidationError("Validation error: high must be >= low")
|
||||
if any(p < 0 for p in (open_, high, low, close)):
|
||||
raise ValidationError("Validation error: prices must be >= 0")
|
||||
if volume < 0:
|
||||
raise ValidationError("Validation error: volume must be >= 0")
|
||||
if record_date > date.today():
|
||||
raise ValidationError("Validation error: date must not be in the future")
|
||||
|
||||
|
||||
async def upsert_ohlcv(
    db: AsyncSession,
    symbol: str,
    record_date: date,
    open_: float,
    high: float,
    low: float,
    close: float,
    volume: int,
) -> OHLCVRecord:
    """Insert or update an OHLCV record for (ticker, date).

    Validates business rules, resolves ticker, then uses
    ON CONFLICT DO UPDATE on the (ticker_id, date) unique constraint.
    Commits before returning.

    Raises:
        ValidationError: from _validate_ohlcv (bad prices/volume/date).
        NotFoundError: from _get_ticker when the symbol is not tracked.
    """
    _validate_ohlcv(high, low, open_, close, volume, record_date)
    ticker = await _get_ticker(db, symbol)

    # PostgreSQL-specific upsert: insert, or overwrite all price fields
    # when a row for (ticker_id, record_date) already exists.
    # NOTE(review): datetime.utcnow() is naive and deprecated in Python
    # 3.12; other services here use datetime.now(timezone.utc). Confirm
    # whether the created_at column expects naive UTC before changing.
    stmt = pg_insert(OHLCVRecord).values(
        ticker_id=ticker.id,
        date=record_date,
        open=open_,
        high=high,
        low=low,
        close=close,
        volume=volume,
        created_at=datetime.utcnow(),
    )
    stmt = stmt.on_conflict_do_update(
        constraint="uq_ohlcv_ticker_date",
        set_={
            "open": stmt.excluded.open,
            "high": stmt.excluded.high,
            "low": stmt.excluded.low,
            "close": stmt.excluded.close,
            "volume": stmt.excluded.volume,
            "created_at": stmt.excluded.created_at,
        },
    )
    # RETURNING gives back the inserted/updated row in the same round trip.
    stmt = stmt.returning(OHLCVRecord)
    result = await db.execute(stmt)
    await db.commit()

    record = result.scalar_one()

    # TODO: Invalidate LRU cache entries for this ticker (Task 7.1)
    # TODO: Mark composite score as stale for this ticker (Task 10.1)

    return record
|
||||
|
||||
|
||||
async def query_ohlcv(
    db: AsyncSession,
    symbol: str,
    start_date: date | None = None,
    end_date: date | None = None,
) -> list[OHLCVRecord]:
    """Return the ticker's OHLCV rows, date-ascending, optionally bounded.

    Both bounds are inclusive. Raises NotFoundError when the ticker does
    not exist.
    """
    ticker = await _get_ticker(db, symbol)

    conditions = [OHLCVRecord.ticker_id == ticker.id]
    if start_date is not None:
        conditions.append(OHLCVRecord.date >= start_date)
    if end_date is not None:
        conditions.append(OHLCVRecord.date <= end_date)

    stmt = (
        select(OHLCVRecord)
        .where(*conditions)
        .order_by(OHLCVRecord.date.asc())
    )
    rows = await db.execute(stmt)
    return list(rows.scalars().all())
|
||||
241
app/services/rr_scanner_service.py
Normal file
241
app/services/rr_scanner_service.py
Normal file
@@ -0,0 +1,241 @@
|
||||
"""R:R Scanner service.
|
||||
|
||||
Scans tracked tickers for asymmetric risk-reward trade setups.
|
||||
Long: target = nearest SR above, stop = entry - ATR × multiplier.
|
||||
Short: target = nearest SR below, stop = entry + ATR × multiplier.
|
||||
Filters by configurable R:R threshold (default 3:1).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import delete, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import NotFoundError
|
||||
from app.models.score import CompositeScore
|
||||
from app.models.sr_level import SRLevel
|
||||
from app.models.ticker import Ticker
|
||||
from app.models.trade_setup import TradeSetup
|
||||
from app.services.indicator_service import _extract_ohlcv, compute_atr
|
||||
from app.services.price_service import query_ohlcv
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Look up a Ticker by (normalised) symbol; NotFoundError if missing."""
    sym = symbol.strip().upper()
    hit = (
        await db.execute(select(Ticker).where(Ticker.symbol == sym))
    ).scalar_one_or_none()
    if hit is None:
        raise NotFoundError(f"Ticker not found: {sym}")
    return hit
|
||||
|
||||
|
||||
async def _clear_setups(db: AsyncSession, ticker_id: int) -> None:
    """Delete all stored TradeSetup rows for *ticker_id* and commit.

    Used on early exits so stale setups from a previous scan do not
    survive once the ticker can no longer produce valid setups.
    """
    await db.execute(delete(TradeSetup).where(TradeSetup.ticker_id == ticker_id))
    await db.commit()


def _build_setup(
    ticker_id: int,
    direction: str,
    entry_price: float,
    stop: float,
    target: float,
    rr_threshold: float,
    composite_score: float,
    detected_at: datetime,
) -> TradeSetup | None:
    """Build a TradeSetup when reward and risk are both positive and the
    reward/risk ratio meets *rr_threshold*; otherwise return None."""
    if direction == "long":
        reward = target - entry_price
        risk = entry_price - stop
    else:
        reward = entry_price - target
        risk = stop - entry_price
    if risk <= 0 or reward <= 0:
        return None
    rr = reward / risk
    if rr < rr_threshold:
        return None
    return TradeSetup(
        ticker_id=ticker_id,
        direction=direction,
        entry_price=round(entry_price, 4),
        stop_loss=round(stop, 4),
        target=round(target, 4),
        rr_ratio=round(rr, 4),
        composite_score=round(composite_score, 4),
        detected_at=detected_at,
    )


async def scan_ticker(
    db: AsyncSession,
    symbol: str,
    rr_threshold: float = 3.0,
    atr_multiplier: float = 1.5,
) -> list[TradeSetup]:
    """Scan a single ticker for trade setups meeting the R:R threshold.

    1. Fetch OHLCV data and compute ATR.
    2. Fetch SR levels.
    3. Compute long and short setups.
    4. Filter by R:R threshold.
    5. Delete old setups for this ticker and persist new ones.

    Bug fix: the early-exit paths previously issued the stale-setup
    DELETE but never committed it, so the cleanup could be rolled back
    when the session closed; _clear_setups now commits.

    Returns list of persisted TradeSetup models.
    """
    ticker = await _get_ticker(db, symbol)

    # Fetch OHLCV; ATR needs at least 15 bars.
    records = await query_ohlcv(db, symbol)
    if not records or len(records) < 15:
        logger.info(
            "Skipping %s: insufficient OHLCV data (%d bars, need 15+)",
            symbol, len(records),
        )
        await _clear_setups(db, ticker.id)
        return []

    _, highs, lows, closes, _ = _extract_ohlcv(records)
    entry_price = closes[-1]

    # Compute ATR; any failure means we cannot size the stop.
    try:
        atr_value = compute_atr(highs, lows, closes)["atr"]
    except Exception:
        logger.info("Skipping %s: cannot compute ATR", symbol)
        await _clear_setups(db, ticker.id)
        return []

    if atr_value <= 0:
        logger.info("Skipping %s: ATR is zero or negative", symbol)
        await _clear_setups(db, ticker.id)
        return []

    # Fetch SR levels from DB (already computed by sr_service)
    sr_result = await db.execute(
        select(SRLevel).where(SRLevel.ticker_id == ticker.id)
    )
    sr_levels = list(sr_result.scalars().all())

    if not sr_levels:
        logger.info("Skipping %s: no SR levels available", symbol)
        await _clear_setups(db, ticker.id)
        return []

    # Nearest SR price above (ascending) and below (descending) the entry.
    prices_above = sorted(
        lv.price_level for lv in sr_levels if lv.price_level > entry_price
    )
    prices_below = sorted(
        (lv.price_level for lv in sr_levels if lv.price_level < entry_price),
        reverse=True,
    )

    # Get composite score for this ticker (0.0 when none stored).
    comp_result = await db.execute(
        select(CompositeScore).where(CompositeScore.ticker_id == ticker.id)
    )
    comp = comp_result.scalar_one_or_none()
    composite_score = comp.score if comp else 0.0

    now = datetime.now(timezone.utc)
    setups: list[TradeSetup] = []

    # Long setup: target = nearest SR above, stop = entry - ATR × multiplier
    if prices_above:
        long_setup = _build_setup(
            ticker.id, "long", entry_price,
            entry_price - atr_value * atr_multiplier,
            prices_above[0], rr_threshold, composite_score, now,
        )
        if long_setup is not None:
            setups.append(long_setup)

    # Short setup: target = nearest SR below, stop = entry + ATR × multiplier
    if prices_below:
        short_setup = _build_setup(
            ticker.id, "short", entry_price,
            entry_price + atr_value * atr_multiplier,
            prices_below[0], rr_threshold, composite_score, now,
        )
        if short_setup is not None:
            setups.append(short_setup)

    # Replace old setups with the new ones in a single transaction.
    await db.execute(
        delete(TradeSetup).where(TradeSetup.ticker_id == ticker.id)
    )
    for setup in setups:
        db.add(setup)

    await db.commit()

    # Refresh to get IDs
    for s in setups:
        await db.refresh(s)

    return setups
|
||||
|
||||
|
||||
async def scan_all_tickers(
    db: AsyncSession,
    rr_threshold: float = 3.0,
    atr_multiplier: float = 1.5,
) -> list[TradeSetup]:
    """Scan every tracked ticker for trade setups.

    Each ticker is scanned independently: a failure on one is logged and
    does not abort the rest. Returns the combined setups from all tickers.
    """
    ticker_rows = await db.execute(select(Ticker).order_by(Ticker.symbol))

    found: list[TradeSetup] = []
    for tick in ticker_rows.scalars().all():
        try:
            found.extend(
                await scan_ticker(db, tick.symbol, rr_threshold, atr_multiplier)
            )
        except Exception:
            logger.exception("Error scanning ticker %s", tick.symbol)

    return found
|
||||
|
||||
|
||||
async def get_trade_setups(
    db: AsyncSession,
    direction: str | None = None,
) -> list[dict]:
    """Return all stored trade setups as dicts, each including the symbol.

    Optionally filtered by *direction* (case-insensitive). Sorted by R:R
    ratio descending, then composite score descending.
    """
    query = select(TradeSetup, Ticker.symbol).join(
        Ticker, TradeSetup.ticker_id == Ticker.id
    )
    if direction is not None:
        query = query.where(TradeSetup.direction == direction.lower())

    query = query.order_by(
        TradeSetup.rr_ratio.desc(),
        TradeSetup.composite_score.desc(),
    )

    out: list[dict] = []
    for setup, sym in (await db.execute(query)).all():
        out.append(
            {
                "id": setup.id,
                "symbol": sym,
                "direction": setup.direction,
                "entry_price": setup.entry_price,
                "stop_loss": setup.stop_loss,
                "target": setup.target,
                "rr_ratio": setup.rr_ratio,
                "composite_score": setup.composite_score,
                "detected_at": setup.detected_at,
            }
        )
    return out
|
||||
584
app/services/scoring_service.py
Normal file
584
app/services/scoring_service.py
Normal file
@@ -0,0 +1,584 @@
|
||||
"""Scoring Engine service.
|
||||
|
||||
Computes dimension scores (technical, sr_quality, sentiment, fundamental,
|
||||
momentum) each 0-100, composite score as weighted average of available
|
||||
dimensions with re-normalized weights, staleness marking/recomputation
|
||||
on demand, and weight update triggers full recomputation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import NotFoundError, ValidationError
|
||||
from app.models.score import CompositeScore, DimensionScore
|
||||
from app.models.settings import SystemSetting
|
||||
from app.models.ticker import Ticker
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# The five scoring dimensions combined into the composite score.
DIMENSIONS = ["technical", "sr_quality", "sentiment", "fundamental", "momentum"]

# Default weight per dimension (sums to 1.0). Custom weights can be stored
# in SystemSetting under SCORING_WEIGHTS_KEY (see _get_weights/_save_weights).
DEFAULT_WEIGHTS: dict[str, float] = {
    "technical": 0.25,
    "sr_quality": 0.20,
    "sentiment": 0.15,
    "fundamental": 0.20,
    "momentum": 0.20,
}

# SystemSetting.key under which custom weights are persisted as JSON.
SCORING_WEIGHTS_KEY = "scoring_weights"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve a (possibly lower-case/padded) symbol to its Ticker row.

    Raises NotFoundError for unknown symbols.
    """
    cleaned = symbol.strip().upper()
    res = await db.execute(select(Ticker).where(Ticker.symbol == cleaned))
    tk = res.scalar_one_or_none()
    if tk is None:
        raise NotFoundError(f"Ticker not found: {cleaned}")
    return tk
|
||||
|
||||
|
||||
async def _get_weights(db: AsyncSession) -> dict[str, float]:
    """Load scoring weights from SystemSetting, falling back to defaults.

    A stored value that is not valid JSON, or that parses to something
    other than a dict (e.g. a JSON list or number), is treated as invalid
    and the defaults are used instead.
    """
    result = await db.execute(
        select(SystemSetting).where(SystemSetting.key == SCORING_WEIGHTS_KEY)
    )
    setting = result.scalar_one_or_none()
    if setting is not None:
        try:
            parsed = json.loads(setting.value)
        except (json.JSONDecodeError, TypeError):
            logger.warning("Invalid scoring weights in DB, using defaults")
        else:
            # Robustness fix: json.loads happily returns lists/numbers/strings;
            # only a dict is a usable weight map, so validate before returning.
            if isinstance(parsed, dict):
                return parsed
            logger.warning("Invalid scoring weights in DB, using defaults")
    # Return a copy so callers cannot mutate the module-level defaults.
    return dict(DEFAULT_WEIGHTS)
|
||||
|
||||
|
||||
async def _save_weights(db: AsyncSession, weights: dict[str, float]) -> None:
    """Persist *weights* as JSON under the scoring-weights setting key.

    Updates the existing SystemSetting row in place or inserts a new one.
    No commit is issued here.
    """
    payload = json.dumps(weights)
    stamp = datetime.now(timezone.utc)

    row = (
        await db.execute(
            select(SystemSetting).where(SystemSetting.key == SCORING_WEIGHTS_KEY)
        )
    ).scalar_one_or_none()

    if row is None:
        db.add(
            SystemSetting(
                key=SCORING_WEIGHTS_KEY,
                value=payload,
                updated_at=stamp,
            )
        )
    else:
        row.value = payload
        row.updated_at = stamp
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Dimension score computation
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def _compute_technical_score(db: AsyncSession, symbol: str) -> float | None:
    """Technical dimension score: weighted blend of ADX, EMA and RSI.

    Indicators that cannot be computed (typically too few bars) are
    simply dropped and the remaining weights are re-normalised. Returns
    None when there is no price data or no indicator could be computed.
    """
    from app.services.indicator_service import (
        compute_adx,
        compute_ema,
        compute_rsi,
        _extract_ohlcv,
    )
    from app.services.price_service import query_ohlcv

    records = await query_ohlcv(db, symbol)
    if not records:
        return None

    _, highs, lows, closes, _ = _extract_ohlcv(records)

    parts: list[tuple[float, float]] = []  # (weight, score) pairs

    # Each indicator is best-effort: ADX weight 0.4, EMA 0.3, RSI 0.3.
    for weight, run in (
        (0.4, lambda: compute_adx(highs, lows, closes)),
        (0.3, lambda: compute_ema(closes)),
        (0.3, lambda: compute_rsi(closes)),
    ):
        try:
            parts.append((weight, run()["score"]))
        except Exception:
            pass

    if not parts:
        return None

    denom = sum(w for w, _ in parts)
    if denom == 0:
        return None
    blended = sum(w * s for w, s in parts) / denom
    return max(0.0, min(100.0, blended))
|
||||
|
||||
|
||||
async def _compute_sr_quality_score(db: AsyncSession, symbol: str) -> float | None:
    """S/R-quality dimension score (0-100).

    Sum of three capped factors: count of strong levels (max 40 pts),
    proximity of the nearest level to the current close (max 30 pts),
    and average level strength (max 30 pts). Returns None when price
    data or levels are unavailable.
    """
    from app.services.price_service import query_ohlcv
    from app.services.sr_service import get_sr_levels

    records = await query_ohlcv(db, symbol)
    if not records:
        return None

    current_price = float(records[-1].close)
    if current_price <= 0:
        return None

    try:
        levels = await get_sr_levels(db, symbol)
    except Exception:
        return None

    if not levels:
        return None

    # Factor 1: strong levels (strength >= 50), 10 pts each, capped at 40.
    strong_total = sum(1 for lv in levels if lv.strength >= 50)
    count_score = min(40.0, 10.0 * strong_total)

    # Factor 2: distance of the nearest level from the current price.
    # 0% away = 30 pts, 5% or further = 0 pts.
    rel_distances = [
        abs(lv.price_level - current_price) / current_price for lv in levels
    ]
    nearest = min(rel_distances) if rel_distances else 1.0
    proximity_score = max(0.0, min(30.0, 30.0 * (1.0 - nearest / 0.05)))

    # Factor 3: average strength scaled to a maximum of 30 pts.
    mean_strength = sum(lv.strength for lv in levels) / len(levels)
    strength_score = min(30.0, mean_strength * 0.3)

    return max(0.0, min(100.0, count_score + proximity_score + strength_score))
|
||||
|
||||
|
||||
async def _compute_sentiment_score(db: AsyncSession, symbol: str) -> float | None:
    """Sentiment dimension score, delegated to the sentiment service.

    Any failure in the sentiment service is treated as "no score" (None).
    """
    from app.services.sentiment_service import compute_sentiment_dimension_score

    try:
        score = await compute_sentiment_dimension_score(db, symbol)
    except Exception:
        return None
    return score
|
||||
|
||||
|
||||
async def _compute_fundamental_score(db: AsyncSession, symbol: str) -> float | None:
    """Fundamental dimension score (0-100).

    Averages up to three normalised sub-scores — P/E (lower is better),
    revenue growth and earnings surprise (both higher is better) —
    skipping any metric that is missing. Returns None when no metric
    is usable or there is no fundamentals record.
    """
    from app.services.fundamental_service import get_fundamental

    fund = await get_fundamental(db, symbol)
    if fund is None:
        return None

    components: list[float] = []

    # P/E: 15 or below maps to 100, falling linearly to 50 at 30.
    pe = fund.pe_ratio
    if pe is not None and pe > 0:
        components.append(
            max(0.0, min(100.0, 100.0 - (pe - 15.0) * (100.0 / 30.0)))
        )

    # Revenue growth: 0% -> 50, +20% -> 100, -20% -> 0.
    growth = fund.revenue_growth
    if growth is not None:
        components.append(max(0.0, min(100.0, 50.0 + growth * 2.5)))

    # Earnings surprise: 0% -> 50, +10% -> 100, -10% -> 0.
    surprise = fund.earnings_surprise
    if surprise is not None:
        components.append(max(0.0, min(100.0, 50.0 + surprise * 5.0)))

    if not components:
        return None

    return sum(components) / len(components)
|
||||
|
||||
|
||||
async def _compute_momentum_score(db: AsyncSession, symbol: str) -> float | None:
    """Compute the momentum dimension score.

    Blends the 5-day and 20-day rate of change (ROC) of the close price,
    each mapped so that -10% -> 0, 0% -> 50 and +10% -> 100, weighted
    equally when both are available. Returns None with fewer than 6 bars
    of history (or when no lookback base price is positive).
    """
    from app.services.price_service import query_ohlcv

    records = await query_ohlcv(db, symbol)
    if not records or len(records) < 6:
        return None

    closes = [float(r.close) for r in records]
    latest = closes[-1]

    weighted: list[tuple[float, float]] = []  # (weight, score) pairs

    # Offset 6 == 5-day lookback, offset 21 == 20-day lookback.
    for offset, weight in ((6, 0.5), (21, 0.5)):
        if len(closes) >= offset and closes[-offset] > 0:
            base = closes[-offset]
            roc = (latest - base) / base * 100.0
            weighted.append((weight, max(0.0, min(100.0, 50.0 + roc * 5.0))))

    if not weighted:
        return None

    denom = sum(w for w, _ in weighted)
    if denom == 0:
        return None
    blended = sum(w * s for w, s in weighted) / denom
    return max(0.0, min(100.0, blended))
|
||||
|
||||
|
||||
# Dispatch table: dimension name -> async computer.
# Each computer takes (db, symbol) and returns a 0-100 score, or None
# when there is not enough data to compute that dimension.
_DIMENSION_COMPUTERS = {
    "technical": _compute_technical_score,
    "sr_quality": _compute_sr_quality_score,
    "sentiment": _compute_sentiment_score,
    "fundamental": _compute_fundamental_score,
    "momentum": _compute_momentum_score,
}
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Public API
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
async def compute_dimension_score(
    db: AsyncSession, symbol: str, dimension: str
) -> float | None:
    """Compute a single dimension score for a ticker.

    Returns the score (0-100, clamped) or None if insufficient data.
    Upserts the result into the DimensionScore table; the session is
    NOT committed here — the caller owns the transaction.

    Raises:
        ValidationError: if ``dimension`` is not a known dimension.
        NotFoundError: (from _get_ticker) if the symbol is not tracked.
    """
    if dimension not in _DIMENSION_COMPUTERS:
        raise ValidationError(
            f"Unknown dimension: {dimension}. Valid: {', '.join(DIMENSIONS)}"
        )

    ticker = await _get_ticker(db, symbol)
    # Dispatch to the dimension-specific computer.
    score_val = await _DIMENSION_COMPUTERS[dimension](db, symbol)

    now = datetime.now(timezone.utc)

    # Upsert dimension score: one row per (ticker, dimension).
    result = await db.execute(
        select(DimensionScore).where(
            DimensionScore.ticker_id == ticker.id,
            DimensionScore.dimension == dimension,
        )
    )
    existing = result.scalar_one_or_none()

    # Clamp into the 0-100 contract before persisting.
    if score_val is not None:
        score_val = max(0.0, min(100.0, score_val))

    if existing is not None:
        if score_val is not None:
            existing.score = score_val
            existing.is_stale = False
            existing.computed_at = now
        else:
            # Can't compute — mark stale (previous score value is kept).
            existing.is_stale = True
    elif score_val is not None:
        # No previous row and a score was produced: insert a fresh one.
        # NOTE: when there is no previous row AND no score, nothing is
        # persisted — the dimension simply stays absent.
        dim = DimensionScore(
            ticker_id=ticker.id,
            dimension=dimension,
            score=score_val,
            is_stale=False,
            computed_at=now,
        )
        db.add(dim)

    return score_val
|
||||
|
||||
|
||||
async def compute_all_dimensions(
    db: AsyncSession, symbol: str
) -> dict[str, float | None]:
    """Compute every dimension score for a ticker.

    Returns a mapping of dimension name to its score, with None for
    dimensions that could not be computed.
    """
    return {
        name: await compute_dimension_score(db, symbol, name)
        for name in DIMENSIONS
    }
|
||||
|
||||
|
||||
async def compute_composite_score(
    db: AsyncSession,
    symbol: str,
    weights: dict[str, float] | None = None,
) -> tuple[float | None, list[str]]:
    """Compute composite score from available dimension scores.

    Returns (composite_score, missing_dimensions).
    Dimensions without a usable (non-stale, non-null) score are excluded
    and the remaining weights re-normalized. Dimensions with weight <= 0
    are skipped entirely and are NOT reported as missing.

    The result is upserted into CompositeScore; the session is NOT
    committed here — the caller owns the transaction.
    """
    ticker = await _get_ticker(db, symbol)

    # Fall back to the persisted weight configuration.
    if weights is None:
        weights = await _get_weights(db)

    # Get current dimension scores for this ticker.
    result = await db.execute(
        select(DimensionScore).where(DimensionScore.ticker_id == ticker.id)
    )
    dim_scores = {ds.dimension: ds for ds in result.scalars().all()}

    available: list[tuple[str, float, float]] = []  # (dim, weight, score)
    missing: list[str] = []

    for dim in DIMENSIONS:
        w = weights.get(dim, 0.0)
        if w <= 0:
            # Zero/negative weight: dimension does not participate at all.
            continue
        ds = dim_scores.get(dim)
        if ds is not None and not ds.is_stale and ds.score is not None:
            available.append((dim, w, ds.score))
        else:
            missing.append(dim)

    if not available:
        return None, missing

    # Re-normalize weights over the available dimensions.
    total_weight = sum(w for _, w, _ in available)
    if total_weight == 0:
        return None, missing

    composite = sum(w * s for _, w, s in available) / total_weight
    composite = max(0.0, min(100.0, composite))

    # Persist composite score (upsert: one row per ticker).
    now = datetime.now(timezone.utc)
    comp_result = await db.execute(
        select(CompositeScore).where(CompositeScore.ticker_id == ticker.id)
    )
    existing = comp_result.scalar_one_or_none()

    if existing is not None:
        existing.score = composite
        existing.is_stale = False
        # Record the weights actually used so the score is reproducible.
        existing.weights_json = json.dumps(weights)
        existing.computed_at = now
    else:
        comp = CompositeScore(
            ticker_id=ticker.id,
            score=composite,
            is_stale=False,
            weights_json=json.dumps(weights),
            computed_at=now,
        )
        db.add(comp)

    return composite, missing
|
||||
|
||||
|
||||
async def get_score(
    db: AsyncSession, symbol: str
) -> dict:
    """Get composite + all dimension scores for a ticker.

    Recomputes stale/absent dimensions on demand, then recomputes the
    composite if it is absent or flagged stale, commits, and returns a
    dict suitable for ScoreResponse.

    NOTE(review): if a dimension was just recomputed but the stored
    composite exists and is NOT flagged stale, the composite is not
    refreshed here — confirm that dimension recomputation always marks
    the composite stale elsewhere.
    """
    ticker = await _get_ticker(db, symbol)
    weights = await _get_weights(db)

    # Check for stale dimension scores and recompute them.
    result = await db.execute(
        select(DimensionScore).where(DimensionScore.ticker_id == ticker.id)
    )
    dim_scores = {ds.dimension: ds for ds in result.scalars().all()}

    for dim in DIMENSIONS:
        ds = dim_scores.get(dim)
        if ds is None or ds.is_stale:
            await compute_dimension_score(db, symbol, dim)

    # Check composite staleness.
    comp_result = await db.execute(
        select(CompositeScore).where(CompositeScore.ticker_id == ticker.id)
    )
    comp = comp_result.scalar_one_or_none()

    if comp is None or comp.is_stale:
        await compute_composite_score(db, symbol, weights)

    # Single commit for all recomputation done above.
    await db.commit()

    # Re-fetch everything fresh after the commit.
    result = await db.execute(
        select(DimensionScore).where(DimensionScore.ticker_id == ticker.id)
    )
    dim_scores_list = list(result.scalars().all())

    comp_result = await db.execute(
        select(CompositeScore).where(CompositeScore.ticker_id == ticker.id)
    )
    comp = comp_result.scalar_one_or_none()

    # Build the response payload: present dimensions in canonical order,
    # collecting the ones that still have no row as "missing".
    dimensions = []
    missing = []
    for dim in DIMENSIONS:
        found = next((ds for ds in dim_scores_list if ds.dimension == dim), None)
        if found is not None:
            dimensions.append({
                "dimension": found.dimension,
                "score": found.score,
                "is_stale": found.is_stale,
                "computed_at": found.computed_at,
            })
        else:
            missing.append(dim)

    return {
        "symbol": ticker.symbol,
        "composite_score": comp.score if comp else None,
        "composite_stale": comp.is_stale if comp else False,
        "weights": weights,
        "dimensions": dimensions,
        "missing_dimensions": missing,
        "computed_at": comp.computed_at if comp else None,
    }
|
||||
|
||||
|
||||
async def get_rankings(db: AsyncSession) -> dict:
    """Get all tickers ranked by composite score descending.

    For each ticker: recomputes stale/absent dimensions and the composite
    as needed, then collects composite + dimension scores. Tickers for
    which no composite can be produced are omitted from the rankings.

    Returns dict suitable for RankingResponse.

    NOTE(review): this issues several queries per ticker (N+1 pattern)
    and commits once per recomputed ticker — acceptable for a small
    ticker universe, worth batching if the universe grows.
    """
    weights = await _get_weights(db)

    # Get all tickers, alphabetically for deterministic iteration.
    result = await db.execute(select(Ticker).order_by(Ticker.symbol))
    tickers = list(result.scalars().all())

    rankings: list[dict] = []
    for ticker in tickers:
        # Get the stored composite score, if any.
        comp_result = await db.execute(
            select(CompositeScore).where(CompositeScore.ticker_id == ticker.id)
        )
        comp = comp_result.scalar_one_or_none()

        # If no composite or stale, recompute.
        if comp is None or comp.is_stale:
            # Recompute stale dimensions first, so the composite sees them.
            dim_result = await db.execute(
                select(DimensionScore).where(
                    DimensionScore.ticker_id == ticker.id
                )
            )
            dim_scores = {ds.dimension: ds for ds in dim_result.scalars().all()}
            for dim in DIMENSIONS:
                ds = dim_scores.get(dim)
                if ds is None or ds.is_stale:
                    await compute_dimension_score(db, ticker.symbol, dim)

            await compute_composite_score(db, ticker.symbol, weights)

            await db.commit()

            # Re-fetch the (possibly new) composite row.
            comp_result = await db.execute(
                select(CompositeScore).where(CompositeScore.ticker_id == ticker.id)
            )
            comp = comp_result.scalar_one_or_none()

        # Still no composite (e.g. no data at all) — skip this ticker.
        if comp is None:
            continue

        dim_result = await db.execute(
            select(DimensionScore).where(
                DimensionScore.ticker_id == ticker.id
            )
        )
        dims = [
            {
                "dimension": ds.dimension,
                "score": ds.score,
                "is_stale": ds.is_stale,
                "computed_at": ds.computed_at,
            }
            for ds in dim_result.scalars().all()
        ]

        rankings.append({
            "symbol": ticker.symbol,
            "composite_score": comp.score,
            "dimensions": dims,
        })

    # Sort by composite score descending.
    rankings.sort(key=lambda r: r["composite_score"], reverse=True)

    return {
        "rankings": rankings,
        "weights": weights,
    }
|
||||
|
||||
|
||||
async def update_weights(
    db: AsyncSession, weights: dict[str, float]
) -> dict[str, float]:
    """Update scoring weights and recompute all composite scores.

    Every supplied dimension must be known and its weight non-negative;
    dimensions not supplied default to a weight of 0. Returns the full
    weight map that was persisted.

    Raises:
        ValidationError: for an unknown dimension or a negative weight.
    """
    # Reject unknown dimensions and negative weights up front.
    for name, value in weights.items():
        if name not in DIMENSIONS:
            raise ValidationError(
                f"Unknown dimension: {name}. Valid: {', '.join(DIMENSIONS)}"
            )
        if value < 0:
            raise ValidationError(f"Weight for {name} must be non-negative, got {value}")

    # Fill in a weight for every dimension (0.0 when unspecified).
    full_weights = {name: weights.get(name, 0.0) for name in DIMENSIONS}

    await _save_weights(db, full_weights)

    # Every ticker's composite is now out of date — recompute them all.
    ticker_rows = await db.execute(select(Ticker))
    for ticker in ticker_rows.scalars().all():
        await compute_composite_score(db, ticker.symbol, full_weights)

    await db.commit()
    return full_weights
|
||||
131
app/services/sentiment_service.py
Normal file
131
app/services/sentiment_service.py
Normal file
@@ -0,0 +1,131 @@
|
||||
"""Sentiment service.
|
||||
|
||||
Stores sentiment records and computes the sentiment dimension score
|
||||
using a time-decay weighted average over a configurable lookback window.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import math
|
||||
from datetime import datetime, timedelta, timezone
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import NotFoundError
|
||||
from app.models.sentiment import SentimentScore
|
||||
from app.models.ticker import Ticker
|
||||
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve a symbol (case-insensitively) to its Ticker row.

    Raises NotFoundError when the symbol is not tracked.
    """
    wanted = symbol.strip().upper()
    row = await db.execute(select(Ticker).where(Ticker.symbol == wanted))
    found = row.scalar_one_or_none()
    if found is None:
        raise NotFoundError(f"Ticker not found: {wanted}")
    return found
|
||||
|
||||
|
||||
async def store_sentiment(
    db: AsyncSession,
    symbol: str,
    classification: str,
    confidence: int,
    source: str,
    timestamp: datetime | None = None,
) -> SentimentScore:
    """Persist a new sentiment record for a ticker.

    When no timestamp is supplied, the current UTC time is used.
    Commits the session and returns the refreshed record.

    Raises NotFoundError when the symbol is not tracked.
    """
    ticker = await _get_ticker(db, symbol)
    when = timestamp if timestamp is not None else datetime.now(timezone.utc)

    entry = SentimentScore(
        ticker_id=ticker.id,
        classification=classification,
        confidence=confidence,
        source=source,
        timestamp=when,
    )
    db.add(entry)
    await db.commit()
    await db.refresh(entry)
    return entry
|
||||
|
||||
|
||||
async def get_sentiment_scores(
    db: AsyncSession,
    symbol: str,
    lookback_hours: float = 24,
) -> list[SentimentScore]:
    """Return sentiment records newer than the lookback cutoff, newest first."""
    ticker = await _get_ticker(db, symbol)
    cutoff = datetime.now(timezone.utc) - timedelta(hours=lookback_hours)

    stmt = (
        select(SentimentScore)
        .where(
            SentimentScore.ticker_id == ticker.id,
            SentimentScore.timestamp >= cutoff,
        )
        .order_by(SentimentScore.timestamp.desc())
    )
    rows = await db.execute(stmt)
    return list(rows.scalars().all())
|
||||
|
||||
|
||||
def _classification_to_base_score(classification: str, confidence: int) -> float:
|
||||
"""Map classification + confidence to a base score (0-100).
|
||||
|
||||
bullish → confidence (high confidence = high score)
|
||||
bearish → 100 - confidence (high confidence bearish = low score)
|
||||
neutral → 50
|
||||
"""
|
||||
cl = classification.lower()
|
||||
if cl == "bullish":
|
||||
return float(confidence)
|
||||
elif cl == "bearish":
|
||||
return float(100 - confidence)
|
||||
else:
|
||||
return 50.0
|
||||
|
||||
|
||||
async def compute_sentiment_dimension_score(
    db: AsyncSession,
    symbol: str,
    lookback_hours: float = 24,
    decay_rate: float = 0.1,
) -> float | None:
    """Compute the sentiment dimension score as a time-decayed average.

    Each record in the lookback window contributes its base score (from
    classification + confidence) weighted by exp(-decay_rate * age_hours),
    so fresher sentiment dominates. Returns a value in [0, 100], or None
    when the window holds no records.
    """
    records = await get_sentiment_scores(db, symbol, lookback_hours)
    if not records:
        return None

    now = datetime.now(timezone.utc)
    numerator = 0.0
    denominator = 0.0

    for record in records:
        stamp = record.timestamp
        # Naive timestamps are treated as UTC.
        if stamp.tzinfo is None:
            stamp = stamp.replace(tzinfo=timezone.utc)
        age_hours = (now - stamp).total_seconds() / 3600.0
        decay = math.exp(-decay_rate * age_hours)
        numerator += decay * _classification_to_base_score(
            record.classification, record.confidence
        )
        denominator += decay

    if denominator == 0:
        return None
    return max(0.0, min(100.0, numerator / denominator))
|
||||
274
app/services/sr_service.py
Normal file
274
app/services/sr_service.py
Normal file
@@ -0,0 +1,274 @@
|
||||
"""S/R Detector service.
|
||||
|
||||
Detects support/resistance levels from Volume Profile (HVN/LVN) and
|
||||
Pivot Points (swing highs/lows), assigns strength scores, merges nearby
|
||||
levels, tags as support/resistance, and persists to DB.
|
||||
"""
|
||||
|
||||
from __future__ import annotations

from datetime import datetime, timezone

from sqlalchemy import delete, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.exceptions import NotFoundError, ValidationError
from app.models.sr_level import SRLevel
from app.models.ticker import Ticker
from app.services.indicator_service import (
    _extract_ohlcv,
    compute_pivot_points,
    compute_volume_profile,
)
from app.services.price_service import query_ohlcv
|
||||
|
||||
# Relative tolerance used when matching a bar to a price level and when
# merging nearby levels into one (0.5% of the level's price).
DEFAULT_TOLERANCE = 0.005  # 0.5%
|
||||
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Fetch the Ticker row for a (case-insensitive) symbol.

    Raises NotFoundError when the symbol is not tracked.
    """
    key = symbol.strip().upper()
    res = await db.execute(select(Ticker).where(Ticker.symbol == key))
    ticker = res.scalar_one_or_none()
    if ticker is None:
        raise NotFoundError(f"Ticker not found: {key}")
    return ticker
|
||||
|
||||
|
||||
def _count_price_touches(
    price_level: float,
    highs: list[float],
    lows: list[float],
    closes: list[float],
    tolerance: float = DEFAULT_TOLERANCE,
) -> int:
    """Count how many bars touched/respected a price level within tolerance.

    A bar "touches" the level when the level lies inside the bar's
    [low - tol, high + tol] range, where tol is a relative band around
    the level.

    Fix: the band is now derived from abs(price_level), so a negative
    level cannot produce a negative (inverted) band; a zero level falls
    back to the absolute tolerance value, as before.
    """
    # abs() keeps the band non-negative even for negative price levels.
    tol = abs(price_level) * tolerance if price_level != 0 else tolerance
    count = 0
    # zip() walks the three series in lockstep; the bar count is bounded
    # by len(closes) as in the original loop.
    for low, high in zip(lows[: len(closes)], highs[: len(closes)]):
        if low - tol <= price_level <= high + tol:
            count += 1
    return count
|
||||
|
||||
|
||||
def _strength_from_touches(touches: int, total_bars: int) -> int:
|
||||
"""Convert touch count to a 0-100 strength score.
|
||||
|
||||
More touches relative to total bars = higher strength.
|
||||
Cap at 100.
|
||||
"""
|
||||
if total_bars == 0:
|
||||
return 0
|
||||
# Scale: each touch contributes proportionally, with a multiplier
|
||||
# so that a level touched ~20% of bars gets score ~100
|
||||
raw = (touches / total_bars) * 500.0
|
||||
return max(0, min(100, int(round(raw))))
|
||||
|
||||
|
||||
def _extract_candidate_levels(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    volumes: list[int],
) -> list[tuple[float, str]]:
    """Collect candidate S/R prices from Volume Profile and Pivot Points.

    Returns (price_level, detection_method) pairs. Either detector may
    be skipped when it raises ValidationError (not enough data).
    """
    found: list[tuple[float, str]] = []

    # High- and low-volume nodes from the volume profile.
    try:
        profile = compute_volume_profile(highs, lows, closes, volumes)
    except ValidationError:
        profile = {}  # not enough data for volume profile
    for key in ("hvn", "lvn"):
        found.extend((price, "volume_profile") for price in profile.get(key, []))

    # Swing highs/lows from pivot-point detection.
    try:
        pivots = compute_pivot_points(highs, lows, closes)
    except ValidationError:
        pivots = {}  # not enough data for pivot points
    for key in ("swing_highs", "swing_lows"):
        found.extend((price, "pivot_point") for price in pivots.get(key, []))

    return found
|
||||
|
||||
|
||||
def _merge_levels(
    levels: list[dict],
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[dict]:
    """Consolidate price levels that sit within tolerance of each other.

    Walks the levels in ascending price order; a level close enough to
    the previously consolidated one is folded into it: prices averaged
    (rounded to 4 decimals), strengths summed and capped at 100. When
    levels from different detection methods merge, the consolidated
    level is re-labelled "merged". Input dicts are not mutated.
    """
    if not levels:
        return []

    result: list[dict] = []
    for candidate in sorted(levels, key=lambda lv: lv["price_level"]):
        if result:
            anchor = result[-1]
            base_price = anchor["price_level"]
            band = base_price * tolerance if base_price != 0 else tolerance
            if abs(candidate["price_level"] - base_price) <= band:
                # Fold the candidate into the anchor level.
                anchor["strength"] = min(
                    100, anchor["strength"] + candidate["strength"]
                )
                anchor["price_level"] = round(
                    (base_price + candidate["price_level"]) / 2.0, 4
                )
                if anchor["detection_method"] != candidate["detection_method"]:
                    anchor["detection_method"] = "merged"
                continue
        result.append(dict(candidate))
    return result
|
||||
|
||||
|
||||
def _tag_levels(
|
||||
levels: list[dict],
|
||||
current_price: float,
|
||||
) -> list[dict]:
|
||||
"""Tag each level as 'support' or 'resistance' relative to current price."""
|
||||
for level in levels:
|
||||
if level["price_level"] < current_price:
|
||||
level["type"] = "support"
|
||||
else:
|
||||
level["type"] = "resistance"
|
||||
return levels
|
||||
|
||||
|
||||
def detect_sr_levels(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    volumes: list[int],
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[dict]:
    """Detect, score, merge and tag S/R levels from OHLCV series.

    Pipeline: candidate extraction (volume profile + pivots) -> strength
    scoring by touch count -> merging of nearby levels -> support /
    resistance tagging relative to the latest close. Returns dicts with
    keys price_level, type, strength, detection_method, sorted by
    strength descending. Empty input yields an empty list.
    """
    if not closes:
        return []

    candidates = _extract_candidate_levels(highs, lows, closes, volumes)
    if not candidates:
        return []

    bar_count = len(closes)
    last_close = closes[-1]

    # Score each candidate by how often price respected it.
    scored = [
        {
            "price_level": price,
            "strength": _strength_from_touches(
                _count_price_touches(price, highs, lows, closes, tolerance),
                bar_count,
            ),
            "detection_method": method,
            "type": "",  # filled in by _tag_levels after merging
        }
        for price, method in candidates
    ]

    consolidated = _merge_levels(scored, tolerance)
    labelled = _tag_levels(consolidated, last_close)
    labelled.sort(key=lambda lv: lv["strength"], reverse=True)
    return labelled
|
||||
|
||||
|
||||
async def recalculate_sr_levels(
    db: AsyncSession,
    symbol: str,
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[SRLevel]:
    """Recalculate S/R levels for a ticker and persist to DB.

    1. Fetch OHLCV data (clearing any stored levels when none exists)
    2. Detect levels
    3. Delete old levels for the ticker
    4. Insert new levels
    5. Return new levels sorted by strength desc

    Raises NotFoundError when the ticker is unknown.

    Fix: uses timezone-aware datetime.now(timezone.utc) instead of the
    naive (and deprecated since Python 3.12) datetime.utcnow(), matching
    the timestamp convention used by the rest of the codebase.
    """
    ticker = await _get_ticker(db, symbol)

    records = await query_ohlcv(db, symbol)
    if not records:
        # No OHLCV data — clear any existing levels and stop.
        await db.execute(
            delete(SRLevel).where(SRLevel.ticker_id == ticker.id)
        )
        await db.commit()
        return []

    _, highs, lows, closes, volumes = _extract_ohlcv(records)

    levels = detect_sr_levels(highs, lows, closes, volumes, tolerance)

    # Delete old levels before inserting the fresh set.
    await db.execute(
        delete(SRLevel).where(SRLevel.ticker_id == ticker.id)
    )

    # Aware UTC timestamp, consistent with the other services.
    now = datetime.now(timezone.utc)
    new_models: list[SRLevel] = []
    for lvl in levels:
        model = SRLevel(
            ticker_id=ticker.id,
            price_level=lvl["price_level"],
            type=lvl["type"],
            strength=lvl["strength"],
            detection_method=lvl["detection_method"],
            created_at=now,
        )
        db.add(model)
        new_models.append(model)

    await db.commit()

    # Refresh so DB-generated fields (ids) are populated.
    for m in new_models:
        await db.refresh(m)

    return new_models
|
||||
|
||||
|
||||
async def get_sr_levels(
    db: AsyncSession,
    symbol: str,
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[SRLevel]:
    """Return S/R levels for a ticker, sorted by strength descending.

    MVP behaviour: levels are recalculated from scratch on every call.
    """
    levels = await recalculate_sr_levels(db, symbol, tolerance)
    return levels
|
||||
57
app/services/ticker_service.py
Normal file
57
app/services/ticker_service.py
Normal file
@@ -0,0 +1,57 @@
|
||||
"""Ticker Registry service: add, delete, and list tracked tickers."""
|
||||
|
||||
import re
|
||||
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import DuplicateError, NotFoundError, ValidationError
|
||||
from app.models.ticker import Ticker
|
||||
|
||||
|
||||
async def add_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Add a new ticker after validation.

    The symbol is stripped and upper-cased, then must be non-empty and
    purely alphanumeric.

    Raises:
        ValidationError: for an empty or non-alphanumeric symbol.
        DuplicateError: when the symbol is already tracked.
    """
    candidate = symbol.strip()
    if not candidate:
        raise ValidationError("Ticker symbol must not be empty or whitespace-only")

    candidate = candidate.upper()
    if re.fullmatch(r"[A-Z0-9]+", candidate) is None:
        raise ValidationError(
            f"Ticker symbol must be alphanumeric: {candidate}"
        )

    # Reject duplicates before inserting.
    existing = await db.execute(select(Ticker).where(Ticker.symbol == candidate))
    if existing.scalar_one_or_none() is not None:
        raise DuplicateError(f"Ticker already exists: {candidate}")

    ticker = Ticker(symbol=candidate)
    db.add(ticker)
    await db.commit()
    await db.refresh(ticker)
    return ticker
|
||||
|
||||
|
||||
async def delete_ticker(db: AsyncSession, symbol: str) -> None:
    """Delete a ticker; associated data is removed via cascade.

    Raises NotFoundError if the symbol is not tracked.
    """
    wanted = symbol.strip().upper()
    row = await db.execute(select(Ticker).where(Ticker.symbol == wanted))
    ticker = row.scalar_one_or_none()
    if ticker is None:
        raise NotFoundError(f"Ticker not found: {wanted}")

    await db.delete(ticker)
    await db.commit()
|
||||
|
||||
|
||||
async def list_tickers(db: AsyncSession) -> list[Ticker]:
    """Return every tracked ticker in alphabetical order by symbol."""
    rows = await db.execute(select(Ticker).order_by(Ticker.symbol.asc()))
    return list(rows.scalars().all())
|
||||
288
app/services/watchlist_service.py
Normal file
288
app/services/watchlist_service.py
Normal file
@@ -0,0 +1,288 @@
|
||||
"""Watchlist service.
|
||||
|
||||
Auto-populates top-X tickers by composite score (default 10), supports
|
||||
manual add/remove (tagged, not subject to auto-population), enforces
|
||||
cap (auto + 10 manual, default max 20), and updates auto entries on
|
||||
score recomputation.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from datetime import datetime, timezone
|
||||
|
||||
from sqlalchemy import delete, func, select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from app.exceptions import DuplicateError, NotFoundError, ValidationError
|
||||
from app.models.score import CompositeScore, DimensionScore
|
||||
from app.models.sr_level import SRLevel
|
||||
from app.models.ticker import Ticker
|
||||
from app.models.trade_setup import TradeSetup
|
||||
from app.models.watchlist import WatchlistEntry
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Number of auto-populated (top-scoring) watchlist slots per user.
DEFAULT_AUTO_SIZE = 10
# Maximum number of manually-added watchlist entries per user.
MAX_MANUAL = 10
|
||||
|
||||
|
||||
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve a (case-insensitive) symbol to its Ticker row or raise NotFoundError."""
    lookup = symbol.strip().upper()
    rows = await db.execute(select(Ticker).where(Ticker.symbol == lookup))
    match = rows.scalar_one_or_none()
    if match is None:
        raise NotFoundError(f"Ticker not found: {lookup}")
    return match
|
||||
|
||||
|
||||
async def auto_populate(
    db: AsyncSession,
    user_id: int,
    top_x: int = DEFAULT_AUTO_SIZE,
) -> None:
    """Auto-populate a user's watchlist with the top-X tickers by composite score.

    Replaces the user's existing auto entries; manual entries are left
    untouched (and a ticker already pinned manually is not duplicated).
    Only non-stale composite scores are considered.

    NOTE(review): ends with flush(), not commit() — presumably the
    caller owns the transaction; confirm at call sites.
    """
    # Get top-X tickers by composite score (non-stale, descending)
    stmt = (
        select(CompositeScore)
        .where(CompositeScore.is_stale == False)  # noqa: E712
        .order_by(CompositeScore.score.desc())
        .limit(top_x)
    )
    result = await db.execute(stmt)
    top_scores = list(result.scalars().all())
    top_ticker_ids = {cs.ticker_id for cs in top_scores}

    # Delete existing auto entries for this user
    await db.execute(
        delete(WatchlistEntry).where(
            WatchlistEntry.user_id == user_id,
            WatchlistEntry.entry_type == "auto",
        )
    )

    # Get manual ticker_ids so we don't duplicate
    manual_result = await db.execute(
        select(WatchlistEntry.ticker_id).where(
            WatchlistEntry.user_id == user_id,
            WatchlistEntry.entry_type == "manual",
        )
    )
    manual_ticker_ids = {row[0] for row in manual_result.all()}

    # Insert fresh auto entries, skipping manually-pinned tickers.
    now = datetime.now(timezone.utc)
    for ticker_id in top_ticker_ids:
        if ticker_id in manual_ticker_ids:
            continue  # Already on watchlist as manual
        entry = WatchlistEntry(
            user_id=user_id,
            ticker_id=ticker_id,
            entry_type="auto",
            added_at=now,
        )
        db.add(entry)

    await db.flush()
|
||||
|
||||
|
||||
async def add_manual_entry(
    db: AsyncSession,
    user_id: int,
    symbol: str,
) -> WatchlistEntry:
    """Add a manual watchlist entry.

    Raises DuplicateError if already on watchlist.
    Raises ValidationError if manual cap exceeded.

    Args:
        db: Active async database session.
        user_id: Owner of the watchlist.
        symbol: Ticker symbol; normalised by _get_ticker.

    Returns:
        The persisted WatchlistEntry (committed and refreshed).

    Raises:
        NotFoundError: the symbol does not exist (from _get_ticker).
        DuplicateError: the ticker is already on the watchlist (manual OR auto).
        ValidationError: the manual cap or the total cap is reached.

    NOTE(review): the duplicate/cap checks and the insert are separate
    statements in one transaction — concurrent adds for the same user could
    race past the caps. Acceptable for a single-user flow; confirm if not.
    """
    ticker = await _get_ticker(db, symbol)

    # Check if already on watchlist (either entry type blocks the add).
    existing = await db.execute(
        select(WatchlistEntry).where(
            WatchlistEntry.user_id == user_id,
            WatchlistEntry.ticker_id == ticker.id,
        )
    )
    if existing.scalar_one_or_none() is not None:
        raise DuplicateError(f"Ticker already on watchlist: {ticker.symbol}")

    # Count current manual entries to enforce the manual-only cap first.
    count_result = await db.execute(
        select(func.count()).select_from(WatchlistEntry).where(
            WatchlistEntry.user_id == user_id,
            WatchlistEntry.entry_type == "manual",
        )
    )
    manual_count = count_result.scalar() or 0

    if manual_count >= MAX_MANUAL:
        raise ValidationError(
            f"Manual watchlist cap reached ({MAX_MANUAL}). "
            "Remove an entry before adding a new one."
        )

    # Check total cap (auto + manual combined).
    total_result = await db.execute(
        select(func.count()).select_from(WatchlistEntry).where(
            WatchlistEntry.user_id == user_id,
        )
    )
    total_count = total_result.scalar() or 0
    max_total = DEFAULT_AUTO_SIZE + MAX_MANUAL

    if total_count >= max_total:
        raise ValidationError(
            f"Watchlist cap reached ({max_total}). "
            "Remove an entry before adding a new one."
        )

    # All checks passed: persist the entry and commit immediately.
    entry = WatchlistEntry(
        user_id=user_id,
        ticker_id=ticker.id,
        entry_type="manual",
        added_at=datetime.now(timezone.utc),
    )
    db.add(entry)
    await db.commit()
    # Refresh so DB-generated fields (e.g. primary key) are populated.
    await db.refresh(entry)
    return entry
|
||||
|
||||
|
||||
async def remove_entry(
    db: AsyncSession,
    user_id: int,
    symbol: str,
) -> None:
    """Remove a watchlist entry (manual or auto) and commit.

    Raises:
        NotFoundError: the symbol is unknown, or the ticker is not on
            this user's watchlist.
    """
    ticker = await _get_ticker(db, symbol)

    lookup = select(WatchlistEntry).where(
        WatchlistEntry.user_id == user_id,
        WatchlistEntry.ticker_id == ticker.id,
    )
    target = (await db.execute(lookup)).scalar_one_or_none()

    if target is None:
        raise NotFoundError(f"Ticker not on watchlist: {ticker.symbol}")

    await db.delete(target)
    await db.commit()
|
||||
|
||||
|
||||
async def _enrich_entry(
    db: AsyncSession,
    entry: WatchlistEntry,
    symbol: str,
) -> dict:
    """Build enriched watchlist entry dict with scores, R:R, and SR levels.

    Args:
        db: Active async database session.
        entry: The watchlist row to decorate.
        symbol: Ticker symbol, passed in to avoid another Ticker lookup.

    Returns:
        A plain dict (JSON-serialisable shape) with composite score,
        per-dimension scores, the best trade setup's R:R, and SR levels.
        Score/R:R fields are None when no corresponding row exists.

    NOTE(review): issues four queries per entry; get_watchlist calls this
    once per watchlist row (N+1). Fine for small capped watchlists —
    revisit if the cap grows.
    """
    ticker_id = entry.ticker_id

    # Composite score (at most one row per ticker expected)
    comp_result = await db.execute(
        select(CompositeScore).where(CompositeScore.ticker_id == ticker_id)
    )
    comp = comp_result.scalar_one_or_none()

    # Dimension scores (one row per scoring dimension)
    dim_result = await db.execute(
        select(DimensionScore).where(DimensionScore.ticker_id == ticker_id)
    )
    dims = [
        {"dimension": ds.dimension, "score": ds.score}
        for ds in dim_result.scalars().all()
    ]

    # Best trade setup (highest R:R) for this ticker
    setup_result = await db.execute(
        select(TradeSetup)
        .where(TradeSetup.ticker_id == ticker_id)
        .order_by(TradeSetup.rr_ratio.desc())
        .limit(1)
    )
    setup = setup_result.scalar_one_or_none()

    # Active SR levels, strongest first
    sr_result = await db.execute(
        select(SRLevel)
        .where(SRLevel.ticker_id == ticker_id)
        .order_by(SRLevel.strength.desc())
    )
    sr_levels = [
        {
            "price_level": lv.price_level,
            "type": lv.type,
            "strength": lv.strength,
        }
        for lv in sr_result.scalars().all()
    ]

    return {
        "symbol": symbol,
        "entry_type": entry.entry_type,
        "composite_score": comp.score if comp else None,
        "dimensions": dims,
        "rr_ratio": setup.rr_ratio if setup else None,
        "rr_direction": setup.direction if setup else None,
        "sr_levels": sr_levels,
        "added_at": entry.added_at,
    }
|
||||
|
||||
|
||||
async def get_watchlist(
    db: AsyncSession,
    user_id: int,
    sort_by: str = "composite",
) -> list[dict]:
    """Get user's watchlist with enriched data.

    Runs auto_populate first to ensure auto entries are current,
    then enriches each entry with scores, R:R, and SR levels.

    sort_by: "composite", "rr", or a dimension name
    (e.g. "technical", "sr_quality", "sentiment", "fundamental", "momentum").
    """
    # Refresh the auto-selected portion of the watchlist, then commit the
    # rebuilt rows so the read below sees them.
    await auto_populate(db, user_id)
    await db.commit()

    # Pull every entry for this user together with its ticker symbol.
    listing = (
        select(WatchlistEntry, Ticker.symbol)
        .join(Ticker, WatchlistEntry.ticker_id == Ticker.id)
        .where(WatchlistEntry.user_id == user_id)
    )
    rows = (await db.execute(listing)).all()

    enriched: list[dict] = []
    for watch_entry, ticker_symbol in rows:
        enriched.append(await _enrich_entry(db, watch_entry, ticker_symbol))

    # Build the sort key for the requested ordering. Missing values map to
    # -1 so they sink to the bottom of the descending sort.
    if sort_by == "composite":

        def sort_key(item: dict):
            score = item["composite_score"]
            return score if score is not None else -1

    elif sort_by == "rr":

        def sort_key(item: dict):
            ratio = item["rr_ratio"]
            return ratio if ratio is not None else -1

    else:
        # Treat sort_by as a dimension name and look it up per entry.
        def sort_key(item: dict):
            for dim in item["dimensions"]:
                if dim["dimension"] == sort_by:
                    return dim["score"]
            return -1.0

    enriched.sort(key=sort_key, reverse=True)
    return enriched
|
||||
30
deploy/nginx.conf
Normal file
30
deploy/nginx.conf
Normal file
@@ -0,0 +1,30 @@
|
||||
# Nginx reverse proxy configuration for stock-data-backend
# Domain: signal.thiessen.io → localhost:8000 (uvicorn)
#
# Installation:
# sudo cp deploy/nginx.conf /etc/nginx/sites-available/stock-data-backend
# sudo ln -s /etc/nginx/sites-available/stock-data-backend /etc/nginx/sites-enabled/
# sudo nginx -t && sudo systemctl reload nginx
#
# SSL/TLS (recommended):
# sudo apt install certbot python3-certbot-nginx
# sudo certbot --nginx -d signal.thiessen.io
# Certbot will automatically modify this file to add SSL directives.

server {
    listen 80;
    server_name signal.thiessen.io;

    # Redirect all HTTP to HTTPS (uncomment after certbot setup)
    # return 301 https://$host$request_uri;

    location / {
        # Forward all traffic to the local uvicorn worker.
        proxy_pass http://127.0.0.1:8000;
        # Preserve original host and client identity for the backend.
        proxy_set_header Host $host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        # Tells the app whether the client connected over http or https.
        proxy_set_header X-Forwarded-Proto $scheme;
        # Generous read timeout for slow API calls; quick connect timeout
        # so a dead backend fails fast.
        proxy_read_timeout 120s;
        proxy_connect_timeout 10s;
    }
}
|
||||
44
deploy/setup_db.sh
Executable file
44
deploy/setup_db.sh
Executable file
@@ -0,0 +1,44 @@
|
||||
#!/bin/bash
# Idempotent database setup script for stock-data-backend
# Creates PostgreSQL user and database if they don't exist, then runs migrations.
# Safe to run multiple times.
#
# Usage:
# chmod +x deploy/setup_db.sh
# ./deploy/setup_db.sh
#
# Customize these via environment variables:
# DB_NAME=stock_data_backend DB_USER=stock_backend DB_PASS=changeme ./deploy/setup_db.sh
#
# NOTE(review): DB_NAME/DB_USER/DB_PASS are interpolated directly into the
# SQL heredoc below — values containing quotes or backslashes will break or
# alter the statements. Keep them to simple identifier-safe strings.

# Abort on the first failing command.
set -e

DB_NAME="${DB_NAME:-stock_data_backend}"
DB_USER="${DB_USER:-stock_backend}"
DB_PASS="${DB_PASS:-changeme}"

echo "Setting up database: ${DB_NAME} with user: ${DB_USER}"

# Create role and database if they don't exist.
# Runs as the postgres superuser; the heredoc is unquoted so the shell
# expands ${DB_*} before psql sees the SQL. \gexec executes the generated
# CREATE DATABASE statement only when the SELECT produced a row.
sudo -u postgres psql <<EOF
DO \$\$
BEGIN
IF NOT EXISTS (SELECT FROM pg_roles WHERE rolname = '${DB_USER}') THEN
CREATE ROLE ${DB_USER} WITH LOGIN PASSWORD '${DB_PASS}';
RAISE NOTICE 'Created role ${DB_USER}';
ELSE
RAISE NOTICE 'Role ${DB_USER} already exists';
END IF;
END \$\$;

SELECT 'CREATE DATABASE ${DB_NAME} OWNER ${DB_USER}'
WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = '${DB_NAME}')\gexec

GRANT ALL PRIVILEGES ON DATABASE ${DB_NAME} TO ${DB_USER};
EOF

echo "Database setup complete. Running migrations..."

# Run Alembic migrations (requires alembic on PATH and a configured
# alembic.ini in the working directory).
alembic upgrade head

echo "Migrations complete."
|
||||
29
deploy/stock-data-backend.service
Normal file
29
deploy/stock-data-backend.service
Normal file
@@ -0,0 +1,29 @@
|
||||
# systemd service for stock-data-backend
#
# Installation:
# sudo cp deploy/stock-data-backend.service /etc/systemd/system/
# sudo systemctl daemon-reload
# sudo systemctl enable stock-data-backend
# sudo systemctl start stock-data-backend
#
# Customize:
# - User/Group: create with `sudo useradd -r -s /usr/sbin/nologin stockdata`
# - WorkingDirectory: adjust if installed elsewhere
# - EnvironmentFile: ensure .env exists at the specified path

[Unit]
Description=Stock Data Backend
# Start after networking and the local PostgreSQL server.
# (After= orders startup only; it is not a hard dependency.)
After=network.target postgresql.service

[Service]
Type=exec
# Dedicated unprivileged service account (see Customize above).
User=stockdata
Group=stockdata
WorkingDirectory=/opt/stock-data-backend
EnvironmentFile=/opt/stock-data-backend/.env
# Single uvicorn worker bound to loopback; nginx proxies public traffic to it.
ExecStart=/opt/stock-data-backend/.venv/bin/uvicorn app.main:app --host 127.0.0.1 --port 8000 --workers 1
# Restart on any exit with a short back-off.
Restart=always
RestartSec=5

[Install]
WantedBy=multi-user.target
|
||||
15
frontend/index.html
Normal file
15
frontend/index.html
Normal file
@@ -0,0 +1,15 @@
|
||||
<!doctype html>
<html lang="en" class="dark">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<title>Signal Dashboard</title>
<!-- Preconnect hints speed up the Inter font download below -->
<link rel="preconnect" href="https://fonts.googleapis.com" />
<link rel="preconnect" href="https://fonts.gstatic.com" crossorigin />
<link href="https://fonts.googleapis.com/css2?family=Inter:wght@300;400;500;600;700&display=swap" rel="stylesheet" />
</head>
<body class="bg-[#0a0e1a] text-gray-100 font-sans">
<!-- React application mount point; entry script is src/main.tsx -->
<div id="root"></div>
<script type="module" src="/src/main.tsx"></script>
</body>
</html>
|
||||
3392
frontend/package-lock.json
generated
Normal file
3392
frontend/package-lock.json
generated
Normal file
File diff suppressed because it is too large
Load Diff
32
frontend/package.json
Normal file
32
frontend/package.json
Normal file
@@ -0,0 +1,32 @@
|
||||
{
|
||||
"name": "signal-dashboard",
|
||||
"private": true,
|
||||
"version": "0.1.0",
|
||||
"type": "module",
|
||||
"scripts": {
|
||||
"dev": "vite",
|
||||
"build": "tsc -b && vite build",
|
||||
"preview": "vite preview",
|
||||
"test": "vitest --run",
|
||||
"test:watch": "vitest"
|
||||
},
|
||||
"dependencies": {
|
||||
"react": "^18.3.1",
|
||||
"react-dom": "^18.3.1",
|
||||
"react-router-dom": "^6.28.0",
|
||||
"@tanstack/react-query": "^5.62.0",
|
||||
"zustand": "^5.0.1",
|
||||
"axios": "^1.7.9",
|
||||
"recharts": "^2.14.1"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@types/react": "^18.3.12",
|
||||
"@types/react-dom": "^18.3.1",
|
||||
"@vitejs/plugin-react": "^4.3.4",
|
||||
"typescript": "~5.6.3",
|
||||
"vite": "^5.4.11",
|
||||
"tailwindcss": "^3.4.16",
|
||||
"postcss": "^8.4.49",
|
||||
"autoprefixer": "^10.4.20"
|
||||
}
|
||||
}
|
||||
6
frontend/postcss.config.js
Normal file
6
frontend/postcss.config.js
Normal file
@@ -0,0 +1,6 @@
|
||||
// PostCSS pipeline for the frontend build (picked up by Vite).
// Tailwind expands utility classes; Autoprefixer adds vendor prefixes.
export default {
  plugins: {
    tailwindcss: {},
    autoprefixer: {},
  },
};
|
||||
31
frontend/src/App.tsx
Normal file
31
frontend/src/App.tsx
Normal file
@@ -0,0 +1,31 @@
|
||||
import { Routes, Route, Navigate } from 'react-router-dom';
import ProtectedRoute from './components/auth/ProtectedRoute';
import AppShell from './components/layout/AppShell';
import LoginPage from './pages/LoginPage';
import RegisterPage from './pages/RegisterPage';
import WatchlistPage from './pages/WatchlistPage';
import TickerDetailPage from './pages/TickerDetailPage';
import ScannerPage from './pages/ScannerPage';
import RankingsPage from './pages/RankingsPage';
import AdminPage from './pages/AdminPage';

/**
 * Top-level route table for the dashboard.
 * - /login and /register are public.
 * - All other routes are wrapped in ProtectedRoute and render inside AppShell.
 * - /admin is nested in a second ProtectedRoute with requireAdmin.
 * - "/" redirects to /watchlist.
 */
export default function App() {
  return (
    <Routes>
      <Route path="/login" element={<LoginPage />} />
      <Route path="/register" element={<RegisterPage />} />
      <Route element={<ProtectedRoute />}>
        <Route element={<AppShell />}>
          <Route path="/" element={<Navigate to="/watchlist" />} />
          <Route path="/watchlist" element={<WatchlistPage />} />
          <Route path="/ticker/:symbol" element={<TickerDetailPage />} />
          <Route path="/scanner" element={<ScannerPage />} />
          <Route path="/rankings" element={<RankingsPage />} />
          {/* Admin-only subtree */}
          <Route element={<ProtectedRoute requireAdmin />}>
            <Route path="/admin" element={<AdminPage />} />
          </Route>
        </Route>
      </Route>
    </Routes>
  );
}
|
||||
83
frontend/src/api/admin.ts
Normal file
83
frontend/src/api/admin.ts
Normal file
@@ -0,0 +1,83 @@
|
||||
import apiClient from './client';
|
||||
import type { AdminUser, SystemSetting } from '../lib/types';
|
||||
|
||||
// Users
|
||||
export function listUsers() {
|
||||
return apiClient.get<AdminUser[]>('admin/users').then((r) => r.data);
|
||||
}
|
||||
|
||||
export function createUser(data: {
|
||||
username: string;
|
||||
password: string;
|
||||
role: string;
|
||||
has_access: boolean;
|
||||
}) {
|
||||
return apiClient.post<AdminUser>('admin/users', data).then((r) => r.data);
|
||||
}
|
||||
|
||||
export function updateAccess(userId: number, hasAccess: boolean) {
|
||||
return apiClient
|
||||
.put<{ message: string }>(`admin/users/${userId}/access`, {
|
||||
has_access: hasAccess,
|
||||
})
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
export function resetPassword(userId: number, password: string) {
|
||||
return apiClient
|
||||
.put<{ message: string }>(`admin/users/${userId}/password`, { password })
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
// Settings
|
||||
export function listSettings() {
|
||||
return apiClient
|
||||
.get<SystemSetting[]>('admin/settings')
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
export function updateSetting(key: string, value: string) {
|
||||
return apiClient
|
||||
.put<{ message: string }>(`admin/settings/${key}`, { value })
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
export function updateRegistration(enabled: boolean) {
|
||||
return apiClient
|
||||
.put<{ message: string }>('admin/settings/registration', { enabled })
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
// Jobs
|
||||
export interface JobStatus {
|
||||
name: string;
|
||||
label: string;
|
||||
enabled: boolean;
|
||||
next_run_at: string | null;
|
||||
registered: boolean;
|
||||
}
|
||||
|
||||
export function listJobs() {
|
||||
return apiClient.get<JobStatus[]>('admin/jobs').then((r) => r.data);
|
||||
}
|
||||
|
||||
export function toggleJob(jobName: string, enabled: boolean) {
|
||||
return apiClient
|
||||
.put<{ message: string }>(`admin/jobs/${jobName}/toggle`, { enabled })
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
export function triggerJob(jobName: string) {
|
||||
return apiClient
|
||||
.post<{ message: string }>(`admin/jobs/${jobName}/trigger`)
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
// Data cleanup
|
||||
export function cleanupData(olderThanDays: number) {
|
||||
return apiClient
|
||||
.post<{ message: string }>('admin/data/cleanup', {
|
||||
older_than_days: olderThanDays,
|
||||
})
|
||||
.then((r) => r.data);
|
||||
}
|
||||
14
frontend/src/api/auth.ts
Normal file
14
frontend/src/api/auth.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import apiClient from './client';
|
||||
import type { TokenResponse } from '../lib/types';
|
||||
|
||||
export function login(username: string, password: string) {
|
||||
return apiClient
|
||||
.post<TokenResponse>('auth/login', { username, password })
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
export function register(username: string, password: string) {
|
||||
return apiClient
|
||||
.post<{ message: string }>('auth/register', { username, password })
|
||||
.then((r) => r.data);
|
||||
}
|
||||
69
frontend/src/api/client.ts
Normal file
69
frontend/src/api/client.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import axios from 'axios';
import type { APIEnvelope } from '../lib/types';
import { useAuthStore } from '../stores/authStore';

/**
 * Typed error class for API errors, providing structured error handling
 * across the application. Thrown for both envelope-level errors (2xx body
 * with status 'error') and transport/HTTP failures.
 */
export class ApiError extends Error {
  constructor(message: string) {
    super(message);
    this.name = 'ApiError';
  }
}

/**
 * Central Axios instance configured for the Stock Data Backend API.
 * - Base URL: /api/v1/ (relative to the current origin)
 * - Timeout: 30 seconds
 * - JSON content type
 */
const apiClient = axios.create({
  baseURL: '/api/v1/',
  timeout: 30_000,
  headers: { 'Content-Type': 'application/json' },
});

/**
 * Request interceptor: attaches JWT Bearer token from the auth store
 * to every outgoing request when a token is available.
 */
apiClient.interceptors.request.use((config) => {
  const token = useAuthStore.getState().token;
  if (token) {
    config.headers.Authorization = `Bearer ${token}`;
  }
  return config;
});

/**
 * Response interceptor:
 * - Success path: unwraps the { status, data, error } envelope, returning
 *   only the `data` field. Throws ApiError if envelope status is 'error'.
 * - Error path: handles 401 by clearing auth and redirecting to login.
 *   All other errors are wrapped in ApiError with a descriptive message.
 *
 * NOTE(review): assumes every 2xx body is an APIEnvelope — confirm no
 * endpoint returns a bare payload, or `envelope.data` will be undefined.
 */
apiClient.interceptors.response.use(
  (response) => {
    const envelope = response.data as APIEnvelope;
    if (envelope.status === 'error') {
      throw new ApiError(envelope.error ?? 'Unknown API error');
    }
    // Return unwrapped data — callers receive the inner payload directly.
    // We override the response shape here; downstream API functions cast as needed.
    response.data = envelope.data;
    return response;
  },
  (error) => {
    if (axios.isAxiosError(error) && error.response?.status === 401) {
      // Token missing/expired: drop credentials and force a full page
      // navigation to login (any in-memory SPA state is discarded).
      useAuthStore.getState().logout();
      window.location.href = '/login';
    }
    // Prefer the backend envelope's error text, then the transport message.
    const msg =
      error.response?.data?.error ?? error.message ?? 'Network error';
    throw new ApiError(msg);
  },
);

export default apiClient;
|
||||
8
frontend/src/api/fundamentals.ts
Normal file
8
frontend/src/api/fundamentals.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import apiClient from './client';
|
||||
import type { FundamentalResponse } from '../lib/types';
|
||||
|
||||
export function getFundamentals(symbol: string) {
|
||||
return apiClient
|
||||
.get<FundamentalResponse>(`fundamentals/${symbol}`)
|
||||
.then((r) => r.data);
|
||||
}
|
||||
5
frontend/src/api/health.ts
Normal file
5
frontend/src/api/health.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
import apiClient from './client';
|
||||
|
||||
export function check() {
|
||||
return apiClient.get<{ status: string }>('health').then((r) => r.data);
|
||||
}
|
||||
24
frontend/src/api/indicators.ts
Normal file
24
frontend/src/api/indicators.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
import apiClient from './client';
|
||||
import type { IndicatorResult, EMACrossResult } from '../lib/types';
|
||||
|
||||
interface IndicatorEnvelopeData {
|
||||
symbol: string;
|
||||
indicator: IndicatorResult;
|
||||
}
|
||||
|
||||
interface EMACrossEnvelopeData {
|
||||
symbol: string;
|
||||
ema_cross: EMACrossResult;
|
||||
}
|
||||
|
||||
export function getIndicator(symbol: string, indicatorType: string) {
|
||||
return apiClient
|
||||
.get<IndicatorEnvelopeData>(`indicators/${symbol}/${indicatorType}`)
|
||||
.then((r) => (r.data as unknown as IndicatorEnvelopeData).indicator);
|
||||
}
|
||||
|
||||
export function getEMACross(symbol: string) {
|
||||
return apiClient
|
||||
.get<EMACrossEnvelopeData>(`indicators/${symbol}/ema-cross`)
|
||||
.then((r) => (r.data as unknown as EMACrossEnvelopeData).ema_cross);
|
||||
}
|
||||
7
frontend/src/api/ingestion.ts
Normal file
7
frontend/src/api/ingestion.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
import apiClient from './client';
|
||||
|
||||
export function fetchData(symbol: string) {
|
||||
return apiClient
|
||||
.post<{ message: string }>(`ingestion/fetch/${symbol}`)
|
||||
.then((r) => r.data);
|
||||
}
|
||||
6
frontend/src/api/ohlcv.ts
Normal file
6
frontend/src/api/ohlcv.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import apiClient from './client';
|
||||
import type { OHLCVBar } from '../lib/types';
|
||||
|
||||
export function getOHLCV(symbol: string) {
|
||||
return apiClient.get<OHLCVBar[]>(`ohlcv/${symbol}`).then((r) => r.data);
|
||||
}
|
||||
18
frontend/src/api/scores.ts
Normal file
18
frontend/src/api/scores.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import apiClient from './client';
|
||||
import type { ScoreResponse, RankingsResponse } from '../lib/types';
|
||||
|
||||
export function getScores(symbol: string) {
|
||||
return apiClient
|
||||
.get<ScoreResponse>(`scores/${symbol}`)
|
||||
.then((r) => r.data);
|
||||
}
|
||||
|
||||
export function getRankings() {
|
||||
return apiClient.get<RankingsResponse>('rankings').then((r) => r.data);
|
||||
}
|
||||
|
||||
export function updateWeights(weights: Record<string, number>) {
|
||||
return apiClient
|
||||
.put<{ message: string }>('scores/weights', weights)
|
||||
.then((r) => r.data);
|
||||
}
|
||||
8
frontend/src/api/sentiment.ts
Normal file
8
frontend/src/api/sentiment.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import apiClient from './client';
|
||||
import type { SentimentResponse } from '../lib/types';
|
||||
|
||||
export function getSentiment(symbol: string) {
|
||||
return apiClient
|
||||
.get<SentimentResponse>(`sentiment/${symbol}`)
|
||||
.then((r) => r.data);
|
||||
}
|
||||
8
frontend/src/api/sr-levels.ts
Normal file
8
frontend/src/api/sr-levels.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
import apiClient from './client';
|
||||
import type { SRLevelResponse } from '../lib/types';
|
||||
|
||||
export function getLevels(symbol: string) {
|
||||
return apiClient
|
||||
.get<SRLevelResponse>(`sr-levels/${symbol}`)
|
||||
.then((r) => r.data);
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user