commit 61ab24490d850f606ee45e594725317270fe0d25 Author: Dennis Thiessen Date: Fri Feb 20 17:31:01 2026 +0100 first commit diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..e6cee4c --- /dev/null +++ b/.env.example @@ -0,0 +1,34 @@ +# Database +DATABASE_URL=postgresql+asyncpg://stock_backend:changeme@localhost:5432/stock_data_backend + +# Auth +JWT_SECRET=change-this-to-a-random-secret +JWT_EXPIRY_MINUTES=60 + +# OHLCV Provider — Alpaca Markets +ALPACA_API_KEY= +ALPACA_API_SECRET= + +# Sentiment Provider — Gemini with Search Grounding +GEMINI_API_KEY= +GEMINI_MODEL=gemini-2.0-flash + +# Fundamentals Provider — Financial Modeling Prep +FMP_API_KEY= + +# Scheduled Jobs +DATA_COLLECTOR_FREQUENCY=daily +SENTIMENT_POLL_INTERVAL_MINUTES=30 +FUNDAMENTAL_FETCH_FREQUENCY=daily +RR_SCAN_FREQUENCY=daily + +# Scoring Defaults +DEFAULT_WATCHLIST_AUTO_SIZE=10 +DEFAULT_RR_THRESHOLD=3.0 + +# Database Pool +DB_POOL_SIZE=5 +DB_POOL_TIMEOUT=30 + +# Logging +LOG_LEVEL=INFO diff --git a/.gitea/workflows/deploy.yml b/.gitea/workflows/deploy.yml new file mode 100644 index 0000000..ac51152 --- /dev/null +++ b/.gitea/workflows/deploy.yml @@ -0,0 +1,68 @@ +# Gitea Actions CI/CD pipeline: lint → test → deploy +# Triggers on push to main branch. 
+# +# Required secrets (set in Gitea repo settings): +# DEPLOY_HOST — server IP or hostname +# DEPLOY_USER — SSH username on the server +# DEPLOY_KEY — SSH private key for deployment + +name: Deploy + +on: + push: + branches: [main] + +jobs: + lint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + - run: pip install ruff + - run: ruff check app/ + + test: + needs: lint + runs-on: ubuntu-latest + services: + postgres: + image: postgres:16 + env: + POSTGRES_DB: test_db + POSTGRES_USER: test_user + POSTGRES_PASSWORD: test_pass + ports: + - 5432:5432 + steps: + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 + with: + python-version: "3.12" + - run: pip install -e ".[dev]" + - run: alembic upgrade head + env: + DATABASE_URL: postgresql+asyncpg://test_user:test_pass@localhost:5432/test_db + - run: pytest --tb=short + env: + DATABASE_URL: postgresql+asyncpg://test_user:test_pass@localhost:5432/test_db + + deploy: + needs: test + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Deploy via SSH + uses: appleboy/ssh-action@v1 + with: + host: ${{ secrets.DEPLOY_HOST }} + username: ${{ secrets.DEPLOY_USER }} + key: ${{ secrets.DEPLOY_KEY }} + script: | + cd /opt/stock-data-backend + git pull origin main + source .venv/bin/activate + pip install -e . 
+ alembic upgrade head + sudo systemctl restart stock-data-backend diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..2ac3889 --- /dev/null +++ b/.gitignore @@ -0,0 +1,32 @@ +# Python +__pycache__/ +*.py[cod] +*.egg-info/ +*.egg +dist/ +build/ +.venv/ +.env + +# Hypothesis (PBT) +.hypothesis/ + +# Pytest +.pytest_cache/ + +# IDE +.vscode/ +.idea/ +*.swp +*.swo + +# OS +.DS_Store +Thumbs.db + +# Frontend +frontend/node_modules/ +frontend/dist/ + +# Alembic +alembic/versions/__pycache__/ diff --git a/.kiro/specs/signal-dashboard/.config.kiro b/.kiro/specs/signal-dashboard/.config.kiro new file mode 100644 index 0000000..e2880dd --- /dev/null +++ b/.kiro/specs/signal-dashboard/.config.kiro @@ -0,0 +1 @@ +{"specId": "fa730cf4-a14d-4f62-8993-fd7db6fe25cc", "workflowType": "requirements-first", "specType": "feature"} \ No newline at end of file diff --git a/.kiro/specs/signal-dashboard/design.md b/.kiro/specs/signal-dashboard/design.md new file mode 100644 index 0000000..13a1e08 --- /dev/null +++ b/.kiro/specs/signal-dashboard/design.md @@ -0,0 +1,645 @@ +# Design Document: Signal Dashboard + +## Overview + +Signal Dashboard is a React 18 + TypeScript SPA that consumes the existing Stock Data Backend REST API (`/api/v1/`). It provides authenticated users with views for watchlist monitoring, per-ticker analysis, trade setup scanning, composite-score rankings, and admin management. + +The frontend lives in `frontend/` within the existing project root. Vite builds static assets to `frontend/dist/`, which Nginx serves on `signal.thiessen.io`. API requests to `/api/v1/` are proxied to the FastAPI backend — no CORS needed. 
+ +### Key Technical Decisions + +| Decision | Choice | Rationale | +|---|---|---| +| Build tool | Vite 5 | Fast HMR, native TS/React support, small output | +| Routing | React Router v6 | Standard, supports layout routes and guards | +| Server state | TanStack Query v5 | Caching, deduplication, background refetch | +| Client state | Zustand | Minimal auth store, no boilerplate | +| Styling | Tailwind CSS v3 | Utility-first, dark mode built-in, small bundle | +| Charts | Recharts | React-native charting, composable, lightweight | +| HTTP | Axios | Interceptors for auth/envelope unwrapping | +| Testing | Vitest + React Testing Library + fast-check | Vite-native test runner, property-based testing | + +## Architecture + +```mermaid +graph TD + subgraph Browser + Router[React Router] + Pages[Page Components] + Hooks[TanStack Query Hooks] + Store[Zustand Auth Store] + API[API Client - Axios] + end + + Router --> Pages + Pages --> Hooks + Hooks --> API + API --> Store + Store --> API + + subgraph Server + Nginx[Nginx - static files + proxy] + Backend[FastAPI Backend] + end + + API -->|/api/v1/*| Nginx + Nginx -->|proxy_pass| Backend + Nginx -->|static| Browser +``` + +### Request Flow + +1. Component mounts → calls a TanStack Query hook (e.g., `useWatchlist()`) +2. Hook calls an API client function (e.g., `api.watchlist.list()`) +3. Axios sends request with JWT Bearer header from Zustand store +4. Axios response interceptor unwraps `{ status, data, error }` envelope +5. On 401 → Zustand clears token, React Router redirects to `/login` +6. 
TanStack Query caches the result, component renders data + +### Directory Structure + +``` +frontend/ +├── index.html +├── package.json +├── tsconfig.json +├── vite.config.ts +├── tailwind.config.ts +├── postcss.config.js +├── src/ +│ ├── main.tsx # App entry, providers +│ ├── App.tsx # Router + layout +│ ├── api/ +│ │ ├── client.ts # Axios instance, interceptors +│ │ ├── auth.ts # login, register +│ │ ├── watchlist.ts # watchlist CRUD +│ │ ├── tickers.ts # ticker CRUD +│ │ ├── scores.ts # scores, rankings, weights +│ │ ├── trades.ts # trade setups +│ │ ├── ohlcv.ts # OHLCV data +│ │ ├── indicators.ts # technical indicators +│ │ ├── sr-levels.ts # support/resistance +│ │ ├── sentiment.ts # sentiment data +│ │ ├── fundamentals.ts # fundamental data +│ │ ├── ingestion.ts # manual data fetch +│ │ ├── admin.ts # admin endpoints +│ │ └── health.ts # health check +│ ├── hooks/ +│ │ ├── useAuth.ts # login/register/logout mutations +│ │ ├── useWatchlist.ts # watchlist queries + mutations +│ │ ├── useTickers.ts # ticker queries + mutations +│ │ ├── useScores.ts # scores, rankings queries +│ │ ├── useTrades.ts # trade setup queries +│ │ ├── useTickerDetail.ts # parallel queries for detail view +│ │ └── useAdmin.ts # admin queries + mutations +│ ├── stores/ +│ │ └── authStore.ts # Zustand: token, user, role +│ ├── pages/ +│ │ ├── LoginPage.tsx +│ │ ├── RegisterPage.tsx +│ │ ├── WatchlistPage.tsx +│ │ ├── TickerDetailPage.tsx +│ │ ├── ScannerPage.tsx +│ │ ├── RankingsPage.tsx +│ │ └── AdminPage.tsx +│ ├── components/ +│ │ ├── layout/ +│ │ │ ├── AppShell.tsx # Sidebar + main content +│ │ │ ├── Sidebar.tsx +│ │ │ └── MobileNav.tsx +│ │ ├── auth/ +│ │ │ └── ProtectedRoute.tsx +│ │ ├── charts/ +│ │ │ └── CandlestickChart.tsx +│ │ ├── ui/ +│ │ │ ├── ScoreCard.tsx +│ │ │ ├── Toast.tsx +│ │ │ ├── Skeleton.tsx +│ │ │ ├── Badge.tsx +│ │ │ └── ConfirmDialog.tsx +│ │ ├── watchlist/ +│ │ │ ├── WatchlistTable.tsx +│ │ │ └── AddTickerForm.tsx +│ │ ├── scanner/ +│ │ │ └── TradeTable.tsx +│ │ 
├── rankings/ +│ │ │ ├── RankingsTable.tsx +│ │ │ └── WeightsForm.tsx +│ │ ├── ticker/ +│ │ │ ├── SentimentPanel.tsx +│ │ │ ├── FundamentalsPanel.tsx +│ │ │ ├── IndicatorSelector.tsx +│ │ │ └── SROverlay.tsx +│ │ └── admin/ +│ │ ├── UserTable.tsx +│ │ ├── SettingsForm.tsx +│ │ ├── JobControls.tsx +│ │ └── DataCleanup.tsx +│ ├── lib/ +│ │ ├── format.ts # Number/date formatting utilities +│ │ └── types.ts # Shared TypeScript interfaces +│ └── styles/ +│ └── globals.css # Tailwind directives + custom vars +└── tests/ + ├── unit/ + └── property/ +``` + +## Components and Interfaces + +### API Client (`src/api/client.ts`) + +Central Axios instance with interceptors: + +```typescript +// Axios instance configuration +const apiClient = axios.create({ + baseURL: '/api/v1/', + timeout: 30_000, + headers: { 'Content-Type': 'application/json' }, +}); + +// Request interceptor: attach JWT +apiClient.interceptors.request.use((config) => { + const token = useAuthStore.getState().token; + if (token) config.headers.Authorization = `Bearer ${token}`; + return config; +}); + +// Response interceptor: unwrap envelope, handle 401 +apiClient.interceptors.response.use( + (response) => { + const envelope = response.data as APIEnvelope; + if (envelope.status === 'error') throw new ApiError(envelope.error); + return envelope.data; + }, + (error) => { + if (error.response?.status === 401) { + useAuthStore.getState().logout(); + } + const msg = error.response?.data?.error ?? error.message ?? 
'Network error'; + throw new ApiError(msg); + } +); +``` + +### Auth Store (`src/stores/authStore.ts`) + +```typescript +interface AuthState { + token: string | null; + username: string | null; + role: 'admin' | 'user' | null; + login: (token: string) => void; + logout: () => void; +} +``` + +- `login()` decodes the JWT payload to extract `sub` (username) and `role`, stores token in `localStorage` +- `logout()` clears token from state and `localStorage`, TanStack Query cache is cleared on logout + +### Protected Route (`src/components/auth/ProtectedRoute.tsx`) + +```typescript +// Wraps routes that require authentication +// Props: requireAdmin?: boolean +// If no token → redirect to /login +// If requireAdmin && role !== 'admin' → redirect to /watchlist +``` + +### Router Layout + +```typescript +// Route structure + + } /> + } /> + }> + }> + } /> + } /> + } /> + } /> + } /> + }> + } /> + + + + +``` + +### TanStack Query Hooks Pattern + +Each domain has a hook file that exports query/mutation hooks: + +```typescript +// Example: useWatchlist.ts +export function useWatchlist() { + return useQuery({ + queryKey: ['watchlist'], + queryFn: () => api.watchlist.list(), + }); +} + +export function useAddToWatchlist() { + const qc = useQueryClient(); + return useMutation({ + mutationFn: (symbol: string) => api.watchlist.add(symbol), + onSuccess: () => qc.invalidateQueries({ queryKey: ['watchlist'] }), + }); +} +``` + +### Key UI Components + +**ScoreCard**: Displays composite score with a colored ring/bar (green > 70, yellow 40-70, red < 40) and expandable dimension breakdown. + +**CandlestickChart**: Recharts `ComposedChart` with custom `Bar` shapes for OHLCV candles. S/R levels rendered as `ReferenceLine` components with color coding (green = support, red = resistance). + +**Toast System**: Lightweight toast using React context + portal. Auto-dismiss after 4 seconds. Error toasts in red, success in green. 
**Skeleton**: Tailwind `animate-pulse` placeholder blocks matching the shape of cards/tables during loading states. + +### Formatting Utilities (`src/lib/format.ts`) + +```typescript +formatPrice(n: number): string // "1,234.56" +formatPercent(n: number): string // "12.34%" +formatLargeNumber(n: number): string // "1.23B", "456.7M", "12.3K" +formatDate(d: string): string // "Jan 15, 2025" +formatDateTime(d: string): string // "Jan 15, 2025 2:30 PM" +``` + +## Data Models + +### TypeScript Interfaces (`src/lib/types.ts`) + +```typescript +// API envelope (before unwrapping) +interface APIEnvelope<T> { + status: 'success' | 'error'; + data: T | null; + error: string | null; +} + +// Auth +interface TokenResponse { + access_token: string; + token_type: string; +} + +// Watchlist +interface WatchlistEntry { + symbol: string; + entry_type: 'auto' | 'manual'; + composite_score: number | null; + dimensions: DimensionScore[]; + rr_ratio: number | null; + rr_direction: string | null; + sr_levels: SRLevelSummary[]; + added_at: string; +} + +interface DimensionScore { + dimension: string; + score: number; +} + +interface SRLevelSummary { + price_level: number; + type: 'support' | 'resistance'; + strength: number; +} + +// OHLCV +interface OHLCVBar { + id: number; + ticker_id: number; + date: string; + open: number; + high: number; + low: number; + close: number; + volume: number; + created_at: string; +} + +// Scores +interface ScoreResponse { + symbol: string; + composite_score: number | null; + composite_stale: boolean; + weights: Record<string, number>; + dimensions: DimensionScoreDetail[]; + missing_dimensions: string[]; + computed_at: string | null; +} + +interface DimensionScoreDetail { + dimension: string; + score: number; + is_stale: boolean; + computed_at: string | null; +} + +interface RankingEntry { + symbol: string; + composite_score: number; + dimensions: DimensionScoreDetail[]; +} + +interface RankingsResponse { + rankings: RankingEntry[]; + weights: Record<string, number>; +} + +// Trade
Setups +interface TradeSetup { + id: number; + symbol: string; + direction: string; + entry_price: number; + stop_loss: number; + target: number; + rr_ratio: number; + composite_score: number; + detected_at: string; +} + +// S/R Levels +interface SRLevel { + id: number; + price_level: number; + type: 'support' | 'resistance'; + strength: number; + detection_method: string; + created_at: string; +} + +interface SRLevelResponse { + symbol: string; + levels: SRLevel[]; + count: number; +} + +// Sentiment +interface SentimentScore { + id: number; + classification: 'bullish' | 'bearish' | 'neutral'; + confidence: number; + source: string; + timestamp: string; +} + +interface SentimentResponse { + symbol: string; + scores: SentimentScore[]; + count: number; + dimension_score: number | null; + lookback_hours: number; +} + +// Fundamentals +interface FundamentalResponse { + symbol: string; + pe_ratio: number | null; + revenue_growth: number | null; + earnings_surprise: number | null; + market_cap: number | null; + fetched_at: string | null; +} + +// Indicators +interface IndicatorResult { + indicator_type: string; + values: Record<string, number>; + score: number; + bars_used: number; +} + +interface EMACrossResult { + short_ema: number; + long_ema: number; + short_period: number; + long_period: number; + signal: 'bullish' | 'bearish' | 'neutral'; +} + +// Tickers +interface Ticker { + id: number; + symbol: string; + created_at: string; +} + +// Admin +interface AdminUser { + id: number; + username: string; + role: string; + has_access: boolean; + created_at: string | null; + updated_at: string | null; +} + +interface SystemSetting { + key: string; + value: string; + updated_at: string | null; +} +``` + + +## Correctness Properties + +*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do.
Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* + +### Property 1: Token storage round-trip + +*For any* valid JWT token string, storing it via `authStore.login(token)` and then reading `authStore.token` and `localStorage.getItem('token')` should both return the original token string. + +**Validates: Requirements 1.1, 1.6** + +### Property 2: Bearer token attachment + +*For any* non-null token in the auth store, every request made through the API client should include an `Authorization` header with value `Bearer {token}`. + +**Validates: Requirements 1.3, 12.3** + +### Property 3: Registration form validation + +*For any* username string shorter than 1 character or password string shorter than 6 characters, the registration form should reject submission. *For any* username of length >= 1 and password of length >= 6, the form should allow submission. + +**Validates: Requirements 1.2** + +### Property 4: Route protection based on auth state + +*For any* protected route path, if no token exists in the auth store, navigation should redirect to `/login`. If a valid token exists, navigation should render the protected component. + +**Validates: Requirements 2.1, 2.2** + +### Property 5: API envelope unwrapping + +*For any* API response with `status: "success"`, the API client should return the `data` field. *For any* API response with `status: "error"`, the API client should throw an error containing the `error` field message. + +**Validates: Requirements 12.2** + +### Property 6: Watchlist entry rendering completeness + +*For any* watchlist entry, the rendered output should contain the symbol, entry type (with a visual badge distinguishing "auto" from "manual"), composite score, dimension scores, R:R ratio, R:R direction, and S/R levels. 
+ +**Validates: Requirements 3.2, 3.7** + +### Property 7: Symbol click navigation + +*For any* symbol displayed in the watchlist table, scanner table, or rankings table, clicking that symbol should trigger navigation to `/ticker/{symbol}`. + +**Validates: Requirements 3.6, 5.6, 6.4** + +### Property 8: Score card rendering + +*For any* score response with a composite score and dimension scores, the ScoreCard component should render the composite score value and one entry per dimension with its name and score. + +**Validates: Requirements 4.4** + +### Property 9: Sentiment panel rendering + +*For any* sentiment response, the rendered SentimentPanel should display the classification, confidence value, and dimension score. + +**Validates: Requirements 4.5** + +### Property 10: Fundamentals panel rendering + +*For any* fundamentals response, the rendered FundamentalsPanel should display P/E ratio, revenue growth, earnings surprise, and market cap (or a placeholder for null values). + +**Validates: Requirements 4.6** + +### Property 11: Trade setup rendering + +*For any* trade setup, the rendered table row should contain the symbol, direction, entry price, stop loss, target, R:R ratio, composite score, and detection timestamp. + +**Validates: Requirements 5.2** + +### Property 12: Scanner filtering + +*For any* list of trade setups, minimum R:R filter value, and direction filter selection: all displayed setups should have `rr_ratio >= minRR` and (if direction is not "both") `direction === selectedDirection`. + +**Validates: Requirements 5.3, 5.4** + +### Property 13: Scanner sorting + +*For any* list of trade setups and a selected sort column, the displayed rows should be ordered by that column's values (ascending or descending based on sort direction). 
+ +**Validates: Requirements 5.5** + +### Property 14: Rankings display order + +*For any* rankings response, the rendered list should display entries in descending order by composite score, with each entry showing rank position, symbol, composite score, and all dimension scores. + +**Validates: Requirements 6.1, 6.2** + +### Property 15: Admin user table rendering + +*For any* admin user record, the rendered table row should contain the username, role, and access status. + +**Validates: Requirements 7.2** + +### Property 16: Number formatting + +*For any* finite number, `formatPrice` should produce a string with exactly 2 decimal places. `formatPercent` should produce a string ending with `%`. `formatLargeNumber` should produce a string with an appropriate suffix (`K` for thousands, `M` for millions, `B` for billions) for values >= 1000, and no suffix for smaller values. + +**Validates: Requirements 13.4** + +### Property 17: Weights form rendering + +*For any* weights map (dimension name → number), the WeightsForm should render one labeled numeric input per dimension key. 
+ +**Validates: Requirements 11.1** + +## Error Handling + +### API Client Error Strategy + +All errors flow through the Axios response interceptor and are surfaced via the Toast system: + +| Error Type | Detection | Behavior | +|---|---|---| +| 401 Unauthorized | `error.response.status === 401` | Clear auth store, redirect to `/login` | +| API error envelope | `envelope.status === 'error'` | Throw `ApiError` with `envelope.error` message | +| Network error | No `error.response` | Throw `ApiError` with "Network error — check your connection" | +| Timeout | Axios timeout (30s) | Throw `ApiError` with "Request timed out" | +| Unknown | Catch-all | Throw `ApiError` with `error.message` fallback | + +### Component-Level Error Handling + +- **TanStack Query `onError`**: Each mutation hook passes errors to the toast system +- **Query error states**: Components check `isError` and render inline error messages +- **Ticker Detail partial failure**: Each data section (scores, sentiment, fundamentals, S/R, OHLCV) is an independent query. If one fails, the others still render. Failed sections show an inline error with a retry button. +- **Form validation**: Client-side validation before API calls (username length, password length, numeric inputs). Invalid submissions are blocked with inline field errors. 
+ +### Toast System + +```typescript +type ToastType = 'success' | 'error' | 'info'; + +interface Toast { + id: string; + type: ToastType; + message: string; +} + +// Auto-dismiss after 4 seconds +// Max 3 toasts visible at once (oldest dismissed first) +// Error toasts: red accent, Success: green accent, Info: blue accent +``` + +## Testing Strategy + +### Testing Stack + +| Tool | Purpose | +|---|---| +| Vitest | Test runner (Vite-native, fast) | +| React Testing Library | Component rendering + DOM queries | +| fast-check | Property-based testing | +| MSW (Mock Service Worker) | API mocking for integration tests | + +### Unit Tests + +Unit tests cover specific examples, edge cases, and integration points: + +- **Auth flow**: Login stores token, logout clears token, 401 triggers logout +- **API client**: Envelope unwrapping for success/error, timeout config, Bearer header +- **Routing**: Unauthenticated redirect, admin-only route guard, non-admin redirect +- **Component rendering**: Each page renders with mock data, loading skeletons appear, error states display +- **Form validation**: Empty username rejected, short password rejected, valid inputs accepted +- **Confirmation dialog**: Delete ticker shows confirm before API call +- **Partial failure**: Ticker detail renders available sections when one query fails + +### Property-Based Tests + +Each correctness property maps to a single `fast-check` property test with minimum 100 iterations. 
Tests are tagged with the property reference: + +```typescript +// Feature: signal-dashboard, Property 16: Number formatting +test.prop([fc.float({ min: -1e15, max: 1e15, noNaN: true })], (n) => { + const result = formatPrice(n); + expect(result).toMatch(/\.\d{2}$/); +}); +``` + +Property tests focus on: +- **Pure functions**: `format.ts` utilities (Property 16) +- **Store logic**: Auth store token round-trip (Property 1) +- **API client interceptors**: Envelope unwrapping (Property 5), Bearer attachment (Property 2) +- **Filtering/sorting logic**: Scanner filter functions (Properties 12, 13) +- **Component rendering**: Given generated data, components render required fields (Properties 6, 8–11, 14, 15, 17) +- **Routing guards**: Protected route behavior based on auth state (Property 4) + +### Test Configuration + +- Vitest config in `frontend/vitest.config.ts` with jsdom environment +- `fast-check` configured with `{ numRuns: 100 }` minimum per property +- MSW handlers for all API endpoints used in integration tests +- Each property test tagged: `Feature: signal-dashboard, Property {N}: {title}` diff --git a/.kiro/specs/signal-dashboard/requirements.md b/.kiro/specs/signal-dashboard/requirements.md new file mode 100644 index 0000000..0ee3372 --- /dev/null +++ b/.kiro/specs/signal-dashboard/requirements.md @@ -0,0 +1,200 @@ +# Requirements Document + +## Introduction + +Signal Dashboard is a single-page application (SPA) frontend for the Stock Data Backend API. The Dashboard provides authenticated users with a visual interface to monitor watchlists, analyze individual tickers across multiple dimensions (technical, S/R, sentiment, fundamentals, momentum), scan for asymmetric risk:reward trade setups, view composite-score rankings, and manage system administration. The Dashboard consumes the existing REST API at `/api/v1/` and is served as static files by Nginx on the same domain (`signal.thiessen.io`). 
+ +Technology choice: React 18 + TypeScript + Vite, with TanStack Query for data fetching, Zustand for auth state, Tailwind CSS for styling, Recharts for charting, and React Router for navigation. This stack prioritizes maintainability, small bundle size, and a modern developer experience without framework bloat. + +## Glossary + +- **Dashboard**: The Signal Dashboard SPA frontend application +- **API_Client**: The HTTP client module that communicates with the backend REST API +- **Auth_Module**: The authentication subsystem handling login, registration, token storage, and token refresh +- **Watchlist_View**: The main overview page displaying the user's watchlist entries with enriched score data +- **Ticker_Detail_View**: The per-ticker analysis page showing price chart, indicators, S/R levels, sentiment, and fundamentals +- **Scanner_View**: The trade setup scanner page displaying R:R filtered setups +- **Rankings_View**: The page displaying all tickers sorted by composite score +- **Admin_Panel**: The administration interface for user management, job control, system settings, and data cleanup +- **Router**: The client-side routing module controlling navigation and access guards +- **Token_Store**: The client-side storage mechanism for JWT access tokens +- **Chart_Component**: The interactive price chart component rendering OHLCV candlestick data with overlays +- **Score_Card**: A UI component displaying a composite score and its dimension breakdown +- **Toast_System**: The notification subsystem displaying transient success/error messages to the user + +## Requirements + +### Requirement 1: JWT Authentication Flow + +**User Story:** As a user, I want to log in and register so that I can access the dashboard securely. + +#### Acceptance Criteria + +1. WHEN a user submits valid credentials on the login form, THE Auth_Module SHALL send a POST request to `/api/v1/auth/login` and store the returned JWT token in the Token_Store +2. 
WHEN a user submits a registration form with a username (minimum 1 character) and password (minimum 6 characters), THE Auth_Module SHALL send a POST request to `/api/v1/auth/register` and display a success message via the Toast_System +3. WHILE a valid JWT token exists in the Token_Store, THE API_Client SHALL include the token as a Bearer authorization header on all subsequent API requests +4. WHEN the API returns a 401 Unauthorized response, THE Auth_Module SHALL clear the Token_Store and redirect the user to the login page +5. IF the login or registration request fails, THEN THE Auth_Module SHALL display the error message from the API response via the Toast_System +6. THE Token_Store SHALL persist the JWT token in browser localStorage so that sessions survive page reloads + +### Requirement 2: Protected Routing and Role-Based Access + +**User Story:** As a user, I want the app to enforce access control so that unauthenticated users cannot access protected pages and only admins can access admin features. + +#### Acceptance Criteria + +1. WHILE no valid JWT token exists in the Token_Store, THE Router SHALL redirect navigation to any protected route to the login page +2. WHILE a valid JWT token exists in the Token_Store, THE Router SHALL allow navigation to protected routes (Watchlist_View, Ticker_Detail_View, Scanner_View, Rankings_View) +3. WHILE the authenticated user has an admin role, THE Router SHALL allow navigation to the Admin_Panel +4. WHILE the authenticated user has a non-admin role, THE Router SHALL redirect navigation to the Admin_Panel to the Watchlist_View +5. THE Router SHALL provide a navigation sidebar or top bar with links to all accessible views for the authenticated user + +### Requirement 3: Watchlist Overview + +**User Story:** As a user, I want to see my watchlist with composite scores, dimension breakdowns, and R:R ratios so that I can quickly assess my tracked tickers. + +#### Acceptance Criteria + +1. 
WHEN the Watchlist_View loads, THE Dashboard SHALL fetch data from `GET /api/v1/watchlist` and display each entry as a card or row +2. THE Watchlist_View SHALL display for each entry: symbol, entry type (auto/manual), composite score, dimension scores, R:R ratio, R:R direction, and nearest S/R levels +3. WHEN a user clicks the add-to-watchlist control and enters a valid ticker symbol, THE Dashboard SHALL send a POST request to `/api/v1/watchlist/{symbol}` and refresh the watchlist +4. WHEN a user clicks the remove button on a watchlist entry, THE Dashboard SHALL send a DELETE request to `/api/v1/watchlist/{symbol}` and remove the entry from the display +5. IF the watchlist API request fails, THEN THE Dashboard SHALL display the error message via the Toast_System +6. WHEN a user clicks on a watchlist entry symbol, THE Router SHALL navigate to the Ticker_Detail_View for that symbol +7. THE Watchlist_View SHALL visually distinguish auto-populated entries from manual entries using a badge or label + +### Requirement 4: Ticker Detail View + +**User Story:** As a user, I want to see a comprehensive analysis of a single ticker including price chart, indicators, S/R levels, sentiment, and fundamentals so that I can make informed decisions. + +#### Acceptance Criteria + +1. WHEN the Ticker_Detail_View loads for a given symbol, THE Dashboard SHALL fetch data in parallel from: `GET /api/v1/ohlcv/{symbol}`, `GET /api/v1/scores/{symbol}`, `GET /api/v1/sr-levels/{symbol}`, `GET /api/v1/sentiment/{symbol}`, and `GET /api/v1/fundamentals/{symbol}` +2. THE Chart_Component SHALL render OHLCV data as a candlestick chart with date on the x-axis and price on the y-axis +3. THE Chart_Component SHALL overlay S/R levels as horizontal lines on the price chart, color-coded by type (support in green, resistance in red) +4. THE Ticker_Detail_View SHALL display the composite score and all dimension scores using Score_Card components +5. 
THE Ticker_Detail_View SHALL display sentiment data including classification (bullish/bearish/neutral), confidence, and the time-decay weighted dimension score +6. THE Ticker_Detail_View SHALL display fundamental data including P/E ratio, revenue growth, earnings surprise, and market cap +7. WHEN a user selects an indicator type (ADX, EMA, RSI, ATR, volume_profile, pivot_points), THE Dashboard SHALL fetch data from `GET /api/v1/indicators/{symbol}/{indicator_type}` and display the result with its normalized score +8. WHEN a user requests the EMA cross signal, THE Dashboard SHALL fetch data from `GET /api/v1/indicators/{symbol}/ema-cross` and display the signal (bullish/bearish/neutral) with short and long EMA values +9. IF any data fetch fails for the Ticker_Detail_View, THEN THE Dashboard SHALL display an inline error message for the failed section while rendering the remaining sections normally + +### Requirement 5: Trade Setup Scanner + +**User Story:** As a user, I want to scan for trade setups with favorable risk:reward ratios so that I can find asymmetric opportunities. + +#### Acceptance Criteria + +1. WHEN the Scanner_View loads, THE Dashboard SHALL fetch data from `GET /api/v1/trades` and display all trade setups in a sortable table +2. THE Scanner_View SHALL display for each trade setup: symbol, direction (long/short), entry price, stop loss, target, R:R ratio, composite score, and detection timestamp +3. THE Scanner_View SHALL allow the user to filter trade setups by minimum R:R ratio using a numeric input +4. THE Scanner_View SHALL allow the user to filter trade setups by direction (long, short, or both) +5. THE Scanner_View SHALL allow the user to sort the table by any column (R:R ratio, composite score, symbol, detection time) +6. 
WHEN a user clicks on a trade setup symbol, THE Router SHALL navigate to the Ticker_Detail_View for that symbol + +### Requirement 6: Rankings View + +**User Story:** As a user, I want to see all tickers ranked by composite score so that I can identify the strongest opportunities. + +#### Acceptance Criteria + +1. WHEN the Rankings_View loads, THE Dashboard SHALL fetch data from `GET /api/v1/rankings` and display tickers sorted by composite score descending +2. THE Rankings_View SHALL display for each ticker: rank position, symbol, composite score, and all dimension scores +3. THE Rankings_View SHALL display the current scoring weights used for composite calculation +4. WHEN a user clicks on a ranked ticker symbol, THE Router SHALL navigate to the Ticker_Detail_View for that symbol + +### Requirement 7: Admin Panel — User Management + +**User Story:** As an admin, I want to manage user accounts so that I can control access to the platform. + +#### Acceptance Criteria + +1. WHEN the Admin_Panel user management section loads, THE Dashboard SHALL fetch data from `GET /api/v1/admin/users` and display all users in a table +2. THE Admin_Panel SHALL display for each user: username, role, and access status +3. WHEN an admin clicks the create-user control and submits a username, password, role, and access flag, THE Dashboard SHALL send a POST request to `/api/v1/admin/users` and refresh the user list +4. WHEN an admin toggles a user's access status, THE Dashboard SHALL send a PUT request to `/api/v1/admin/users/{user_id}/access` with the new access flag +5. WHEN an admin resets a user's password, THE Dashboard SHALL send a PUT request to `/api/v1/admin/users/{user_id}/password` with the new password +6. 
IF any admin user management request fails, THEN THE Dashboard SHALL display the error message via the Toast_System + +### Requirement 8: Admin Panel — System Settings and Jobs + +**User Story:** As an admin, I want to manage system settings, scheduled jobs, and data cleanup so that I can maintain the platform. + +#### Acceptance Criteria + +1. WHEN the Admin_Panel settings section loads, THE Dashboard SHALL fetch data from `GET /api/v1/admin/settings` and display all settings as editable fields +2. WHEN an admin updates a system setting value, THE Dashboard SHALL send a PUT request to `/api/v1/admin/settings/{key}` with the new value +3. WHEN an admin toggles the registration setting, THE Dashboard SHALL send a PUT request to `/api/v1/admin/settings/registration` with the enabled flag +4. WHEN an admin toggles a scheduled job on or off, THE Dashboard SHALL send a PUT request to `/api/v1/admin/jobs/{job_name}/toggle` with the enabled flag +5. WHEN an admin triggers a scheduled job manually, THE Dashboard SHALL send a POST request to `/api/v1/admin/jobs/{job_name}/trigger` and display a confirmation via the Toast_System +6. WHEN an admin submits a data cleanup request with an older-than-days value, THE Dashboard SHALL send a POST request to `/api/v1/admin/data/cleanup` with the specified value and display the result via the Toast_System + +### Requirement 9: Ticker Management + +**User Story:** As a user, I want to add and remove tickers from the system so that I can track the stocks I care about. + +#### Acceptance Criteria + +1. WHEN a user submits a new ticker symbol via the add-ticker form, THE Dashboard SHALL send a POST request to `/api/v1/tickers` with the symbol and refresh the ticker list +2. WHEN a user views the ticker list, THE Dashboard SHALL fetch data from `GET /api/v1/tickers` and display all registered tickers +3. 
WHEN a user clicks the delete button on a ticker, THE Dashboard SHALL display a confirmation dialog before sending a DELETE request to `/api/v1/tickers/{symbol}` +4. IF a ticker deletion or creation request fails, THEN THE Dashboard SHALL display the error message via the Toast_System +5. WHEN a ticker is successfully deleted, THE Dashboard SHALL remove the ticker from the displayed list without requiring a full page reload + +### Requirement 10: Data Ingestion Trigger + +**User Story:** As a user, I want to manually trigger data ingestion for a specific ticker so that I can get fresh data on demand. + +#### Acceptance Criteria + +1. WHEN a user clicks the fetch-data button on the Ticker_Detail_View, THE Dashboard SHALL send a POST request to `/api/v1/ingestion/fetch/{symbol}` +2. WHILE the ingestion request is in progress, THE Dashboard SHALL display a loading indicator on the fetch-data button +3. WHEN the ingestion request completes successfully, THE Dashboard SHALL display a success message via the Toast_System and refresh the OHLCV data on the Ticker_Detail_View +4. IF the ingestion request fails, THEN THE Dashboard SHALL display the error message via the Toast_System + +### Requirement 11: Score Weight Configuration + +**User Story:** As a user, I want to adjust the scoring dimension weights so that I can customize the composite score calculation to my strategy. + +#### Acceptance Criteria + +1. THE Dashboard SHALL display the current scoring weights on the Rankings_View with editable numeric inputs for each dimension +2. WHEN a user modifies one or more weight values and submits the form, THE Dashboard SHALL send a PUT request to `/api/v1/scores/weights` with the updated weights map +3. WHEN the weight update succeeds, THE Dashboard SHALL refresh the rankings data to reflect the new composite scores +4. 
IF the weight update request fails, THEN THE Dashboard SHALL display the error message via the Toast_System + +### Requirement 12: API Client and Error Handling + +**User Story:** As a developer, I want a centralized API client with consistent error handling so that all API interactions follow the same patterns. + +#### Acceptance Criteria + +1. THE API_Client SHALL send all requests to the base URL `/api/v1/` using the JSON content type +2. THE API_Client SHALL unwrap the API envelope (`{ status, data, error }`) and return the `data` field on success or throw an error with the `error` field on failure +3. THE API_Client SHALL attach the JWT Bearer token from the Token_Store to every authenticated request +4. WHEN the API_Client receives a network error or timeout, THE API_Client SHALL throw a descriptive error that the calling component can display via the Toast_System +5. THE API_Client SHALL set a request timeout of 30 seconds for all API calls + +### Requirement 13: Responsive Layout and Visual Design + +**User Story:** As a user, I want the dashboard to have a clean, modern interface that works on desktop and tablet screens so that I can use it comfortably. + +#### Acceptance Criteria + +1. THE Dashboard SHALL use a sidebar navigation layout on screens wider than 1024 pixels and a collapsible hamburger menu on narrower screens +2. THE Dashboard SHALL use a dark color scheme with accent colors for positive (green) and negative (red) values consistent with financial data conventions +3. THE Dashboard SHALL apply smooth transitions (duration 150ms to 300ms) for page navigation, modal openings, and interactive element state changes +4. THE Dashboard SHALL display numeric financial values with appropriate formatting: prices to 2 decimal places, percentages with a percent sign, large numbers with abbreviations (K, M, B) +5. 
THE Dashboard SHALL display loading skeleton placeholders while data is being fetched from the API + +### Requirement 14: Static SPA Build and Deployment + +**User Story:** As a developer, I want the frontend to build as static files that Nginx can serve alongside the backend API so that deployment is simple. + +#### Acceptance Criteria + +1. THE Dashboard SHALL produce a static build output (HTML, CSS, JS) in a `dist/` directory via a single build command +2. THE Dashboard SHALL use hash-based filenames for JS and CSS assets to enable long-term browser caching +3. THE Dashboard SHALL support client-side routing with a fallback to `index.html` for all non-API routes (Nginx `try_files` configuration) +4. THE Dashboard SHALL proxy API requests to `/api/v1/` on the same origin, requiring no CORS configuration in production +5. WHEN the `GET /api/v1/health` endpoint returns a success response, THE Dashboard SHALL consider the backend available diff --git a/.kiro/specs/signal-dashboard/tasks.md b/.kiro/specs/signal-dashboard/tasks.md new file mode 100644 index 0000000..8798bd4 --- /dev/null +++ b/.kiro/specs/signal-dashboard/tasks.md @@ -0,0 +1,238 @@ +# Implementation Plan: Signal Dashboard + +## Overview + +Incremental build of the Signal Dashboard SPA in `frontend/`. Each phase wires up end-to-end before moving on, so there's always a runnable app. Backend API is already live — we consume it as-is. + +## Tasks + +- [x] 1. 
Scaffold Vite + React + TypeScript project + - [x] 1.1 Initialize `frontend/` with Vite React-TS template, install dependencies (react, react-dom, react-router-dom, @tanstack/react-query, zustand, axios, recharts, tailwindcss, postcss, autoprefixer) + - Create `package.json`, `tsconfig.json`, `vite.config.ts`, `tailwind.config.ts`, `postcss.config.js` + - Configure Vite proxy for `/api/v1/` to backend during dev + - Configure Tailwind with dark mode class strategy + - Create `src/main.tsx`, `src/App.tsx`, `src/styles/globals.css` with Tailwind directives + - _Requirements: 14.1, 14.4, 13.2_ + + - [x] 1.2 Create shared TypeScript interfaces and formatting utilities + - Create `src/lib/types.ts` with all data model interfaces (APIEnvelope, TokenResponse, WatchlistEntry, OHLCVBar, ScoreResponse, TradeSetup, SRLevel, SentimentResponse, FundamentalResponse, IndicatorResult, EMACrossResult, Ticker, AdminUser, SystemSetting, etc.) + - Create `src/lib/format.ts` with `formatPrice`, `formatPercent`, `formatLargeNumber`, `formatDate`, `formatDateTime` + - _Requirements: 13.4_ + + - [ ]* 1.3 Write property tests for formatting utilities + - **Property 16: Number formatting** + - **Validates: Requirements 13.4** + - Install vitest, @testing-library/react, fast-check as dev dependencies + - Create `frontend/vitest.config.ts` with jsdom environment + - Create `frontend/tests/property/format.test.ts` + - Test `formatPrice` always produces 2 decimal places, `formatPercent` ends with `%`, `formatLargeNumber` uses correct suffix + +- [x] 2. 
API client and auth store + - [x] 2.1 Create Axios API client with interceptors + - Create `src/api/client.ts` with base URL `/api/v1/`, 30s timeout, JSON content type + - Add request interceptor to attach Bearer token from auth store + - Add response interceptor to unwrap `{ status, data, error }` envelope + - Add 401 handler that clears auth store and redirects to login + - Create `ApiError` class for typed error handling + - _Requirements: 12.1, 12.2, 12.3, 12.4, 12.5_ + + - [x] 2.2 Create Zustand auth store + - Create `src/stores/authStore.ts` with token, username, role state + - `login(token)` decodes JWT payload, extracts `sub` and `role`, persists to localStorage + - `logout()` clears state and localStorage + - Initialize from localStorage on store creation for session persistence + - _Requirements: 1.1, 1.6_ + + - [x] 2.3 Create API module files for each domain + - Create `src/api/auth.ts` (login, register) + - Create `src/api/watchlist.ts` (list, add, remove) + - Create `src/api/tickers.ts` (list, create, delete) + - Create `src/api/scores.ts` (getScores, getRankings, updateWeights) + - Create `src/api/trades.ts` (list) + - Create `src/api/ohlcv.ts` (getOHLCV) + - Create `src/api/indicators.ts` (getIndicator, getEMACross) + - Create `src/api/sr-levels.ts` (getLevels) + - Create `src/api/sentiment.ts` (getSentiment) + - Create `src/api/fundamentals.ts` (getFundamentals) + - Create `src/api/ingestion.ts` (fetchData) + - Create `src/api/admin.ts` (users CRUD, settings, jobs, cleanup) + - Create `src/api/health.ts` (check) + - _Requirements: 12.1, 12.2_ + + - [ ]* 2.4 Write property tests for API client and auth store + - **Property 1: Token storage round-trip** + - **Property 2: Bearer token attachment** + - **Property 5: API envelope unwrapping** + - **Validates: Requirements 1.1, 1.3, 1.6, 12.2, 12.3** + +- [x] 3. Checkpoint — Verify foundation + - Ensure all tests pass, ask the user if questions arise. + +- [x] 4. 
Routing, layout, and auth pages + - [x] 4.1 Create ProtectedRoute component and router setup + - Create `src/components/auth/ProtectedRoute.tsx` — redirects to `/login` if no token, redirects non-admin away from admin routes + - Set up React Router in `src/App.tsx` with route structure from design (login, register, protected shell with watchlist, ticker detail, scanner, rankings, admin) + - _Requirements: 2.1, 2.2, 2.3, 2.4_ + + - [x] 4.2 Create AppShell layout with sidebar navigation + - Create `src/components/layout/AppShell.tsx` — sidebar + main content area with `<Outlet />` + - Create `src/components/layout/Sidebar.tsx` — nav links to watchlist, scanner, rankings, admin (admin link only if role is admin) + - Create `src/components/layout/MobileNav.tsx` — hamburger menu for screens < 1024px + - Apply dark color scheme with Tailwind dark mode classes + - Add smooth transitions (150-300ms) for navigation and interactive elements + - _Requirements: 2.5, 13.1, 13.2, 13.3_ + + - [x] 4.3 Create Login and Register pages + - Create `src/pages/LoginPage.tsx` with username/password form, calls `useAuth().login` mutation + - Create `src/pages/RegisterPage.tsx` with username (min 1 char) / password (min 6 chars) validation, calls `useAuth().register` mutation + - Create `src/hooks/useAuth.ts` with login/register/logout mutations using TanStack Query + - Display API errors via toast on failure, redirect to watchlist on login success + - _Requirements: 1.1, 1.2, 1.4, 1.5_ + + - [x] 4.4 Create shared UI components + - Create `src/components/ui/Toast.tsx` — toast context + portal, auto-dismiss 4s, max 3 visible, color-coded (red/green/blue) + - Create `src/components/ui/Skeleton.tsx` — Tailwind `animate-pulse` placeholder blocks + - Create `src/components/ui/Badge.tsx` — small label component for entry types + - Create `src/components/ui/ConfirmDialog.tsx` — modal confirmation dialog + - Create `src/components/ui/ScoreCard.tsx` — composite score display with colored ring (green > 70, 
yellow 40-70, red < 40) and dimension breakdown + - _Requirements: 13.3, 13.5, 1.5_ + + - [ ]* 4.5 Write property tests for routing and registration validation + - **Property 3: Registration form validation** + - **Property 4: Route protection based on auth state** + - **Validates: Requirements 1.2, 2.1, 2.2** + +- [x] 5. Checkpoint — Verify auth flow and navigation + - Ensure all tests pass, ask the user if questions arise. + +- [x] 6. Watchlist view + - [x] 6.1 Create TanStack Query hooks for watchlist + - Create `src/hooks/useWatchlist.ts` with `useWatchlist()` query, `useAddToWatchlist()` mutation, `useRemoveFromWatchlist()` mutation + - Invalidate watchlist query on add/remove success + - Surface errors to toast system + - _Requirements: 3.1, 3.3, 3.4, 3.5_ + + - [x] 6.2 Create WatchlistPage and sub-components + - Create `src/pages/WatchlistPage.tsx` — fetches watchlist, renders table/cards, loading skeletons, error state + - Create `src/components/watchlist/WatchlistTable.tsx` — displays symbol (clickable → `/ticker/{symbol}`), entry type badge (auto/manual), composite score, dimension scores, R:R ratio, R:R direction, nearest S/R levels, remove button + - Create `src/components/watchlist/AddTickerForm.tsx` — input + submit to add symbol to watchlist + - _Requirements: 3.1, 3.2, 3.3, 3.4, 3.6, 3.7_ + + - [ ]* 6.3 Write property tests for watchlist rendering + - **Property 6: Watchlist entry rendering completeness** + - **Property 7: Symbol click navigation** (watchlist portion) + - **Validates: Requirements 3.2, 3.6, 3.7** + +- [x] 7. 
Ticker detail view + - [x] 7.1 Create TanStack Query hooks for ticker detail + - Create `src/hooks/useTickerDetail.ts` with parallel queries for OHLCV, scores, S/R levels, sentiment, fundamentals + - Each query is independent — partial failure renders available sections + - _Requirements: 4.1, 4.9_ + + - [x] 7.2 Create TickerDetailPage with chart and data panels + - Create `src/pages/TickerDetailPage.tsx` — orchestrates parallel data fetching, renders sections with independent loading/error states + - Create `src/components/charts/CandlestickChart.tsx` — Recharts ComposedChart with custom Bar shapes for OHLCV candles, date x-axis, price y-axis + - Create `src/components/ticker/SROverlay.tsx` — renders S/R levels as ReferenceLine components on chart (green = support, red = resistance) + - Render ScoreCard for composite + dimension scores + - _Requirements: 4.1, 4.2, 4.3, 4.4, 4.9_ + + - [x] 7.3 Create sentiment, fundamentals, and indicator panels + - Create `src/components/ticker/SentimentPanel.tsx` — displays classification, confidence, dimension score + - Create `src/components/ticker/FundamentalsPanel.tsx` — displays P/E, revenue growth, earnings surprise, market cap (placeholder for nulls) + - Create `src/components/ticker/IndicatorSelector.tsx` — dropdown to select indicator type (ADX, EMA, RSI, ATR, volume_profile, pivot_points), fetches from `/api/v1/indicators/{symbol}/{type}`, displays result with normalized score. Includes EMA cross signal display. 
+ - _Requirements: 4.5, 4.6, 4.7, 4.8_ + + - [x] 7.4 Add data ingestion trigger to ticker detail + - Add fetch-data button to TickerDetailPage + - POST to `/api/v1/ingestion/fetch/{symbol}`, show loading indicator on button, toast on success/failure, refresh OHLCV data on success + - _Requirements: 10.1, 10.2, 10.3, 10.4_ + + - [ ]* 7.5 Write property tests for ticker detail components + - **Property 8: Score card rendering** + - **Property 9: Sentiment panel rendering** + - **Property 10: Fundamentals panel rendering** + - **Validates: Requirements 4.4, 4.5, 4.6** + +- [x] 8. Checkpoint — Verify watchlist and ticker detail + - Ensure all tests pass, ask the user if questions arise. + +- [x] 9. Scanner view + - [x] 9.1 Create TanStack Query hooks and scanner page + - Create `src/hooks/useTrades.ts` with `useTrades()` query + - Create `src/pages/ScannerPage.tsx` — fetches trade setups, renders filter controls and table, loading skeletons + - Create `src/components/scanner/TradeTable.tsx` — sortable table displaying symbol (clickable → `/ticker/{symbol}`), direction, entry price, stop loss, target, R:R ratio, composite score, detection timestamp + - Add filter controls: minimum R:R numeric input, direction dropdown (long/short/both) + - Add column sorting (R:R ratio, composite score, symbol, detection time) with ascending/descending toggle + - _Requirements: 5.1, 5.2, 5.3, 5.4, 5.5, 5.6_ + + - [ ]* 9.2 Write property tests for scanner filtering and sorting + - **Property 11: Trade setup rendering** + - **Property 12: Scanner filtering** + - **Property 13: Scanner sorting** + - **Validates: Requirements 5.2, 5.3, 5.4, 5.5** + +- [x] 10. 
Rankings view + - [x] 10.1 Create TanStack Query hooks and rankings page + - Create `src/hooks/useScores.ts` with `useRankings()` query, `useUpdateWeights()` mutation + - Create `src/pages/RankingsPage.tsx` — fetches rankings, renders table sorted by composite score descending, displays current weights + - Create `src/components/rankings/RankingsTable.tsx` — displays rank position, symbol (clickable → `/ticker/{symbol}`), composite score, all dimension scores + - Create `src/components/rankings/WeightsForm.tsx` — editable numeric inputs per dimension, submit updates weights via PUT, refreshes rankings on success + - _Requirements: 6.1, 6.2, 6.3, 6.4, 11.1, 11.2, 11.3, 11.4_ + + - [ ]* 10.2 Write property tests for rankings and weights + - **Property 14: Rankings display order** + - **Property 17: Weights form rendering** + - **Validates: Requirements 6.1, 6.2, 11.1** + +- [x] 11. Checkpoint — Verify scanner and rankings + - Ensure all tests pass, ask the user if questions arise. + +- [x] 12. Ticker management + - [x] 12.1 Create TanStack Query hooks and ticker management UI + - Create `src/hooks/useTickers.ts` with `useTickers()` query, `useAddTicker()` mutation, `useDeleteTicker()` mutation + - Add ticker list display to an appropriate location (e.g., admin page or dedicated section) + - Add ticker form for adding new symbols + - Delete button triggers ConfirmDialog before sending DELETE request + - Remove ticker from display on successful delete without full page reload + - _Requirements: 9.1, 9.2, 9.3, 9.4, 9.5_ + +- [x] 13. 
Admin panel + - [x] 13.1 Create admin hooks and user management section + - Create `src/hooks/useAdmin.ts` with queries and mutations for users, settings, jobs, cleanup + - Create `src/pages/AdminPage.tsx` — tabbed layout with user management, settings, jobs, data cleanup sections + - Create `src/components/admin/UserTable.tsx` — displays username, role, access status; toggle access, reset password controls + - Add create-user form (username, password, role, access flag) + - _Requirements: 7.1, 7.2, 7.3, 7.4, 7.5, 7.6_ + + - [x] 13.2 Create settings, jobs, and data cleanup sections + - Create `src/components/admin/SettingsForm.tsx` — editable fields for each setting, registration toggle + - Create `src/components/admin/JobControls.tsx` — toggle on/off per job, manual trigger button, toast confirmation + - Create `src/components/admin/DataCleanup.tsx` — older-than-days input, submit cleanup, display result via toast + - _Requirements: 8.1, 8.2, 8.3, 8.4, 8.5, 8.6_ + + - [ ]* 13.3 Write property test for admin user table rendering + - **Property 15: Admin user table rendering** + - **Validates: Requirements 7.2** + +- [x] 14. Final wiring and polish + - [x] 14.1 Add health check and loading states + - Create health check query using `GET /api/v1/health` — display backend status indicator in sidebar + - Ensure all pages show Skeleton placeholders during loading + - Ensure all mutation errors surface through Toast system consistently + - _Requirements: 14.5, 13.5, 12.4_ + + - [x] 14.2 Configure production build + - Verify `vite build` outputs to `frontend/dist/` with hashed filenames + - Add Nginx config snippet in comments or README for `try_files $uri $uri/ /index.html` and `/api/v1/` proxy + - _Requirements: 14.1, 14.2, 14.3, 14.4_ + +- [x] 15. Final checkpoint — Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. 
+ +## Notes + +- Tasks marked with `*` are optional property test tasks and can be skipped for faster MVP +- Each task references specific requirements for traceability +- Backend API is already running — no backend changes needed +- All 17 correctness properties are covered across optional test tasks +- Checkpoints are placed after each major phase for incremental validation diff --git a/.kiro/specs/stock-data-backend/.config.kiro b/.kiro/specs/stock-data-backend/.config.kiro new file mode 100644 index 0000000..e2880dd --- /dev/null +++ b/.kiro/specs/stock-data-backend/.config.kiro @@ -0,0 +1 @@ +{"specId": "fa730cf4-a14d-4f62-8993-fd7db6fe25cc", "workflowType": "requirements-first", "specType": "feature"} \ No newline at end of file diff --git a/.kiro/specs/stock-data-backend/design.md b/.kiro/specs/stock-data-backend/design.md new file mode 100644 index 0000000..c6972e7 --- /dev/null +++ b/.kiro/specs/stock-data-backend/design.md @@ -0,0 +1,1121 @@ +# Design Document: Stock Data Backend + +## Overview + +The Stock Data Backend is an MVP investing-signal platform built with Python/FastAPI and PostgreSQL, focused on NASDAQ stocks. It ingests OHLCV price data from a swappable market data provider, computes technical indicators (ADX, EMA, RSI, ATR, Volume Profile, Pivot Points), detects support/resistance levels, collects sentiment and fundamental data, and feeds everything into a composite scoring engine. The scoring engine ranks tickers, auto-populates a watchlist, and an R:R scanner flags asymmetric trade setups. + +The system is API-first (REST, JSON envelope, versioned URLs), uses JWT auth with role-based access, and runs scheduled jobs for data collection. All computation is on-demand or scheduled — no streaming, no websockets, no real-time feeds. + +### Key Design Decisions + +- **Single process**: FastAPI app with APScheduler for scheduled jobs — no separate worker processes. 
+- **On-demand scoring**: Composite scores are marked stale when inputs change and recomputed only when requested. +- **Simple LRU cache**: In-memory `functools.lru_cache` (max 1000 entries) for indicator computations. No TTL — cache is invalidated when new OHLCV data is ingested for a ticker. +- **Provider abstraction**: Market data provider behind a Python Protocol class for swappability. +- **Fixed indicator set**: ADX, EMA, RSI, ATR, Volume Profile, Pivot Points — no plugin architecture. +- **Sentiment**: Single source, weighted average with configurable time decay and lookback window. +- **Fundamentals**: Single source, simple periodic fetch. +- **Watchlist cap**: Auto-populated top-X (default 10) + max 10 manual additions = max 20. + +## Architecture + +### High-Level Architecture + +```mermaid +graph TB + Client[API Client] --> API[FastAPI App /api/v1/] + API --> Auth[Auth Service] + API --> TickerReg[Ticker Registry] + API --> PriceStore[Price Store] + API --> Ingestion[Ingestion Pipeline] + API --> TechAnalysis[Technical Analysis] + API --> SRDetector[S/R Detector] + API --> Scoring[Scoring Engine] + API --> RRScanner[R:R Scanner] + API --> Watchlist[Watchlist Service] + API --> Admin[Admin Service] + + Ingestion --> Provider[Market Data Provider Protocol] + Provider --> ExternalAPI[External Market Data API] + + Scheduler[APScheduler] --> Ingestion + Scheduler --> SentimentCollector[Sentiment Collector] + Scheduler --> FundCollector[Fundamental Collector] + Scheduler --> RRScanner + + TickerReg --> DB[(PostgreSQL)] + PriceStore --> DB + Auth --> DB + Scoring --> DB + SRDetector --> DB + Watchlist --> DB + + TechAnalysis --> Cache[LRU Cache max=1000] + TechAnalysis --> PriceStore +``` + +### Request Flow + +```mermaid +sequenceDiagram + participant C as Client + participant A as FastAPI + participant Auth as Auth Middleware + participant S as Service Layer + participant DB as PostgreSQL + + C->>A: HTTP Request + JWT + A->>Auth: Validate token + role + 
Auth-->>A: User context + A->>S: Call service method + S->>DB: Query/Mutate + DB-->>S: Result + S-->>A: Domain result + A-->>C: JSON envelope response +``` + +### Project Structure + +``` +stock-data-backend/ +├── alembic/ # DB migrations +│ ├── versions/ +│ └── env.py +├── app/ +│ ├── main.py # FastAPI app, lifespan, scheduler +│ ├── config.py # Settings via pydantic-settings +│ ├── database.py # SQLAlchemy engine, session factory +│ ├── models/ # SQLAlchemy ORM models +│ │ ├── ticker.py +│ │ ├── ohlcv.py +│ │ ├── user.py +│ │ ├── sentiment.py +│ │ ├── fundamental.py +│ │ ├── score.py +│ │ ├── sr_level.py +│ │ ├── trade_setup.py +│ │ ├── watchlist.py +│ │ └── settings.py +│ ├── schemas/ # Pydantic request/response schemas +│ │ ├── common.py # APIEnvelope, pagination +│ │ ├── ticker.py +│ │ ├── ohlcv.py +│ │ ├── auth.py +│ │ ├── indicator.py +│ │ ├── sr_level.py +│ │ ├── sentiment.py +│ │ ├── fundamental.py +│ │ ├── score.py +│ │ ├── trade_setup.py +│ │ ├── watchlist.py +│ │ └── admin.py +│ ├── routers/ # FastAPI routers (one per domain) +│ │ ├── tickers.py +│ │ ├── ohlcv.py +│ │ ├── ingestion.py +│ │ ├── indicators.py +│ │ ├── sr_levels.py +│ │ ├── sentiment.py +│ │ ├── fundamentals.py +│ │ ├── scores.py +│ │ ├── trades.py +│ │ ├── watchlist.py +│ │ ├── auth.py +│ │ ├── admin.py +│ │ └── health.py +│ ├── services/ # Business logic +│ │ ├── ticker_service.py +│ │ ├── price_service.py +│ │ ├── ingestion_service.py +│ │ ├── indicator_service.py +│ │ ├── sr_service.py +│ │ ├── sentiment_service.py +│ │ ├── fundamental_service.py +│ │ ├── scoring_service.py +│ │ ├── rr_scanner_service.py +│ │ ├── watchlist_service.py +│ │ ├── auth_service.py +│ │ └── admin_service.py +│ ├── providers/ # External data provider abstractions +│ │ ├── protocol.py # MarketDataProvider, SentimentProvider, FundamentalProvider Protocols +│ │ ├── alpaca.py # Alpaca OHLCV provider (alpaca-py) +│ │ ├── gemini_sentiment.py # Gemini LLM sentiment provider (google-genai + search grounding) +│ │ └── 
fmp.py # Financial Modeling Prep fundamentals provider (httpx) +│ ├── scheduler.py # APScheduler job definitions +│ ├── dependencies.py # FastAPI dependency injection +│ ├── middleware.py # Logging, error handling +│ └── cache.py # LRU cache wrapper with invalidation +├── tests/ +│ ├── unit/ +│ ├── property/ +│ └── conftest.py +├── deploy/ # Deployment templates +│ ├── nginx.conf # Nginx reverse proxy config for signal.thiessen.io +│ ├── stock-data-backend.service # systemd service file +│ └── setup_db.sh # DB creation + migration script +├── .gitea/ +│ └── workflows/ +│ └── deploy.yml # Gitea Actions CI/CD pipeline +├── alembic.ini +├── pyproject.toml +└── .env.example +``` + + +## Components and Interfaces + +### 1. Market Data Provider Protocol + +```python +from typing import Protocol +from datetime import date + +class MarketDataProvider(Protocol): + async def fetch_ohlcv( + self, ticker: str, start_date: date, end_date: date + ) -> list[OHLCVRecord]: + """Fetch OHLCV data for a ticker in a date range.""" + ... + +class SentimentProvider(Protocol): + async def fetch_sentiment(self, ticker: str) -> SentimentRecord: + """Fetch current sentiment analysis for a ticker.""" + ... + +class FundamentalProvider(Protocol): + async def fetch_fundamentals(self, ticker: str) -> FundamentalRecord: + """Fetch fundamental data for a ticker.""" + ... +``` + +Each data source has its own protocol since they come from different external services. Swapping any provider means implementing the relevant protocol — no other code changes. + +**Concrete Provider Implementations:** + +| Data Type | Provider | SDK/Library | Auth | Notes | +|-----------|----------|-------------|------|-------| +| OHLCV | Alpaca Markets Data API | `alpaca-py` | API key + secret | Free tier, daily bars, good rate limits | +| Sentiment | Gemini (gemini-2.0-flash) with Google Search grounding | `google-genai` | API key | LLM analyzes live web data (news, social media) per ticker. 
Returns structured JSON with classification + confidence. Search grounding ensures current data, not just training knowledge. | +| Fundamentals | Financial Modeling Prep (FMP) | `httpx` (REST) | API key | Free tier: P/E, revenue growth, earnings surprise, market cap | + +**Gemini Sentiment Provider Details:** + +The sentiment provider sends a structured prompt to Gemini with search grounding enabled: +- Prompt asks for current market sentiment analysis for a specific ticker +- Gemini searches the web for recent news, social media mentions, analyst opinions +- Response is requested in JSON mode: `{"classification": "bullish|bearish|neutral", "confidence": 0-100, "reasoning": "..."}` +- The `reasoning` field is logged but not stored — only classification and confidence are persisted as a Sentiment_Score +- Cost: ~$0.001 per call with gemini-2.0-flash (negligible for 30-min polling of a few dozen tickers) + +### 2. Ticker Registry + +- **Add ticker**: Validate symbol (non-empty, uppercase, alphanumeric), check uniqueness, insert. +- **Delete ticker**: Cascade delete all associated data (OHLCV, scores, SR levels, trade setups, watchlist entries, sentiment, fundamentals). +- **List tickers**: Return all, sorted alphabetically. + +### 3. Price Store + +- **Upsert OHLCV**: Insert or update on (ticker, date) conflict. Validates: high >= low, all prices >= 0, volume >= 0, date <= today. +- **Query**: By ticker + date range. Uses composite index on (ticker, date). +- **On upsert**: Invalidate LRU cache entries for the affected ticker. Mark composite score as stale. + +### 4. Ingestion Pipeline + +- **Fetch + upsert**: Calls provider, validates records, upserts into Price Store. +- **Rate limit handling**: Tracks `last_ingested_date` per ticker in memory during a fetch. On rate limit, returns partial result with progress info. Resume continues from `last_ingested_date + 1 day`. +- **Error handling**: Provider errors return descriptive message, no data modification. + +### 5. 
Technical Analysis Service + +Computes indicators from OHLCV data. Each indicator function: +- Takes ticker + date range as input +- Fetches OHLCV from Price Store +- Validates minimum data requirements (e.g., RSI needs 15+ records) +- Returns raw values + normalized 0-100 score +- Results cached via LRU (keyed on ticker + date range + indicator type) + +Indicators: +| Indicator | Min Data | Default Period | +|-----------|----------|----------------| +| ADX | 28 bars | 14 | +| EMA | period+1 | 20, 50 | +| RSI | 15 bars | 14 | +| ATR | 15 bars | 14 | +| Volume Profile | 20 bars | N/A | +| Pivot Points | 5 bars | N/A | + +### 6. S/R Detector + +- **Detection methods**: Volume Profile (HVN/LVN zones) and Pivot Points (swing highs/lows). +- **Strength scoring**: Count how many times price has touched/respected a level (0-100 scale). +- **Merge**: Levels from different methods within configurable tolerance (default 0.5%) are merged into a single consolidated level. Merged levels combine strength scores. +- **Tagging**: Each level tagged as "support" or "resistance" relative to current (latest close) price. +- **Recalculation**: Triggered when new OHLCV data arrives for a ticker. +- **Output**: Sorted by strength descending, includes detection method. + +### 7. Sentiment Service + +- **Collection**: Scheduled job (default every 30 min) fetches sentiment for all tracked tickers. +- **Storage**: Each record has classification (bullish/bearish/neutral), confidence (0-100), source, timestamp. +- **Dimension score**: Weighted average of scores within lookback window (default 24h). Time decay applied — more recent scores weighted higher. Bullish = high score, bearish = low score, neutral = 50. + +### 8. Fundamental Service + +- **Collection**: Scheduled job (default daily) fetches P/E, revenue growth, earnings surprise %, market cap. +- **Storage**: Latest snapshot per ticker. On new data, marks fundamental dimension score as stale. 
+- **Error handling**: On provider failure, retain existing data, log error. + +### 9. Scoring Engine + +- **Dimensions**: technical, sr_quality, sentiment, fundamental, momentum — each scored 0-100. +- **Composite**: Weighted average of available dimensions. Missing dimensions excluded, weights re-normalized. +- **Staleness**: Scores marked stale when underlying data changes. Recomputed on-demand when requested. +- **Weight update**: When user updates weights, all composite scores are recomputed. +- **Rankings**: Return tickers sorted by composite score descending, all dimension scores included. + +**Dimension score computation**: +- **Technical**: Weighted combination of ADX trend strength, EMA directional alignment, RSI momentum position. +- **S/R Quality**: Based on number of strong S/R levels, proximity of nearest levels to current price, and average strength. +- **Sentiment**: Weighted average with time decay from sentiment service. +- **Fundamental**: Normalized composite of P/E (lower is better, relative to sector), revenue growth, earnings surprise. +- **Momentum**: Rate of change of price over configurable lookback periods (e.g., 5-day, 20-day). + +### 10. R:R Scanner + +- **Scan**: Periodic job scans all tracked tickers. +- **Long setup**: Entry = current price, target = nearest SR level above, stop = entry - (ATR × multiplier). R:R = (target - entry) / (entry - stop). +- **Short setup**: Entry = current price, target = nearest SR level below, stop = entry + (ATR × multiplier). R:R = (entry - target) / (stop - entry). +- **Filter**: Only setups meeting configurable R:R threshold (default 3:1). +- **Recalculation**: When SR levels or price data changes, recalculate and prune invalid setups. +- **Skip**: Tickers without sufficient SR levels or ATR data are skipped with logged reason. + +### 11. Watchlist Service + +- **Auto-populate**: Top-X tickers by composite score (default X=10). Auto entries update when scores change. 
+- **Manual entries**: Users can add/remove. Tagged as manual, not subject to auto-population. +- **Cap**: Max size = auto count + 10 manual (default max 20). +- **Response**: Each entry includes composite score, all dimension scores, R:R ratio (if setup exists), active SR levels. +- **Sorting**: By composite score, any dimension score, or R:R ratio. + +### 12. Auth Service + +- **Registration**: Configurable on/off. Creates user with no API access by default (admin must grant). +- **Login**: Validates credentials, returns JWT (60-min expiry). Error messages don't reveal which field is wrong. +- **JWT**: Contains user_id, role, expiry. Validated on every protected request. +- **Roles**: `user` and `admin`. Middleware checks role for admin endpoints. +- **Password**: bcrypt hashed. Never stored or returned in plaintext. + +### 13. Admin Service + +- **Default admin**: Created on first startup (username: "admin", password: "admin"). +- **User management**: Grant/revoke access, toggle registration, list users, reset passwords, create accounts. +- **System settings**: Persisted in DB. Frequencies, thresholds, weights, watchlist size. +- **Data maintenance**: Delete data older than N days (OHLCV, sentiment, fundamentals). Preserves tickers, users, latest scores. +- **Job control**: Enable/disable scheduled jobs, trigger manual runs. 
+ +### API Envelope + +All responses follow: + +```python +class APIEnvelope(BaseModel): + status: Literal["success", "error"] + data: Any | None = None + error: str | None = None +``` + +### Dependency Injection + +FastAPI's `Depends()` for: +- DB session (async context manager) +- Current user (from JWT) +- Admin-only guard +- Service instances (constructed with session) + + +## Data Models + +### Entity Relationship Diagram + +```mermaid +erDiagram + User { + int id PK + string username UK + string password_hash + string role + bool has_access + datetime created_at + datetime updated_at + } + + Ticker { + int id PK + string symbol UK + datetime created_at + } + + OHLCVRecord { + int id PK + int ticker_id FK + date date + float open + float high + float low + float close + bigint volume + datetime created_at + } + + SentimentScore { + int id PK + int ticker_id FK + string classification + int confidence + string source + datetime timestamp + } + + FundamentalData { + int id PK + int ticker_id FK + float pe_ratio + float revenue_growth + float earnings_surprise + float market_cap + datetime fetched_at + } + + SRLevel { + int id PK + int ticker_id FK + float price_level + string type + int strength + string detection_method + datetime created_at + } + + DimensionScore { + int id PK + int ticker_id FK + string dimension + float score + bool is_stale + datetime computed_at + } + + CompositeScore { + int id PK + int ticker_id FK + float score + bool is_stale + string weights_json + datetime computed_at + } + + TradeSetup { + int id PK + int ticker_id FK + string direction + float entry_price + float stop_loss + float target + float rr_ratio + float composite_score + datetime detected_at + } + + WatchlistEntry { + int id PK + int user_id FK + int ticker_id FK + string entry_type + datetime added_at + } + + SystemSetting { + int id PK + string key UK + string value + datetime updated_at + } + + IngestionProgress { + int id PK + int ticker_id FK + date last_ingested_date 
+ datetime updated_at + } + + Ticker ||--o{ OHLCVRecord : has + Ticker ||--o{ SentimentScore : has + Ticker ||--o| FundamentalData : has + Ticker ||--o{ SRLevel : has + Ticker ||--o{ DimensionScore : has + Ticker ||--o| CompositeScore : has + Ticker ||--o{ TradeSetup : has + Ticker ||--o{ WatchlistEntry : on + User ||--o{ WatchlistEntry : owns + Ticker ||--o| IngestionProgress : tracks +``` + +### Key Model Details + +**OHLCVRecord** +- Composite unique constraint on `(ticker_id, date)`. +- Composite index on `(ticker_id, date)` for range queries. +- `date` is date-only (no time component). +- Validation: `high >= low`, all prices `>= 0`, `volume >= 0`, `date <= today`. + +**SRLevel** +- `type`: "support" or "resistance". +- `detection_method`: "volume_profile" or "pivot_point" or "merged". +- `strength`: 0-100 integer. + +**DimensionScore** +- `dimension`: one of "technical", "sr_quality", "sentiment", "fundamental", "momentum". +- `is_stale`: set to `True` when underlying data changes, triggers recomputation on next read. + +**CompositeScore** +- `weights_json`: JSON string of the weights used for this computation (for auditability). +- `is_stale`: same staleness pattern as DimensionScore. + +**WatchlistEntry** +- `entry_type`: "auto" or "manual". +- Unique constraint on `(user_id, ticker_id)`. + +**User** +- `role`: "user" or "admin". +- `has_access`: boolean, default `False`. Admin must grant access after registration. + +**IngestionProgress** +- Tracks the last successfully ingested date per ticker for rate-limit resume. +- Unique constraint on `ticker_id`. + +### Database Migrations + +Alembic manages all schema changes. Initial migration creates all tables. Subsequent migrations handle schema evolution. Migration files are version-controlled. + + +## Correctness Properties + +*A property is a characteristic or behavior that should hold true across all valid executions of a system — essentially, a formal statement about what the system should do. 
Properties serve as the bridge between human-readable specifications and machine-verifiable correctness guarantees.* + +### Property 1: Ticker creation round-trip + +*For any* valid NASDAQ ticker symbol (non-empty, uppercase, alphanumeric), adding it to the Ticker Registry and then listing all tickers should include that symbol in the result. + +**Validates: Requirements 1.1** + +### Property 2: Duplicate ticker rejection + +*For any* valid ticker symbol, adding it to the Ticker Registry twice should succeed the first time and return a duplicate error the second time, with the registry containing exactly one entry for that symbol. + +**Validates: Requirements 1.2** + +### Property 3: Whitespace ticker rejection + +*For any* string composed entirely of whitespace characters (including the empty string), submitting it as a ticker symbol should be rejected with a validation error, and the Ticker Registry should remain unchanged. + +**Validates: Requirements 1.3** + +### Property 4: Ticker deletion cascades + +*For any* ticker with associated OHLCV records, scores, SR levels, trade setups, sentiment, and fundamental data, deleting the ticker should remove the ticker and all associated records from the database. + +**Validates: Requirements 1.5** + +### Property 5: OHLCV storage round-trip + +*For any* valid OHLCV record (valid ticker, high >= low, all prices >= 0, volume >= 0, date <= today), storing it in the Price Store and retrieving it by (ticker, date) should return the same open, high, low, close, and volume values. + +**Validates: Requirements 2.1, 2.2** + +### Property 6: OHLCV validation rejects invalid records + +*For any* OHLCV record where high < low, or any price is negative, or volume is negative, or date is in the future, the Backend Service should reject the record with a validation error and the Price Store should remain unchanged. 
+ +**Validates: Requirements 2.3** + +### Property 7: OHLCV rejects unregistered tickers + +*For any* OHLCV record referencing a ticker symbol not present in the Ticker Registry, the Backend Service should reject the record with an error. + +**Validates: Requirements 2.4** + +### Property 8: Provider error preserves existing data + +*For any* data type (OHLCV, sentiment, fundamentals) and any existing data state, if the market data provider returns an error or is unreachable during a fetch, all existing data in the store should remain unchanged. + +**Validates: Requirements 3.2, 7.3, 8.3** + +### Property 9: Rate-limit resume continuity + +*For any* ticker and date range where ingestion is interrupted by a rate limit after N records, resuming the fetch for the same ticker and date range should continue from the day after the last successfully ingested date, resulting in no gaps and no duplicate records across the combined ingestion. + +**Validates: Requirements 3.3, 3.4, 4.5** + +### Property 10: Scheduled collection processes all tickers + +*For any* set of tracked tickers, when a scheduled collection job (OHLCV, sentiment, or fundamentals) runs, it should attempt to fetch data for every tracked ticker. If one ticker fails, the remaining tickers should still be processed. + +**Validates: Requirements 4.1, 4.3, 7.1, 8.2** + +### Property 11: Score bounds invariant + +*For any* computed score in the system — indicator normalized score, SR level strength, dimension score, or composite score — the value must be in the range [0, 100]. + +**Validates: Requirements 5.2, 6.2, 9.1** + +### Property 12: Indicator minimum data enforcement + +*For any* ticker with fewer OHLCV records than the minimum required for a given indicator (e.g., RSI needs 15+, ADX needs 28+), requesting that indicator should return an error specifying the minimum data requirement. 
+ +**Validates: Requirements 5.4** + +### Property 13: EMA cross directional bias + +*For any* ticker and date range with sufficient OHLCV data, the EMA cross signal should return "bullish" when short EMA > long EMA, "bearish" when short EMA < long EMA, and "neutral" when they are equal (within floating-point tolerance). + +**Validates: Requirements 5.3** + +### Property 14: Indicator computation determinism + +*For any* valid OHLCV dataset and indicator type (ADX, EMA, RSI, ATR), computing the indicator twice with the same inputs should produce identical results. + +**Validates: Requirements 5.1** + +### Property 15: SR level support/resistance tagging + +*For any* SR level and current price, the level should be tagged "support" if the level price is below the current price, and "resistance" if the level price is above the current price. + +**Validates: Requirements 6.3** + +### Property 16: SR level merging within tolerance + +*For any* two SR levels from different detection methods whose price levels are within the configurable tolerance (default 0.5%), the SR Detector should merge them into a single consolidated level. For any two levels outside the tolerance, they should remain separate. + +**Validates: Requirements 6.5** + +### Property 17: SR level detection from data + +*For any* OHLCV dataset with sufficient data, the SR Detector should produce SR levels derived from Volume Profile (HVN/LVN) and/or Pivot Points (swing highs/lows), and each level should reference its detection method. + +**Validates: Requirements 6.1** + +### Property 18: Sentiment score data shape + +*For any* stored Sentiment Score, the classification must be one of (bullish, bearish, neutral), confidence must be in [0, 100], and source and timestamp must be non-null. 
+ +**Validates: Requirements 7.2** + +### Property 19: Sentiment dimension score uses time decay + +*For any* set of sentiment scores within the lookback window, the sentiment dimension score should weight more recent scores higher than older ones. Specifically, given two sets of scores with identical values but different timestamps, the set with more recent timestamps should produce a higher (or equal) dimension score if bullish, or lower (or equal) if bearish. + +**Validates: Requirements 7.4** + +### Property 20: Fundamental data storage round-trip + +*For any* valid fundamental data record (P/E ratio, revenue growth, earnings surprise %, market cap), storing it and retrieving it for the same ticker should return the same values. + +**Validates: Requirements 8.1** + +### Property 21: Composite score is weighted average + +*For any* ticker with dimension scores and a set of weights, the composite score should equal the weighted average of the available dimension scores. Specifically: `composite = sum(weight_i * score_i) / sum(weight_i)` for all available dimensions. + +**Validates: Requirements 9.2** + +### Property 22: Missing dimensions re-normalize weights + +*For any* ticker missing one or more dimension scores, the composite score should be computed using only available dimensions with weights re-normalized to sum to 1.0, and the response should indicate which dimensions are missing. + +**Validates: Requirements 9.3** + +### Property 23: Staleness marking on data change + +*For any* ticker, when underlying data changes (new OHLCV, new sentiment, new fundamentals), the affected dimension scores and composite score should be marked as stale. + +**Validates: Requirements 9.4** + +### Property 24: Stale score recomputation on demand + +*For any* ticker with a stale composite score, requesting the score should trigger recomputation and return a fresh (non-stale) score that reflects current data. 
+ +**Validates: Requirements 9.5** + +### Property 25: Weight update triggers full recomputation + +*For any* set of tickers with composite scores, when dimension weights are updated, all composite scores should be recomputed using the new weights. + +**Validates: Requirements 9.7** + +### Property 26: Trade setup R:R threshold filtering + +*For any* set of potential trade setups, only those with R:R ratio >= the configured threshold (default 3:1) should be returned. No setup below the threshold should appear in results. + +**Validates: Requirements 10.1** + +### Property 27: Trade setup computation correctness + +*For any* ticker with SR levels and ATR data, a long setup should have target = nearest SR level above current price and stop = entry - ATR-based distance, while a short setup should have target = nearest SR level below current price and stop = entry + ATR-based distance. The R:R ratio should equal `|target - entry| / |entry - stop|`. + +**Validates: Requirements 10.2, 10.3** + +### Property 28: Trade setup data completeness + +*For any* trade setup, it must include: entry price (> 0), stop-loss (> 0), target (> 0), R:R ratio (> 0), direction (one of "long" or "short"), and composite score (0-100). + +**Validates: Requirements 10.4** + +### Property 29: Trade setup pruning on data change + +*For any* existing trade setup, when underlying SR levels or price data changes such that the setup no longer meets the R:R threshold, the setup should be removed. + +**Validates: Requirements 10.5** + +### Property 30: Watchlist auto-population + +*For any* set of tickers with composite scores, the watchlist auto-populated entries should be exactly the top-X tickers by composite score (where X is configurable, default 10). 
+ +**Validates: Requirements 11.1** + +### Property 31: Watchlist entry data completeness + +*For any* watchlist entry, the response should include composite score, all dimension scores, R:R ratio (if a trade setup exists for that ticker), and active SR levels. + +**Validates: Requirements 11.2** + +### Property 32: Manual watchlist entries persist through auto-population + +*For any* manually added watchlist entry, it should be tagged as "manual" and should not be removed or replaced when auto-population runs, regardless of the ticker's composite score ranking. + +**Validates: Requirements 11.3** + +### Property 33: Watchlist size cap enforcement + +*For any* watchlist, the total number of entries should never exceed auto-populate count + 10 manual additions (default max 20). Attempting to add a manual entry beyond the cap should be rejected. + +**Validates: Requirements 11.4** + +### Property 34: Registration creates no-access user + +*For any* valid credentials submitted when registration is enabled, the created user should have `has_access = False` and role = "user". + +**Validates: Requirements 12.1** + +### Property 35: Registration disabled rejects all attempts + +*For any* credentials submitted when registration is disabled, the registration should be rejected regardless of credential validity. + +**Validates: Requirements 12.2** + +### Property 36: Login returns valid JWT + +*For any* registered user with valid credentials, login should return a JWT access token that decodes to contain the user's ID, role, and an expiry time 60 minutes from issuance. + +**Validates: Requirements 12.3** + +### Property 37: Invalid credentials return generic error + +*For any* login attempt with invalid credentials (wrong username, wrong password, or both), the error response should be identical — not revealing which field was incorrect. 
+ +**Validates: Requirements 12.4** + +### Property 38: Access control enforcement + +*For any* protected endpoint, unauthenticated requests should receive HTTP 401, and authenticated users without the required role or access should receive HTTP 403. + +**Validates: Requirements 12.5** + +### Property 39: Admin user management operations + +*For any* user account, an admin should be able to grant access, revoke access, and reset the password, with each operation correctly updating the user's state in the database. + +**Validates: Requirements 13.2** + +### Property 40: Data cleanup preserves structure + +*For any* dataset with records of various ages, admin data cleanup (delete records older than N days) should remove old OHLCV, sentiment, and fundamental records while preserving all ticker entries, user accounts, and the latest scores. + +**Validates: Requirements 13.4** + +### Property 41: Sorting correctness + +*For any* list endpoint with a defined sort order (tickers alphabetically, SR levels by strength desc, rankings by composite score desc, trade setups by R:R desc then composite desc), the returned results must be correctly sorted according to the specified order. 
+ +**Validates: Requirements 1.4, 6.6, 9.6, 10.8, 11.6** + + +## Error Handling + +### API Error Responses + +All errors use the standard JSON envelope with appropriate HTTP status codes: + +```json +{ + "status": "error", + "data": null, + "error": "Human-readable error message" +} +``` + +| Scenario | HTTP Status | Error Message Pattern | +|----------|-------------|----------------------| +| Validation failure (bad input) | 400 | "Validation error: {details}" | +| Authentication missing/expired | 401 | "Authentication required" / "Token expired" | +| Insufficient permissions | 403 | "Insufficient permissions" | +| Resource not found | 404 | "Ticker not found: {symbol}" | +| Duplicate resource | 409 | "Ticker already exists: {symbol}" | +| Provider unreachable | 502 | "Market data provider unavailable" | +| Rate limited by provider | 429 | "Rate limited. Ingested {n} records. Resume available." | +| Internal error | 500 | "Internal server error" | + +### Error Handling Strategies + +**Provider Errors** +- Wrap all provider calls in try/except. +- On connection error or timeout: return 502 with descriptive message. Existing data is never modified. +- On rate limit: record progress (last ingested date), return 429 with progress info. +- On unexpected provider response: log full response, return 502. + +**Database Errors** +- Unique constraint violations: catch `IntegrityError`, return 409. +- Connection pool exhaustion: log, return 503 "Service temporarily unavailable". +- All DB operations within transactions — rollback on any error. + +**Validation Errors** +- Pydantic model validation catches schema-level errors automatically (400). +- Business validation (e.g., high < low, future date) in service layer, raises custom exceptions mapped to 400. + +**Scheduled Job Errors** +- Each ticker processed independently — one failure doesn't stop others. +- Errors logged with structured JSON (ticker, job name, error type, message). 
+- Job-level errors (e.g., scheduler crash) logged and job retried on next interval. + +**Authentication Errors** +- Invalid/missing token: 401 with generic message. +- Expired token: 401 with "Token expired" message. +- Invalid credentials on login: 401 with generic "Invalid credentials" (never reveals which field). +- Insufficient role: 403. + +### Exception Hierarchy + +```python +class AppError(Exception): + """Base application error.""" + status_code: int = 500 + message: str = "Internal server error" + +class ValidationError(AppError): + status_code = 400 + +class NotFoundError(AppError): + status_code = 404 + +class DuplicateError(AppError): + status_code = 409 + +class AuthenticationError(AppError): + status_code = 401 + +class AuthorizationError(AppError): + status_code = 403 + +class ProviderError(AppError): + status_code = 502 + +class RateLimitError(AppError): + status_code = 429 +``` + +A global exception handler in FastAPI middleware catches `AppError` subclasses and formats them into the JSON envelope. + +## Deployment and Infrastructure + +### Target Environment + +- **Production**: Debian 12 with Nginx and PostgreSQL (pre-installed by operator) +- **Development**: macOS with local PostgreSQL (via Homebrew or Docker) +- **CI/CD**: Gitea Actions +- **Domain**: `signal.thiessen.io` (reverse-proxied through Nginx) + +### Local Development (macOS) + +Local dev works identically to production. Install PostgreSQL via Homebrew (`brew install postgresql@16`) or run it in Docker. The app is pure Python — no platform-specific dependencies. 
+ +```bash +# Setup +git clone +cd stock-data-backend +python -m venv .venv && source .venv/bin/activate +pip install -e ".[dev]" + +# Local DB +createdb stock_data_backend +cp .env.example .env # Edit DATABASE_URL to point to local Postgres + +# Migrations +alembic upgrade head + +# Run +uvicorn app.main:app --reload --port 8000 +``` + +### Database Setup Script (`deploy/setup_db.sh`) + +Creates the PostgreSQL database, user, and runs migrations. Idempotent — safe to run multiple times. + +```bash +#!/bin/bash +set -e +DB_NAME="${DB_NAME:-stock_data_backend}" +DB_USER="${DB_USER:-stock_backend}" +DB_PASS="${DB_PASS:-changeme}" + +sudo -u postgres psql <<EOF +-- TODO(review): heredoc body (idempotent CREATE ROLE / CREATE DATABASE / GRANT statements) was lost in extraction — restore from the original script +EOF +``` + +[NOTE(review): the section(s) between the setup script and the Hypothesis test-data strategies below — including the testing-strategy heading and introduction — were lost in extraction; restore from the original document.] + +- `valid_ohlcv_records()`: Records with high >= low, all prices >= 0, volume >= 0, date <= today. +- `invalid_ohlcv_records()`: Records violating at least one constraint. +- `dimension_scores()`: Floats in [0, 100] for each dimension. +- `weight_configs()`: Dicts of dimension → positive float weight. +- `sr_levels()`: Levels with valid price, type, strength, method. +- `sentiment_scores()`: Records with valid classification, confidence, source, timestamp. +- `trade_setups()`: Setups with valid entry, stop, target, direction, R:R. + +### Property-to-Test Mapping + +Each of the 41 correctness properties maps to exactly one property-based test. 
The tag format is: + +``` +Feature: stock-data-backend, Property {number}: {property_title} +``` + +For example: +- `Feature: stock-data-backend, Property 1: Ticker creation round-trip` +- `Feature: stock-data-backend, Property 6: OHLCV validation rejects invalid records` +- `Feature: stock-data-backend, Property 21: Composite score is weighted average` +- `Feature: stock-data-backend, Property 41: Sorting correctness` diff --git a/.kiro/specs/stock-data-backend/requirements.md b/.kiro/specs/stock-data-backend/requirements.md new file mode 100644 index 0000000..13649c2 --- /dev/null +++ b/.kiro/specs/stock-data-backend/requirements.md @@ -0,0 +1,221 @@ +# Requirements Document + +## Introduction + +This document defines the requirements for the Stock Data Backend — an opinionated investing-signal platform built with Python/FastAPI and PostgreSQL, focused on NASDAQ stocks. The platform's philosophy: find the path of least resistance (trend direction), identify key support/resistance zones, detect asymmetric risk-reward setups, and surface the best opportunities through a unified scoring pipeline. It does not attempt to predict price — it identifies where conditions are most favorable. + +Every data source (OHLCV, technical indicators, sentiment, fundamentals) feeds into a single composite scoring and ranking system that auto-populates a watchlist and flags trade setups. Data ingestion is exclusively via the configured market data provider — users do not upload data directly. + +This is an MVP focused on delivering actionable signals. Engineering concerns (API format, database indexing, logging, connection pooling, graceful shutdown) are design constraints, not requirements. + +## Glossary + +- **Backend_Service**: The FastAPI-based Python web application that exposes REST API endpoints. +- **Ticker**: A unique NASDAQ stock symbol (e.g., AAPL, MSFT) being tracked by the system. 
+- **OHLCV_Record**: A single price data point containing Open, High, Low, Close, and Volume values for a specific Ticker on a specific date. +- **Ticker_Registry**: The subsystem responsible for adding, removing, listing, and looking up tracked NASDAQ tickers. +- **Price_Store**: The subsystem responsible for persisting and retrieving OHLCV price data in PostgreSQL. +- **Ingestion_Pipeline**: The subsystem responsible for importing stock data into the Price_Store via the configured market data provider. +- **Data_Collector**: A scheduled job that periodically fetches the latest price data for all tracked tickers and upserts it into the Price_Store. +- **Auth_Service**: The subsystem responsible for user registration, login, JWT token management, and role-based access control. +- **User**: A registered account with a username, hashed password, and assigned role (user or admin). +- **Admin**: A User with the admin role who can manage other users and configure system settings. +- **Access_Token**: A JWT token issued upon login, expires after 60 minutes. +- **ADX**: Average Directional Index — measures trend strength (0-100). Values above 25 indicate a strong trend. +- **EMA**: Exponential Moving Average — configurable period. EMA Cross (e.g., 20/50) determines directional bias. +- **RSI**: Relative Strength Index — momentum oscillator (0-100). Overbought >70, oversold <30. +- **ATR**: Average True Range — measures price volatility. Used for stop-loss and target placement. +- **Volume_Profile**: Distribution of traded volume across price levels, producing POC, Value Area, HVN, and LVN. +- **POC**: Point of Control — price level with highest traded volume. +- **HVN**: High Volume Node — above-average volume level, acts as support/resistance magnet. +- **LVN**: Low Volume Node — below-average volume level, acts as breakout zone. +- **Pivot_Point**: A support or resistance level from swing highs and swing lows. 
+- **SR_Level**: A support or resistance level tagged with type, strength score, and detection method. +- **SR_Detector**: The subsystem that auto-calculates support and resistance levels. +- **Sentiment_Score**: A record containing bullish/bearish/neutral classification, confidence (0-100), source, and timestamp for a Ticker. +- **Fundamental_Data**: Key financial metrics: P/E ratio, revenue growth rate, earnings surprise %, and market cap. +- **Composite_Score**: A weighted aggregate score (0-100) from all dimension scores for a Ticker. +- **Dimension_Score**: A normalized score (0-100) for a single analysis dimension (technical, S/R quality, sentiment, fundamental, momentum). +- **Scoring_Engine**: The subsystem that computes dimension scores, applies weights, and produces Composite_Scores. +- **RR_Scanner**: The subsystem that scans for asymmetric risk-reward trade setups. +- **Trade_Setup**: A detected trade opportunity with entry, stop-loss, target, R:R ratio, direction (long/short), and Composite_Score. +- **Watchlist**: A curated list of top-ranked tickers from the Scoring_Engine, with manual add/remove support. +- **System_Settings**: Persisted configuration values managed by admins. + +## Requirements + +### Requirement 1: Ticker Management + +**User Story:** As a user, I want to manage the NASDAQ tickers I am tracking, so that I can control which stocks the system analyzes. + +#### Acceptance Criteria + +- 1.1 WHEN a user submits a valid NASDAQ ticker symbol, THE Ticker_Registry SHALL create a new ticker entry and return the created ticker with its metadata. +- 1.2 WHEN a user submits a ticker symbol that already exists, THE Backend_Service SHALL return a duplicate error. +- 1.3 WHEN a user submits an empty or whitespace-only ticker symbol, THE Backend_Service SHALL reject the request with a validation error. +- 1.4 WHEN a user requests the list of tracked tickers, THE Ticker_Registry SHALL return all tickers sorted alphabetically by symbol. 
+- 1.5 WHEN a user requests deletion of a tracked ticker, THE Ticker_Registry SHALL remove the ticker and all associated data (OHLCV, scores, setups).
+- 1.6 WHEN a user requests deletion of a ticker that does not exist, THE Backend_Service SHALL return a not-found error.
+
+### Requirement 2: OHLCV Price Data Storage
+
+**User Story:** As a user, I want the system to store historical OHLCV price data, so that technical analysis and signal detection have a data foundation.
+
+#### Acceptance Criteria
+
+- 2.1 THE Price_Store SHALL persist each OHLCV_Record with: ticker symbol, date, open, high, low, close, and volume.
+- 2.2 THE Price_Store SHALL enforce uniqueness on (ticker symbol, date).
+- 2.3 THE Backend_Service SHALL reject OHLCV_Records where high < low, any price is negative, volume is negative, or date is in the future.
+- 2.4 THE Backend_Service SHALL reject OHLCV_Records for tickers not in the Ticker_Registry.
+
+### Requirement 3: Data Ingestion
+
+**User Story:** As a user, I want the system to fetch stock data from the market data provider, so that my price history stays current.
+
+#### Acceptance Criteria
+
+- 3.1 WHEN a user requests a data fetch for a ticker and date range, THE Ingestion_Pipeline SHALL fetch from the configured provider and upsert into the Price_Store.
+- 3.2 IF the provider is unreachable or returns an error, THE Ingestion_Pipeline SHALL return a descriptive error without modifying existing data.
+- 3.3 IF the provider returns a rate-limit error, THE Ingestion_Pipeline SHALL record progress and return a response indicating how many records were ingested, so the fetch can be resumed without gaps.
+- 3.4 WHEN a rate-limited fetch is resumed for the same ticker and date range, THE Ingestion_Pipeline SHALL continue from the day after the last successfully ingested date (upserts make a repeat of the last date harmless, but no earlier dates are re-fetched).
+ +### Requirement 4: Scheduled Data Collection + +**User Story:** As a user, I want the system to automatically fetch the latest price data on a schedule, so that my data stays current without manual intervention. + +#### Acceptance Criteria + +- 4.1 THE Data_Collector SHALL periodically fetch the latest daily OHLCV data for all tracked tickers. +- 4.2 THE Data_Collector SHALL upsert records, updating existing ones if they already exist. +- 4.3 WHEN the Data_Collector encounters an error for a specific ticker, it SHALL log the error and continue with remaining tickers. +- 4.4 THE Data_Collector SHALL be configurable for frequency (daily, hourly) via configuration. +- 4.5 IF a rate limit is hit during collection, THE Data_Collector SHALL record the last successful ticker and resume from there on the next run. + +### Requirement 5: Technical Analysis + +**User Story:** As a user, I want the system to compute key technical indicators, so that trend strength, momentum, and volatility feed into the scoring pipeline. + +#### Acceptance Criteria + +- 5.1 THE Backend_Service SHALL compute the following from OHLCV data: ADX, EMA (default periods 20 and 50), RSI (default 14-period), ATR (default 14-period), Volume_Profile (POC, Value Area, HVN, LVN), and Pivot_Points (swing highs/lows). +- 5.2 WHEN an indicator is requested for a Ticker and date range, THE Backend_Service SHALL return both raw values and a normalized score (0-100). +- 5.3 WHEN an EMA Cross signal is requested, THE Backend_Service SHALL compare short vs long EMA and return directional bias (bullish, bearish, neutral). +- 5.4 IF insufficient data exists to compute an indicator, THE Backend_Service SHALL return an error indicating the minimum data requirement. + +### Requirement 6: Support/Resistance Detection + +**User Story:** As a user, I want the system to auto-calculate support and resistance levels, so that I can see key price zones where buying or selling pressure concentrates. 
+ +#### Acceptance Criteria + +- 6.1 THE SR_Detector SHALL identify SR_Levels from Volume_Profile (HVN/LVN zones) and from Pivot_Points (swing highs/lows). +- 6.2 THE SR_Detector SHALL assign each level a strength score (0-100) based on how many times price has respected that level. +- 6.3 THE SR_Detector SHALL tag each level as "support" or "resistance" relative to current price. +- 6.4 WHEN new OHLCV data arrives for a Ticker, THE SR_Detector SHALL recalculate its SR_Levels. +- 6.5 THE SR_Detector SHALL merge levels from different methods within a configurable price tolerance (default 0.5%) into a single consolidated level. +- 6.6 WHEN a user requests SR_Levels for a Ticker, they SHALL be returned sorted by strength descending with detection method indicated. + +### Requirement 7: Sentiment Data + +**User Story:** As a user, I want sentiment data to feed into the scoring pipeline, so that social mood is factored into signal detection. + +#### Acceptance Criteria + +- 7.1 THE Backend_Service SHALL periodically collect sentiment data for all tracked tickers from a configured source at a configurable interval (default 30 minutes). +- 7.2 EACH Sentiment_Score SHALL contain: classification (bullish/bearish/neutral), confidence (0-100), source identifier, and timestamp. +- 7.3 IF the sentiment source is unreachable, THE Backend_Service SHALL log the error and retain existing data. +- 7.4 WHEN computing the sentiment Dimension_Score, THE Scoring_Engine SHALL aggregate recent scores within a configurable lookback window (default 24h) using configurable source weights and time decay. + +### Requirement 8: Fundamental Data + +**User Story:** As a user, I want key fundamental metrics to feed into the scoring pipeline, so that financial quality is factored into signal detection. + +#### Acceptance Criteria + +- 8.1 THE Backend_Service SHALL fetch and store Fundamental_Data for each tracked Ticker: P/E ratio, revenue growth rate, earnings surprise %, and market cap. 
+- 8.2 THE Data_Collector SHALL periodically fetch updated Fundamental_Data (default daily). +- 8.3 IF the data source is unreachable, THE Backend_Service SHALL log the error and retain the most recent data. +- 8.4 WHEN new Fundamental_Data arrives, THE Scoring_Engine SHALL mark the fundamental Dimension_Score as stale. + +### Requirement 9: Composite Scoring and Ranking + +**User Story:** As a user, I want each stock scored across all dimensions with configurable weights, so that I can rank stocks by a single unified metric tuned to my preferences. + +#### Acceptance Criteria + +- 9.1 THE Scoring_Engine SHALL compute a Dimension_Score (0-100) per Ticker for: technical, S/R quality, sentiment, fundamental, and momentum. +- 9.2 THE Scoring_Engine SHALL compute a Composite_Score as the weighted average of available Dimension_Scores using user-configurable weights. +- 9.3 WHEN a Ticker is missing data for one or more dimensions, THE Scoring_Engine SHALL use only available dimensions (re-normalizing weights) and indicate which are missing. +- 9.4 WHEN underlying data changes, THE Scoring_Engine SHALL mark the affected Composite_Score as stale. +- 9.5 WHEN a stale score is requested, THE Scoring_Engine SHALL recompute on-demand. No background recomputation. +- 9.6 WHEN a user requests rankings, THE Scoring_Engine SHALL return tickers sorted by Composite_Score descending with all Dimension_Scores included. +- 9.7 WHEN a user updates dimension weights, THE Scoring_Engine SHALL recompute all Composite_Scores. + +### Requirement 10: Asymmetric R:R Trade Detection + +**User Story:** As a user, I want the system to scan for trade setups with favorable risk-reward ratios, so that I see highly asymmetric opportunities without manual chart analysis. + +#### Acceptance Criteria + +- 10.1 THE RR_Scanner SHALL periodically scan all tracked tickers for Trade_Setups meeting a configurable R:R threshold (default 3:1). 
+- 10.2 FOR long setups: target = nearest SR_Level above price, stop = ATR-based distance below price. +- 10.3 FOR short setups: target = nearest SR_Level below price, stop = ATR-based distance above price. +- 10.4 EACH Trade_Setup SHALL include: entry price, stop-loss, target, R:R ratio, direction (long/short), and Composite_Score. +- 10.5 WHEN underlying SR_Levels or price data changes, THE RR_Scanner SHALL recalculate and remove setups that no longer meet the threshold. +- 10.6 THE RR_Scanner SHALL be configurable for scan frequency via configuration. +- 10.7 IF a Ticker lacks sufficient SR_Levels or ATR data, THE RR_Scanner SHALL skip it and log the reason. +- 10.8 WHEN a user requests trade setups, results SHALL be sorted by R:R descending (secondary: Composite_Score descending), with optional direction filter. + +### Requirement 11: Watchlist + +**User Story:** As a user, I want a watchlist of top-ranked stocks that auto-populates from scoring, so that I always have a curated shortlist of the best opportunities. + +#### Acceptance Criteria + +- 11.1 THE Watchlist SHALL auto-include the top-X tickers by Composite_Score (X configurable, default 10). +- 11.2 WHEN requested, THE Watchlist SHALL return each entry with Composite_Score, Dimension_Scores, R:R ratio (if setup exists), and active SR_Levels. +- 11.3 Users MAY manually add/remove tickers. Manual additions are tagged and not subject to auto-population rules. +- 11.4 THE Watchlist SHALL enforce a max size of auto-populate count + 10 manual additions (default max 20). +- 11.5 WHEN Composite_Scores are recomputed, auto-populated entries SHALL update to reflect new rankings. +- 11.6 THE Watchlist SHALL be sortable by Composite_Score, any Dimension_Score, or R:R ratio. + +### Requirement 12: User Authentication + +**User Story:** As a system owner, I want user registration and login with role-based access, so that only authorized users can access signals and analysis. 
+ +#### Acceptance Criteria + +- 12.1 WHEN registration is enabled and valid credentials are submitted, THE Auth_Service SHALL create a User with no API access by default. +- 12.2 WHEN registration is disabled, THE Auth_Service SHALL reject registration. +- 12.3 WHEN valid login credentials are submitted, THE Auth_Service SHALL return an Access_Token (60-minute expiry). +- 12.4 WHEN invalid credentials are submitted, THE Auth_Service SHALL return an error without revealing which field was wrong. +- 12.5 Unauthenticated requests to protected endpoints SHALL receive 401. Authenticated users without granted access SHALL receive 403. +- 12.6 WHEN a token expires, THE Backend_Service SHALL return 401 indicating expiration. + +### Requirement 13: Admin Management + +**User Story:** As an admin, I want to manage users, control system settings, and perform data maintenance. + +#### Acceptance Criteria + +- 13.1 WHEN the system initializes for the first time, a default admin account SHALL be created (username: "admin", password: "admin"). +- 13.2 Admins SHALL be able to: grant/revoke user access, toggle registration, list all users, reset user passwords, and create new user accounts. +- 13.3 Admins SHALL be able to: enable/disable scheduled jobs, update system settings (frequencies, thresholds, weights, watchlist size), and trigger manual job runs. +- 13.4 Admins SHALL be able to delete all data older than a specified number of days (OHLCV, sentiment, fundamentals). Ticker entries, user accounts, and latest scores SHALL be preserved. +- 13.5 Admin endpoints SHALL be restricted to users with the admin role. 
+ +## Design Constraints + +The following are engineering concerns to be addressed during design, not user-facing requirements: + +- Consistent JSON API envelope (status, data, error fields) with appropriate HTTP status codes +- OpenAPI/Swagger documentation endpoint +- Versioned URL prefixes (/api/v1/) +- Composite database index on (ticker, date) for range query performance +- Date-only storage for OHLCV (no time component) +- Database migrations for schema management +- Structured JSON logging with configurable levels +- Database connection pooling (default 5 connections) +- Health check endpoint (unauthenticated) +- Graceful shutdown (complete in-flight requests, stop jobs, close pool) +- Market data provider behind an interface/protocol for swappability diff --git a/.kiro/specs/stock-data-backend/tasks.md b/.kiro/specs/stock-data-backend/tasks.md new file mode 100644 index 0000000..956d3cd --- /dev/null +++ b/.kiro/specs/stock-data-backend/tasks.md @@ -0,0 +1,255 @@ +# Implementation Plan: Stock Data Backend + +## Overview + +Incremental build of the investing-signal platform: foundation first (config, DB, models, auth), then domain services (tickers, OHLCV, ingestion, indicators, S/R, sentiment, fundamentals), then scoring/ranking (scoring engine, R:R scanner, watchlist), then scheduled jobs, deployment templates, and final wiring. Each step builds on the previous and ends integrated. + +## Tasks + +- [x] 1. 
Project scaffolding, configuration, and database foundation + - [x] 1.1 Create project structure with `pyproject.toml`, `.env.example`, `alembic.ini`, and `app/` package + - Create `pyproject.toml` with dependencies: fastapi, uvicorn, sqlalchemy[asyncio], asyncpg, alembic, pydantic-settings, python-jose, passlib[bcrypt], apscheduler, httpx, alpaca-py, google-genai, hypothesis + - Create `.env.example` with all environment variables from design + - Create `app/__init__.py`, `app/config.py` (pydantic-settings `Settings` class) + - Create `app/database.py` (async SQLAlchemy engine, session factory, connection pooling) + - _Requirements: Design Constraints (connection pooling, config)_ + + - [x] 1.2 Create all SQLAlchemy ORM models and Alembic initial migration + - Create `app/models/__init__.py` and model files: `ticker.py`, `ohlcv.py`, `user.py`, `sentiment.py`, `fundamental.py`, `score.py`, `sr_level.py`, `trade_setup.py`, `watchlist.py`, `settings.py` + - Implement all 12 entities from the ERD: User, Ticker, OHLCVRecord, SentimentScore, FundamentalData, SRLevel, DimensionScore, CompositeScore, TradeSetup, WatchlistEntry, SystemSetting, IngestionProgress + - Include composite unique constraints, indexes, and cascade deletes per design + - Initialize Alembic (`alembic/env.py`) and generate initial migration + - _Requirements: 2.1, 2.2, Design Constraints (composite index on ticker+date)_ + + - [x] 1.3 Create shared schemas, exception hierarchy, and API envelope + - Create `app/schemas/common.py` with `APIEnvelope` model (status, data, error) + - Create `app/middleware.py` with global exception handler mapping `AppError` subclasses to JSON envelope responses + - Create exception classes: `AppError`, `ValidationError`, `NotFoundError`, `DuplicateError`, `AuthenticationError`, `AuthorizationError`, `ProviderError`, `RateLimitError` + - _Requirements: Design Constraints (JSON envelope, HTTP status codes)_ + + - [x] 1.4 Create FastAPI app entry point with lifespan, health 
check, and dependency injection + - Create `app/main.py` with FastAPI app, lifespan handler (DB pool startup/shutdown, default admin creation) + - Create `app/dependencies.py` with `Depends()` factories for DB session, current user, admin guard + - Create `app/routers/health.py` with unauthenticated `/api/v1/health` endpoint + - Wire health router into app + - _Requirements: 13.1, Design Constraints (health check, graceful shutdown, versioned URLs)_ + +- [x] 2. Authentication and admin services + - [x] 2.1 Implement Auth Service and auth router + - Create `app/services/auth_service.py`: registration (configurable on/off, creates no-access user), login (bcrypt verify, JWT generation with 60-min expiry), token validation + - Create `app/schemas/auth.py`: RegisterRequest, LoginRequest, TokenResponse + - Create `app/routers/auth.py`: `POST /api/v1/auth/register`, `POST /api/v1/auth/login` + - Implement JWT middleware in `app/dependencies.py` for `get_current_user` and `require_admin` + - _Requirements: 12.1, 12.2, 12.3, 12.4, 12.5, 12.6_ + + - [ ]* 2.2 Write property tests for auth (Properties 34-38) + - **Property 34: Registration creates no-access user** — _Validates: Requirements 12.1_ + - **Property 35: Registration disabled rejects all attempts** — _Validates: Requirements 12.2_ + - **Property 36: Login returns valid JWT** — _Validates: Requirements 12.3_ + - **Property 37: Invalid credentials return generic error** — _Validates: Requirements 12.4_ + - **Property 38: Access control enforcement** — _Validates: Requirements 12.5_ + + - [x] 2.3 Implement Admin Service and admin router + - Create `app/services/admin_service.py`: grant/revoke access, toggle registration, list users, reset passwords, create accounts, system settings CRUD, data cleanup (delete old OHLCV/sentiment/fundamentals preserving tickers/users/scores), job control + - Create `app/schemas/admin.py`: UserManagement, SystemSettingUpdate, DataCleanupRequest + - Create `app/routers/admin.py`: 
admin-only endpoints under `/api/v1/admin/` + - _Requirements: 13.1, 13.2, 13.3, 13.4, 13.5_ + + - [ ]* 2.4 Write property tests for admin (Properties 39-40) + - **Property 39: Admin user management operations** — _Validates: Requirements 13.2_ + - **Property 40: Data cleanup preserves structure** — _Validates: Requirements 13.4_ + +- [x] 3. Checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 4. Ticker management and OHLCV price storage + - [x] 4.1 Implement Ticker Registry service and router + - Create `app/services/ticker_service.py`: add (validate non-empty, uppercase, alphanumeric, check uniqueness), delete (cascade all associated data), list (sorted alphabetically) + - Create `app/schemas/ticker.py`: TickerCreate, TickerResponse + - Create `app/routers/tickers.py`: `POST /api/v1/tickers`, `GET /api/v1/tickers`, `DELETE /api/v1/tickers/{symbol}` + - _Requirements: 1.1, 1.2, 1.3, 1.4, 1.5, 1.6_ + + - [ ]* 4.2 Write property tests for ticker management (Properties 1-4) + - **Property 1: Ticker creation round-trip** — _Validates: Requirements 1.1_ + - **Property 2: Duplicate ticker rejection** — _Validates: Requirements 1.2_ + - **Property 3: Whitespace ticker rejection** — _Validates: Requirements 1.3_ + - **Property 4: Ticker deletion cascades** — _Validates: Requirements 1.5_ + + - [x] 4.3 Implement Price Store service and OHLCV router + - Create `app/services/price_service.py`: upsert OHLCV (validate high >= low, prices >= 0, volume >= 0, date <= today, ticker exists), query by ticker + date range + - Create `app/schemas/ohlcv.py`: OHLCVCreate, OHLCVResponse + - Create `app/routers/ohlcv.py`: `POST /api/v1/ohlcv`, `GET /api/v1/ohlcv/{symbol}` + - On upsert: invalidate LRU cache for ticker, mark composite score as stale + - _Requirements: 2.1, 2.2, 2.3, 2.4_ + + - [ ]* 4.4 Write property tests for OHLCV (Properties 5-7) + - **Property 5: OHLCV storage round-trip** — _Validates: Requirements 2.1, 2.2_ + - 
**Property 6: OHLCV validation rejects invalid records** — _Validates: Requirements 2.3_ + - **Property 7: OHLCV rejects unregistered tickers** — _Validates: Requirements 2.4_ + +- [x] 5. Market data provider and ingestion pipeline + - [x] 5.1 Implement provider protocols and concrete implementations + - Create `app/providers/protocol.py`: `MarketDataProvider` Protocol (fetch_ohlcv), `SentimentProvider` Protocol (fetch_sentiment), `FundamentalProvider` Protocol (fetch_fundamentals) + - Create `app/providers/alpaca.py`: Alpaca OHLCV provider using `alpaca-py` SDK — fetches daily bars by ticker and date range + - Create `app/providers/gemini_sentiment.py`: Gemini sentiment provider using `google-genai` with search grounding — sends structured prompt per ticker, parses JSON response (classification + confidence) + - Create `app/providers/fmp.py`: Financial Modeling Prep fundamentals provider using `httpx` — fetches P/E, revenue growth, earnings surprise, market cap + - _Requirements: Design Constraints (provider behind interface)_ + + - [x] 5.2 Implement Ingestion Pipeline service and router + - Create `app/services/ingestion_service.py`: fetch + upsert with rate-limit handling (track `last_ingested_date`, return partial progress on rate limit, resume from last date + 1 day), provider error handling (descriptive error, no data modification) + - Create `app/routers/ingestion.py`: `POST /api/v1/ingestion/fetch/{symbol}` + - _Requirements: 3.1, 3.2, 3.3, 3.4_ + + - [ ]* 5.3 Write property tests for ingestion (Properties 8-9) + - **Property 8: Provider error preserves existing data** — _Validates: Requirements 3.2, 7.3, 8.3_ + - **Property 9: Rate-limit resume continuity** — _Validates: Requirements 3.3, 3.4, 4.5_ + +- [x] 6. Checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 7. 
Technical analysis and S/R detection + - [x] 7.1 Implement LRU cache wrapper with invalidation + - Create `app/cache.py`: LRU cache wrapper (max 1000 entries) keyed on ticker + date range + indicator type, with per-ticker invalidation method + - _Requirements: Design Constraints (LRU cache)_ + + - [x] 7.2 Implement Technical Analysis service and indicators router + - Create `app/services/indicator_service.py`: compute ADX (28+ bars), EMA (period+1 bars, default 20/50), RSI (15+ bars, 14-period), ATR (15+ bars, 14-period), Volume Profile (20+ bars, POC/Value Area/HVN/LVN), Pivot Points (5+ bars, swing highs/lows) + - Each indicator returns raw values + normalized 0-100 score + - Implement EMA cross signal (bullish/bearish/neutral based on short vs long EMA comparison) + - Enforce minimum data requirements, return error if insufficient + - Create `app/schemas/indicator.py`: IndicatorRequest, IndicatorResponse, EMACrossResponse + - Create `app/routers/indicators.py`: `GET /api/v1/indicators/{symbol}/{indicator_type}`, `GET /api/v1/indicators/{symbol}/ema-cross` + - _Requirements: 5.1, 5.2, 5.3, 5.4_ + + - [ ]* 7.3 Write property tests for indicators (Properties 11-14) + - **Property 11: Score bounds invariant** — _Validates: Requirements 5.2, 6.2, 9.1_ + - **Property 12: Indicator minimum data enforcement** — _Validates: Requirements 5.4_ + - **Property 13: EMA cross directional bias** — _Validates: Requirements 5.3_ + - **Property 14: Indicator computation determinism** — _Validates: Requirements 5.1_ + + - [x] 7.4 Implement S/R Detector service and router + - Create `app/services/sr_service.py`: detect SR levels from Volume Profile (HVN/LVN) and Pivot Points (swing highs/lows), assign strength scores (0-100 based on price respect count), merge levels within tolerance (default 0.5%), tag as support/resistance relative to current price, recalculate on new OHLCV data + - Create `app/schemas/sr_level.py`: SRLevelResponse + - Create `app/routers/sr_levels.py`: `GET 
/api/v1/sr-levels/{symbol}` (sorted by strength descending) + - _Requirements: 6.1, 6.2, 6.3, 6.4, 6.5, 6.6_ + + - [ ]* 7.5 Write property tests for S/R detection (Properties 15-17) + - **Property 15: SR level support/resistance tagging** — _Validates: Requirements 6.3_ + - **Property 16: SR level merging within tolerance** — _Validates: Requirements 6.5_ + - **Property 17: SR level detection from data** — _Validates: Requirements 6.1_ + +- [x] 8. Sentiment and fundamental data services + - [x] 8.1 Implement Sentiment service and router + - Create `app/services/sentiment_service.py`: store sentiment records (classification, confidence, source, timestamp), compute dimension score with time-decay weighted average over configurable lookback window (default 24h) + - Create `app/schemas/sentiment.py`: SentimentResponse + - Create `app/routers/sentiment.py`: `GET /api/v1/sentiment/{symbol}` + - _Requirements: 7.1, 7.2, 7.3, 7.4_ + + - [ ]* 8.2 Write property tests for sentiment (Properties 18-19) + - **Property 18: Sentiment score data shape** — _Validates: Requirements 7.2_ + - **Property 19: Sentiment dimension score uses time decay** — _Validates: Requirements 7.4_ + + - [x] 8.3 Implement Fundamental Data service and router + - Create `app/services/fundamental_service.py`: store fundamental data (P/E, revenue growth, earnings surprise, market cap), mark fundamental dimension score as stale on new data + - Create `app/schemas/fundamental.py`: FundamentalResponse + - Create `app/routers/fundamentals.py`: `GET /api/v1/fundamentals/{symbol}` + - _Requirements: 8.1, 8.2, 8.3, 8.4_ + + - [ ]* 8.4 Write property test for fundamentals (Property 20) + - **Property 20: Fundamental data storage round-trip** — _Validates: Requirements 8.1_ + +- [x] 9. Checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 10. 
Scoring engine, R:R scanner, and watchlist + - [x] 10.1 Implement Scoring Engine service and router + - Create `app/services/scoring_service.py`: compute dimension scores (technical, sr_quality, sentiment, fundamental, momentum) each 0-100, compute composite score as weighted average of available dimensions with re-normalized weights, staleness marking/recomputation on demand, weight update triggers full recomputation + - Create `app/schemas/score.py`: ScoreResponse, WeightUpdateRequest, RankingResponse + - Create `app/routers/scores.py`: `GET /api/v1/scores/{symbol}`, `GET /api/v1/rankings`, `PUT /api/v1/scores/weights` + - _Requirements: 9.1, 9.2, 9.3, 9.4, 9.5, 9.6, 9.7_ + + - [ ]* 10.2 Write property tests for scoring (Properties 21-25) + - **Property 21: Composite score is weighted average** — _Validates: Requirements 9.2_ + - **Property 22: Missing dimensions re-normalize weights** — _Validates: Requirements 9.3_ + - **Property 23: Staleness marking on data change** — _Validates: Requirements 9.4_ + - **Property 24: Stale score recomputation on demand** — _Validates: Requirements 9.5_ + - **Property 25: Weight update triggers full recomputation** — _Validates: Requirements 9.7_ + + - [x] 10.3 Implement R:R Scanner service and router + - Create `app/services/rr_scanner_service.py`: scan tickers for trade setups (long: target = nearest SR above, stop = entry - ATR×multiplier; short: target = nearest SR below, stop = entry + ATR×multiplier), filter by R:R threshold (default 3:1), recalculate/prune on data change, skip tickers without sufficient SR/ATR data + - Create `app/schemas/trade_setup.py`: TradeSetupResponse + - Create `app/routers/trades.py`: `GET /api/v1/trades` (sorted by R:R desc, secondary composite desc, optional direction filter) + - _Requirements: 10.1, 10.2, 10.3, 10.4, 10.5, 10.6, 10.7, 10.8_ + + - [ ]* 10.4 Write property tests for R:R scanner (Properties 26-29) + - **Property 26: Trade setup R:R threshold filtering** — _Validates: Requirements 
10.1_ + - **Property 27: Trade setup computation correctness** — _Validates: Requirements 10.2, 10.3_ + - **Property 28: Trade setup data completeness** — _Validates: Requirements 10.4_ + - **Property 29: Trade setup pruning on data change** — _Validates: Requirements 10.5_ + + - [x] 10.5 Implement Watchlist service and router + - Create `app/services/watchlist_service.py`: auto-populate top-X by composite score (default 10), manual add/remove (tagged, not subject to auto-population), enforce cap (auto + 10 manual, default max 20), update auto entries on score recomputation + - Create `app/schemas/watchlist.py`: WatchlistEntryResponse (includes composite score, dimension scores, R:R ratio, SR levels) + - Create `app/routers/watchlist.py`: `GET /api/v1/watchlist`, `POST /api/v1/watchlist/{symbol}`, `DELETE /api/v1/watchlist/{symbol}` (sortable by composite, dimension, or R:R) + - _Requirements: 11.1, 11.2, 11.3, 11.4, 11.5, 11.6_ + + - [ ]* 10.6 Write property tests for watchlist (Properties 30-33) + - **Property 30: Watchlist auto-population** — _Validates: Requirements 11.1_ + - **Property 31: Watchlist entry data completeness** — _Validates: Requirements 11.2_ + - **Property 32: Manual watchlist entries persist through auto-population** — _Validates: Requirements 11.3_ + - **Property 33: Watchlist size cap enforcement** — _Validates: Requirements 11.4_ + +- [x] 11. Checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +- [x] 12. 
Scheduled jobs and sorting correctness + - [x] 12.1 Implement APScheduler job definitions and scheduler integration + - Create `app/scheduler.py`: define scheduled jobs for Data Collector (OHLCV fetch for all tickers, configurable frequency), Sentiment Collector (default 30 min), Fundamental Collector (default daily), R:R Scanner (configurable frequency) + - Each job: process all tracked tickers independently (one failure doesn't stop others), log errors with structured JSON, handle rate limits (record last successful ticker, resume next run) + - Wire scheduler into FastAPI lifespan (start on startup, shutdown gracefully) + - _Requirements: 4.1, 4.2, 4.3, 4.4, 4.5, 7.1, 8.2, 10.6_ + + - [ ]* 12.2 Write property test for scheduled collection (Property 10) + - **Property 10: Scheduled collection processes all tickers** — _Validates: Requirements 4.1, 4.3, 7.1, 8.2_ + + - [ ]* 12.3 Write property test for sorting correctness (Property 41) + - **Property 41: Sorting correctness** — _Validates: Requirements 1.4, 6.6, 9.6, 10.8, 11.6_ + +- [x] 13. Test infrastructure and shared fixtures + - [x] 13.1 Create test configuration and shared fixtures + - Create `tests/conftest.py`: test DB session fixture (transaction rollback per test), FastAPI test client fixture, mock `MarketDataProvider`, hypothesis custom strategies (`valid_ticker_symbols`, `whitespace_strings`, `valid_ohlcv_records`, `invalid_ohlcv_records`, `dimension_scores`, `weight_configs`, `sr_levels`, `sentiment_scores`, `trade_setups`) + - Create `tests/__init__.py`, `tests/unit/__init__.py`, `tests/property/__init__.py` + - _Requirements: Design (Testing Strategy)_ + +- [x] 14. 
Deployment templates and CI/CD + - [x] 14.1 Create deployment configuration files + - Create `deploy/nginx.conf` (reverse proxy for signal.thiessen.io) + - Create `deploy/stock-data-backend.service` (systemd unit file) + - Create `deploy/setup_db.sh` (idempotent DB creation + migration script) + - Create `.gitea/workflows/deploy.yml` (lint → test → deploy pipeline) + - _Requirements: Design (Deployment and Infrastructure)_ + +- [x] 15. Final wiring and integration + - [x] 15.1 Wire all routers into FastAPI app and verify OpenAPI docs + - Register all routers in `app/main.py` under `/api/v1/` prefix + - Verify Swagger/OpenAPI docs endpoint works at `/docs` + - Ensure all middleware (logging, error handling, auth) is applied + - _Requirements: Design Constraints (OpenAPI/Swagger, versioned URLs)_ + + - [ ]* 15.2 Write integration tests for key API flows + - Test end-to-end: register → login → add ticker → fetch data → get indicators → get scores → get watchlist + - Test auth enforcement: unauthenticated → 401, no-access user → 403, admin endpoints → 403 for non-admin + - Test error flows: duplicate ticker → 409, invalid OHLCV → 400, missing ticker → 404 + - _Requirements: 1.1-1.6, 2.1-2.4, 12.1-12.6_ + +- [x] 16. Final checkpoint - Ensure all tests pass + - Ensure all tests pass, ask the user if questions arise. + +## Notes + +- Tasks marked with `*` are optional and can be skipped for faster MVP +- Each task references specific requirements for traceability +- Checkpoints ensure incremental validation +- Property tests validate the 41 correctness properties from the design document using `hypothesis` +- Unit tests validate specific examples and edge cases +- All code is Python 3.12+ with FastAPI, SQLAlchemy async, and PostgreSQL diff --git a/README.md b/README.md new file mode 100644 index 0000000..54c1b43 --- /dev/null +++ b/README.md @@ -0,0 +1,300 @@ +# Signal Dashboard + +Investing-signal platform for NASDAQ stocks. 
Surfaces the best trading opportunities through weighted multi-dimensional scoring — technical indicators, support/resistance quality, sentiment, fundamentals, and momentum — with asymmetric risk:reward scanning. + +**Philosophy:** Don't predict price. Find the path of least resistance, key S/R zones, and asymmetric R:R setups. + +## Stack + +| Layer | Tech | +|---|---| +| Backend | Python 3.12+, FastAPI, Uvicorn, async SQLAlchemy, Alembic | +| Database | PostgreSQL (asyncpg) | +| Scheduler | APScheduler — OHLCV, sentiment, fundamentals, R:R scan | +| Frontend | React 18, TypeScript, Vite 5 | +| Styling | Tailwind CSS 3 with custom glassmorphism design system | +| State | TanStack React Query v5 (server), Zustand (client/auth) | +| Charts | Canvas 2D candlestick chart with S/R overlays | +| Routing | React Router v6 (SPA) | +| HTTP | Axios with JWT interceptor | +| Data providers | Alpaca (OHLCV), Gemini 2.0 Flash (sentiment via search grounding), Financial Modeling Prep (fundamentals) | + +## Features + +### Backend +- Ticker registry with full cascade delete +- OHLCV price storage with upsert and validation +- Technical indicators: ADX, EMA, RSI, ATR, Volume Profile, Pivot Points, EMA Cross +- Support/Resistance detection with strength scoring and merge-within-tolerance +- Sentiment analysis with time-decay weighted scoring +- Fundamental data tracking (P/E, revenue growth, earnings surprise, market cap) +- 5-dimension scoring engine (technical, S/R quality, sentiment, fundamental, momentum) with configurable weights +- Risk:Reward scanner — long and short setups, ATR-based stops, configurable R:R threshold (default 3:1) +- Auto-populated watchlist (top-10 by composite score) + manual entries (cap: 20) +- JWT auth with admin role, configurable registration, user access control +- Scheduled jobs with enable/disable control and status monitoring +- Admin panel: user management, data cleanup, job control, system settings + +### Frontend +- Glassmorphism UI with 
frosted glass panels, gradient text, ambient glow effects, mesh gradient background +- Interactive candlestick chart (Canvas 2D) with hover tooltips showing OHLCV values +- Support/Resistance level overlays on chart (top 6 by strength, dashed lines with labels) +- Data freshness bar showing availability and recency of each data source +- Watchlist with composite scores, R:R ratios, and S/R summaries +- Ticker detail page: chart, scores, sentiment breakdown, fundamentals, technical indicators, S/R table +- Rankings table with configurable dimension weights +- Trade scanner showing detected R:R setups +- Admin page: user management, job status with live indicators, enable/disable toggles, data cleanup, system settings +- Protected routes with JWT auth, admin-only sections +- Responsive layout with mobile navigation +- Toast notifications for async operations + +## Pages + +| Route | Page | Access | +|---|---|---| +| `/login` | Login | Public | +| `/register` | Register | Public (when enabled) | +| `/watchlist` | Watchlist (default) | Authenticated | +| `/ticker/:symbol` | Ticker Detail | Authenticated | +| `/scanner` | Trade Scanner | Authenticated | +| `/rankings` | Rankings | Authenticated | +| `/admin` | Admin Panel | Admin only | + +## API Endpoints + +All under `/api/v1/`. Interactive docs at `/docs` (Swagger) and `/redoc`. 
+ +| Group | Endpoints | +|---|---| +| Health | `GET /health` | +| Auth | `POST /auth/register`, `POST /auth/login` | +| Tickers | `POST /tickers`, `GET /tickers`, `DELETE /tickers/{symbol}` | +| OHLCV | `POST /ohlcv`, `GET /ohlcv/{symbol}` | +| Ingestion | `POST /ingestion/fetch/{symbol}` | +| Indicators | `GET /indicators/{symbol}/{type}`, `GET /indicators/{symbol}/ema-cross` | +| S/R Levels | `GET /sr-levels/{symbol}` | +| Sentiment | `GET /sentiment/{symbol}` | +| Fundamentals | `GET /fundamentals/{symbol}` | +| Scores | `GET /scores/{symbol}`, `GET /rankings`, `PUT /scores/weights` | +| Trades | `GET /trades` | +| Watchlist | `GET /watchlist`, `POST /watchlist/{symbol}`, `DELETE /watchlist/{symbol}` | +| Admin | `GET /admin/users`, `PUT /admin/users/{id}/role`, `PUT /admin/users/{id}/access`, `DELETE /admin/data/{symbol}`, `POST /admin/jobs/{name}/trigger`, `PUT /admin/jobs/{name}/toggle`, `GET /admin/jobs`, `GET /admin/settings`, `PUT /admin/settings` | + +## Development Setup + +### Prerequisites + +- Python 3.12+ +- PostgreSQL (via Homebrew on macOS: `brew install postgresql@17`) +- Node.js 18+ and npm + +### Backend Setup + +```bash +# Create and activate virtual environment +python -m venv .venv +source .venv/bin/activate +pip install -e ".[dev]" + +# Configure environment +cp .env.example .env +# Edit .env with your values (see Environment Variables below) + +# Start PostgreSQL and create database +brew services start postgresql@17 +createdb stock_data_backend +createuser stock_backend + +# Run migrations +alembic upgrade head + +# Start the backend +uvicorn app.main:app --reload --host 0.0.0.0 --port 8000 +``` + +A default `admin`/`admin` account is created on first startup. Open http://localhost:8000/docs for Swagger UI. + +### Frontend Setup + +```bash +cd frontend +npm install +npm run dev +``` + +Open http://localhost:5173 for the Signal Dashboard. The Vite dev server proxies `/api/v1/` requests to the backend at `http://127.0.0.1:8000`. 
+ +### Frontend Build + +```bash +cd frontend +npm run build # TypeScript check + production build → frontend/dist/ +npm run preview # Preview the production build locally +``` + +### Tests + +```bash +# Backend tests (in-memory SQLite — no PostgreSQL needed) +pytest tests/ -v + +# Frontend tests +cd frontend +npm test +``` + +## Environment Variables + +Configure in `.env` (copy from `.env.example`): + +| Variable | Required | Default | Description | +|---|---|---|---| +| `DATABASE_URL` | Yes | — | PostgreSQL connection string (`postgresql+asyncpg://...`) | +| `JWT_SECRET` | Yes | — | Random secret for JWT signing | +| `JWT_EXPIRY_MINUTES` | No | `60` | JWT token expiry | +| `ALPACA_API_KEY` | For OHLCV | — | Alpaca Markets API key | +| `ALPACA_API_SECRET` | For OHLCV | — | Alpaca Markets API secret | +| `GEMINI_API_KEY` | For sentiment | — | Google Gemini API key | +| `GEMINI_MODEL` | No | `gemini-2.0-flash` | Gemini model name | +| `FMP_API_KEY` | For fundamentals | — | Financial Modeling Prep API key | +| `DATA_COLLECTOR_FREQUENCY` | No | `daily` | OHLCV collection schedule | +| `SENTIMENT_POLL_INTERVAL_MINUTES` | No | `30` | Sentiment polling interval | +| `FUNDAMENTAL_FETCH_FREQUENCY` | No | `daily` | Fundamentals fetch schedule | +| `RR_SCAN_FREQUENCY` | No | `daily` | R:R scanner schedule | +| `DEFAULT_WATCHLIST_AUTO_SIZE` | No | `10` | Auto-watchlist size | +| `DEFAULT_RR_THRESHOLD` | No | `3.0` | Minimum R:R ratio for setups | +| `DB_POOL_SIZE` | No | `5` | Database connection pool size | +| `LOG_LEVEL` | No | `INFO` | Logging level | + +## Production Deployment (Debian 12) + +### 1. Install dependencies + +```bash +sudo apt update && sudo apt install -y python3.12 python3.12-venv postgresql nginx nodejs npm +``` + +### 2. Create service user + +```bash +sudo useradd -r -s /usr/sbin/nologin stockdata +``` + +### 3. 
Deploy application + +```bash +sudo mkdir -p /opt/stock-data-backend +# Copy project files to /opt/stock-data-backend +cd /opt/stock-data-backend +python3.12 -m venv .venv +source .venv/bin/activate +pip install . +``` + +### 4. Configure + +```bash +sudo cp .env.example /opt/stock-data-backend/.env +sudo chown stockdata:stockdata /opt/stock-data-backend/.env +# Edit .env with production values (strong JWT_SECRET, real API keys, etc.) +``` + +### 5. Database + +```bash +DB_NAME=stock_data_backend DB_USER=stock_backend DB_PASS=strong_password ./deploy/setup_db.sh +``` + +### 6. Build frontend + +```bash +cd frontend +npm ci +npm run build +``` + +### 7. Systemd service + +```bash +sudo cp deploy/stock-data-backend.service /etc/systemd/system/ +sudo systemctl daemon-reload +sudo systemctl enable --now stock-data-backend +``` + +### 8. Nginx reverse proxy + +```bash +sudo cp deploy/nginx.conf /etc/nginx/sites-available/stock-data-backend +sudo ln -s /etc/nginx/sites-available/stock-data-backend /etc/nginx/sites-enabled/ +sudo nginx -t && sudo systemctl reload nginx +``` + +Nginx serves the frontend static files from `frontend/dist/` and proxies `/api/v1/` to the backend. + +### 9. 
SSL (recommended) + +```bash +sudo apt install certbot python3-certbot-nginx +sudo certbot --nginx -d signal.thiessen.io +``` + +### Verify + +```bash +curl https://signal.thiessen.io/api/v1/health +``` + +## Project Structure + +``` +app/ +├── main.py # FastAPI app, lifespan, router wiring +├── config.py # Pydantic settings from .env +├── database.py # Async SQLAlchemy engine + session +├── dependencies.py # DI: DB session, auth guards +├── exceptions.py # Exception hierarchy +├── middleware.py # Global error handler → JSON envelope +├── cache.py # LRU cache with per-ticker invalidation +├── scheduler.py # APScheduler job definitions +├── models/ # SQLAlchemy ORM models +├── schemas/ # Pydantic request/response schemas +├── services/ # Business logic layer +├── providers/ # External data provider integrations +└── routers/ # FastAPI route handlers + +frontend/ +├── index.html # SPA entry point +├── vite.config.ts # Vite config with API proxy +├── tailwind.config.ts # Tailwind + glassmorphism theme +├── package.json +└── src/ + ├── App.tsx # Route definitions + ├── main.tsx # React entry point + ├── api/ # Axios API client modules (one per resource) + ├── components/ + │ ├── admin/ # User table, job controls, settings, data cleanup + │ ├── auth/ # Protected route wrapper + │ ├── charts/ # Canvas candlestick chart + │ ├── layout/ # App shell, sidebar, mobile nav + │ ├── rankings/ # Rankings table, weights form + │ ├── scanner/ # Trade table + │ ├── ticker/ # Sentiment panel, fundamentals, indicators, S/R overlay + │ ├── ui/ # Badge, toast, skeleton, score card, confirm dialog + │ └── watchlist/ # Watchlist table, add ticker form + ├── hooks/ # React Query hooks (one per resource) + ├── lib/ # Types, formatting utilities + ├── pages/ # Page components (7 pages) + ├── stores/ # Zustand auth store + └── styles/ # Global CSS with glassmorphism classes + +deploy/ +├── nginx.conf # Reverse proxy + static file serving +├── setup_db.sh # Idempotent DB setup script +└── 
stock-data-backend.service # systemd unit + +tests/ +├── conftest.py # Fixtures, strategies, test DB +├── unit/ # Unit tests +└── property/ # Property-based tests (Hypothesis) +``` diff --git a/alembic.ini b/alembic.ini new file mode 100644 index 0000000..bed1805 --- /dev/null +++ b/alembic.ini @@ -0,0 +1,40 @@ +[alembic] +script_location = alembic +prepend_sys_path = . +sqlalchemy.url = driver://user:pass@localhost/dbname + +[post_write_hooks] + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/alembic/env.py b/alembic/env.py new file mode 100644 index 0000000..f187993 --- /dev/null +++ b/alembic/env.py @@ -0,0 +1,67 @@ +import asyncio +from logging.config import fileConfig + +from alembic import context +from sqlalchemy import pool +from sqlalchemy.ext.asyncio import async_engine_from_config + +from app.config import settings +from app.database import Base + +# Import all models so they register with Base.metadata +import app.models # noqa: F401 + +config = context.config + +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Override sqlalchemy.url with the app's database URL +config.set_main_option("sqlalchemy.url", settings.database_url) + +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode.""" + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with 
context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection): + context.configure(connection=connection, target_metadata=target_metadata) + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """Run migrations in 'online' mode with async engine.""" + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/alembic/script.py.mako b/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/alembic/versions/001_initial_schema.py b/alembic/versions/001_initial_schema.py new file mode 100644 index 0000000..dda7074 --- /dev/null +++ b/alembic/versions/001_initial_schema.py @@ -0,0 +1,180 @@ +"""initial_schema + +Revision ID: 001 +Revises: +Create Date: 2025-01-01 00:00:00.000000 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + + +# revision identifiers, used by Alembic. +revision: str = "001" +down_revision: Union[str, None] = None +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + # Independent tables (no foreign keys) + op.create_table( + "system_settings", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("key", sa.String(length=100), nullable=False), + sa.Column("value", sa.Text(), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("key"), + ) + op.create_table( + "tickers", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("symbol", sa.String(length=10), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("symbol"), + ) + op.create_table( + "users", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("username", sa.String(length=100), nullable=False), + sa.Column("password_hash", sa.String(length=255), nullable=False), + sa.Column("role", sa.String(length=20), nullable=False), + sa.Column("has_access", sa.Boolean(), 
nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("username"), + ) + + # Tables with FK to tickers + op.create_table( + "composite_scores", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("score", sa.Float(), nullable=False), + sa.Column("is_stale", sa.Boolean(), nullable=False), + sa.Column("weights_json", sa.Text(), nullable=False), + sa.Column("computed_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "dimension_scores", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("dimension", sa.String(length=50), nullable=False), + sa.Column("score", sa.Float(), nullable=False), + sa.Column("is_stale", sa.Boolean(), nullable=False), + sa.Column("computed_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "fundamental_data", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("pe_ratio", sa.Float(), nullable=True), + sa.Column("revenue_growth", sa.Float(), nullable=True), + sa.Column("earnings_surprise", sa.Float(), nullable=True), + sa.Column("market_cap", sa.Float(), nullable=True), + sa.Column("fetched_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "ingestion_progress", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("last_ingested_date", 
sa.Date(), nullable=False), + sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("ticker_id", name="uq_ingestion_progress_ticker"), + ) + op.create_table( + "ohlcv_records", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("date", sa.Date(), nullable=False), + sa.Column("open", sa.Float(), nullable=False), + sa.Column("high", sa.Float(), nullable=False), + sa.Column("low", sa.Float(), nullable=False), + sa.Column("close", sa.Float(), nullable=False), + sa.Column("volume", sa.BigInteger(), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("ticker_id", "date", name="uq_ohlcv_ticker_date"), + ) + op.create_index("ix_ohlcv_ticker_date", "ohlcv_records", ["ticker_id", "date"], unique=False) + op.create_table( + "sentiment_scores", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("classification", sa.String(length=20), nullable=False), + sa.Column("confidence", sa.Integer(), nullable=False), + sa.Column("source", sa.String(length=100), nullable=False), + sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "sr_levels", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("price_level", sa.Float(), nullable=False), + sa.Column("type", sa.String(length=20), nullable=False), + sa.Column("strength", sa.Integer(), nullable=False), + sa.Column("detection_method", sa.String(length=50), nullable=False), + 
sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + op.create_table( + "trade_setups", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("direction", sa.String(length=10), nullable=False), + sa.Column("entry_price", sa.Float(), nullable=False), + sa.Column("stop_loss", sa.Float(), nullable=False), + sa.Column("target", sa.Float(), nullable=False), + sa.Column("rr_ratio", sa.Float(), nullable=False), + sa.Column("composite_score", sa.Float(), nullable=False), + sa.Column("detected_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + ) + + # Table with FKs to both users and tickers + op.create_table( + "watchlist_entries", + sa.Column("id", sa.Integer(), nullable=False), + sa.Column("user_id", sa.Integer(), nullable=False), + sa.Column("ticker_id", sa.Integer(), nullable=False), + sa.Column("entry_type", sa.String(length=10), nullable=False), + sa.Column("added_at", sa.DateTime(timezone=True), nullable=False), + sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"), + sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"), + sa.PrimaryKeyConstraint("id"), + sa.UniqueConstraint("user_id", "ticker_id", name="uq_watchlist_user_ticker"), + ) + + +def downgrade() -> None: + op.drop_table("watchlist_entries") + op.drop_table("trade_setups") + op.drop_table("sr_levels") + op.drop_table("sentiment_scores") + op.drop_index("ix_ohlcv_ticker_date", table_name="ohlcv_records") + op.drop_table("ohlcv_records") + op.drop_table("ingestion_progress") + op.drop_table("fundamental_data") + op.drop_table("dimension_scores") + op.drop_table("composite_scores") + op.drop_table("users") + op.drop_table("tickers") + op.drop_table("system_settings") 
diff --git a/app/__init__.py b/app/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/app/__init__.py @@ -0,0 +1 @@ + diff --git a/app/cache.py b/app/cache.py new file mode 100644 index 0000000..4dc60fb --- /dev/null +++ b/app/cache.py @@ -0,0 +1,86 @@ +"""LRU cache wrapper with per-ticker invalidation. + +Provides an in-memory cache (max 1000 entries) keyed on +(ticker, start_date, end_date, indicator_type). Supports selective +invalidation of all entries for a given ticker — needed when new +OHLCV data is ingested. +""" + +from __future__ import annotations + +from collections import OrderedDict +from typing import Any, Hashable + +CacheKey = tuple[str, Any, Any, str] # (ticker, start_date, end_date, indicator_type) + +_DEFAULT_MAX_SIZE = 1000 + + +class LRUCache: + """Simple LRU cache backed by an ``OrderedDict``. + + Parameters + ---------- + max_size: + Maximum number of entries. When exceeded the least-recently-used + entry is evicted. Defaults to 1000. + """ + + def __init__(self, max_size: int = _DEFAULT_MAX_SIZE) -> None: + self._max_size = max_size + self._store: OrderedDict[Hashable, Any] = OrderedDict() + + # ------------------------------------------------------------------ + # Public API + # ------------------------------------------------------------------ + + def get(self, key: CacheKey) -> Any | None: + """Return cached value or ``None`` on miss. + + Accessing an entry promotes it to most-recently-used. + """ + if key not in self._store: + return None + self._store.move_to_end(key) + return self._store[key] + + def set(self, key: CacheKey, value: Any) -> None: + """Insert or update *key* with *value*. + + If the cache is full the least-recently-used entry is evicted. 
+ """ + if key in self._store: + self._store.move_to_end(key) + self._store[key] = value + return + if len(self._store) >= self._max_size: + self._store.popitem(last=False) # evict LRU + self._store[key] = value + + def invalidate_ticker(self, ticker: str) -> int: + """Remove all entries whose first key element matches *ticker*. + + Returns the number of evicted entries. + """ + keys_to_remove = [k for k in self._store if k[0] == ticker] + for k in keys_to_remove: + del self._store[k] + return len(keys_to_remove) + + def clear(self) -> None: + """Remove all entries.""" + self._store.clear() + + @property + def size(self) -> int: + """Current number of cached entries.""" + return len(self._store) + + @property + def max_size(self) -> int: + """Maximum capacity.""" + return self._max_size + + +# Module-level singleton used by the indicator service. +indicator_cache = LRUCache() diff --git a/app/config.py b/app/config.py new file mode 100644 index 0000000..44e3b66 --- /dev/null +++ b/app/config.py @@ -0,0 +1,43 @@ +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", env_file_encoding="utf-8") + + # Database + database_url: str = "postgresql+asyncpg://stock_backend:changeme@localhost:5432/stock_data_backend" + + # Auth + jwt_secret: str = "change-this-to-a-random-secret" + jwt_expiry_minutes: int = 60 + + # OHLCV Provider — Alpaca Markets + alpaca_api_key: str = "" + alpaca_api_secret: str = "" + + # Sentiment Provider — Gemini with Search Grounding + gemini_api_key: str = "" + gemini_model: str = "gemini-2.0-flash" + + # Fundamentals Provider — Financial Modeling Prep + fmp_api_key: str = "" + + # Scheduled Jobs + data_collector_frequency: str = "daily" + sentiment_poll_interval_minutes: int = 30 + fundamental_fetch_frequency: str = "daily" + rr_scan_frequency: str = "daily" + + # Scoring Defaults + default_watchlist_auto_size: int = 10 + default_rr_threshold: float = 
3.0 + + # Database Pool + db_pool_size: int = 5 + db_pool_timeout: int = 30 + + # Logging + log_level: str = "INFO" + + +settings = Settings() diff --git a/app/database.py b/app/database.py new file mode 100644 index 0000000..2ba0f3c --- /dev/null +++ b/app/database.py @@ -0,0 +1,33 @@ +from collections.abc import AsyncGenerator + +from sqlalchemy.ext.asyncio import ( + AsyncSession, + async_sessionmaker, + create_async_engine, +) +from sqlalchemy.orm import DeclarativeBase + +from app.config import settings + +engine = create_async_engine( + settings.database_url, + pool_size=settings.db_pool_size, + pool_timeout=settings.db_pool_timeout, + pool_pre_ping=True, + echo=False, +) + +async_session_factory = async_sessionmaker( + engine, + class_=AsyncSession, + expire_on_commit=False, +) + + +class Base(DeclarativeBase): + pass + + +async def get_session() -> AsyncGenerator[AsyncSession, None]: + async with async_session_factory() as session: + yield session diff --git a/app/dependencies.py b/app/dependencies.py new file mode 100644 index 0000000..ead6e2c --- /dev/null +++ b/app/dependencies.py @@ -0,0 +1,82 @@ +"""FastAPI dependency injection factories. + +Provides DB session, current user extraction from JWT, and role/access guards. 
+""" + +import logging +from collections.abc import AsyncGenerator + +from fastapi import Depends +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer +from jose import JWTError, jwt +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import settings +from app.database import get_session +from app.exceptions import AuthenticationError, AuthorizationError +from app.models.user import User + +logger = logging.getLogger(__name__) + +_bearer_scheme = HTTPBearer(auto_error=False) + +JWT_ALGORITHM = "HS256" + + +async def get_db() -> AsyncGenerator[AsyncSession, None]: + """Yield an async DB session.""" + async for session in get_session(): + yield session + + +async def get_current_user( + credentials: HTTPAuthorizationCredentials | None = Depends(_bearer_scheme), + db: AsyncSession = Depends(get_db), +) -> User: + """Extract and validate JWT from Authorization header, return the User.""" + if credentials is None: + raise AuthenticationError("Authentication required") + + token = credentials.credentials + try: + payload = jwt.decode( + token, + settings.jwt_secret, + algorithms=[JWT_ALGORITHM], + ) + user_id_str: str | None = payload.get("sub") + if user_id_str is None: + raise AuthenticationError("Invalid token: missing subject") + user_id = int(user_id_str) + except JWTError as exc: + if "expired" in str(exc).lower(): + raise AuthenticationError("Token expired") from exc + raise AuthenticationError("Invalid token") from exc + except (ValueError, TypeError) as exc: + raise AuthenticationError("Invalid token: bad subject") from exc + + result = await db.execute(select(User).where(User.id == user_id)) + user = result.scalar_one_or_none() + if user is None: + raise AuthenticationError("User not found") + + return user + + +async def require_admin( + user: User = Depends(get_current_user), +) -> User: + """Guard that ensures the current user has the admin role.""" + if user.role != "admin": + raise 
AuthorizationError("Insufficient permissions") + return user + + +async def require_access( + user: User = Depends(get_current_user), +) -> User: + """Guard that ensures the current user has API access granted.""" + if not user.has_access: + raise AuthorizationError("Insufficient permissions") + return user diff --git a/app/exceptions.py b/app/exceptions.py new file mode 100644 index 0000000..bf4b635 --- /dev/null +++ b/app/exceptions.py @@ -0,0 +1,52 @@ +"""Application exception hierarchy. + +All custom exceptions inherit from AppError. The global exception handler +in middleware.py catches these and returns the appropriate JSON envelope. +""" + + +class AppError(Exception): + """Base application error.""" + + status_code: int = 500 + message: str = "Internal server error" + + def __init__(self, message: str | None = None): + if message is not None: + self.message = message + super().__init__(self.message) + + +class ValidationError(AppError): + status_code = 400 + message = "Validation error" + + +class NotFoundError(AppError): + status_code = 404 + message = "Resource not found" + + +class DuplicateError(AppError): + status_code = 409 + message = "Resource already exists" + + +class AuthenticationError(AppError): + status_code = 401 + message = "Authentication required" + + +class AuthorizationError(AppError): + status_code = 403 + message = "Insufficient permissions" + + +class ProviderError(AppError): + status_code = 502 + message = "Market data provider unavailable" + + +class RateLimitError(AppError): + status_code = 429 + message = "Rate limited" diff --git a/app/main.py b/app/main.py new file mode 100644 index 0000000..7b3211a --- /dev/null +++ b/app/main.py @@ -0,0 +1,106 @@ +"""FastAPI application entry point with lifespan management.""" + +import logging +import sys +from contextlib import asynccontextmanager +from collections.abc import AsyncGenerator + +from fastapi import FastAPI +from passlib.hash import bcrypt +from sqlalchemy import select +from 
sqlalchemy.ext.asyncio import AsyncSession + +from app.config import settings +from app.database import async_session_factory, engine +from app.middleware import register_exception_handlers +from app.models.user import User +from app.scheduler import configure_scheduler, scheduler +from app.routers.admin import router as admin_router +from app.routers.auth import router as auth_router +from app.routers.health import router as health_router +from app.routers.ingestion import router as ingestion_router +from app.routers.ohlcv import router as ohlcv_router +from app.routers.indicators import router as indicators_router +from app.routers.fundamentals import router as fundamentals_router +from app.routers.scores import router as scores_router +from app.routers.trades import router as trades_router +from app.routers.watchlist import router as watchlist_router +from app.routers.sentiment import router as sentiment_router +from app.routers.sr_levels import router as sr_levels_router +from app.routers.tickers import router as tickers_router + + +def _configure_logging() -> None: + """Set up structured JSON-style logging.""" + handler = logging.StreamHandler(sys.stdout) + handler.setFormatter( + logging.Formatter( + '{"time":"%(asctime)s","level":"%(levelname)s",' + '"logger":"%(name)s","message":"%(message)s"}' + ) + ) + root = logging.getLogger() + root.handlers.clear() + root.addHandler(handler) + root.setLevel(settings.log_level.upper()) + + +async def _create_default_admin(session: AsyncSession) -> None: + """Create the default admin account if no admin user exists.""" + result = await session.execute( + select(User).where(User.role == "admin") + ) + if result.scalar_one_or_none() is None: + admin = User( + username="admin", + password_hash=bcrypt.hash("admin"), + role="admin", + has_access=True, + ) + session.add(admin) + await session.commit() + logging.getLogger(__name__).info("Default admin account created") + + +@asynccontextmanager +async def lifespan(_app: 
FastAPI) -> AsyncGenerator[None, None]: + """Manage startup and shutdown lifecycle.""" + logger = logging.getLogger(__name__) + _configure_logging() + logger.info("Starting Stock Data Backend") + + async with async_session_factory() as session: + await _create_default_admin(session) + + configure_scheduler() + scheduler.start() + logger.info("Scheduler started") + + yield + + scheduler.shutdown(wait=False) + logger.info("Scheduler stopped") + await engine.dispose() + logger.info("Shutting down") + + +app = FastAPI( + title="Stock Data Backend", + version="0.1.0", + lifespan=lifespan, +) + +register_exception_handlers(app) +app.include_router(health_router, prefix="/api/v1") +app.include_router(auth_router, prefix="/api/v1") +app.include_router(admin_router, prefix="/api/v1") +app.include_router(tickers_router, prefix="/api/v1") +app.include_router(ohlcv_router, prefix="/api/v1") +app.include_router(ingestion_router, prefix="/api/v1") +app.include_router(indicators_router, prefix="/api/v1") +app.include_router(sr_levels_router, prefix="/api/v1") +app.include_router(sentiment_router, prefix="/api/v1") +app.include_router(fundamentals_router, prefix="/api/v1") +app.include_router(scores_router, prefix="/api/v1") +app.include_router(trades_router, prefix="/api/v1") +app.include_router(watchlist_router, prefix="/api/v1") diff --git a/app/middleware.py b/app/middleware.py new file mode 100644 index 0000000..5d6b203 --- /dev/null +++ b/app/middleware.py @@ -0,0 +1,61 @@ +"""Global exception handlers for the FastAPI application. + +Maps AppError subclasses and other exceptions to JSON envelope responses. 
+""" + +import logging +import traceback + +from fastapi import FastAPI, Request +from fastapi.exceptions import RequestValidationError +from fastapi.responses import JSONResponse + +from app.exceptions import AppError + +logger = logging.getLogger(__name__) + + +def register_exception_handlers(app: FastAPI) -> None: + """Register all global exception handlers on the FastAPI app.""" + + @app.exception_handler(AppError) + async def app_error_handler(_request: Request, exc: AppError) -> JSONResponse: + return JSONResponse( + status_code=exc.status_code, + content={ + "status": "error", + "data": None, + "error": exc.message, + }, + ) + + @app.exception_handler(RequestValidationError) + async def validation_error_handler( + _request: Request, exc: RequestValidationError + ) -> JSONResponse: + details = "; ".join( + f"{'.'.join(str(loc) for loc in e['loc'])}: {e['msg']}" + for e in exc.errors() + ) + return JSONResponse( + status_code=400, + content={ + "status": "error", + "data": None, + "error": f"Validation error: {details}", + }, + ) + + @app.exception_handler(Exception) + async def unhandled_error_handler( + _request: Request, exc: Exception + ) -> JSONResponse: + logger.error("Unhandled exception:\n%s", traceback.format_exc()) + return JSONResponse( + status_code=500, + content={ + "status": "error", + "data": None, + "error": "Internal server error", + }, + ) diff --git a/app/models/__init__.py b/app/models/__init__.py new file mode 100644 index 0000000..3521e21 --- /dev/null +++ b/app/models/__init__.py @@ -0,0 +1,25 @@ +from app.models.ticker import Ticker +from app.models.ohlcv import OHLCVRecord +from app.models.user import User +from app.models.sentiment import SentimentScore +from app.models.fundamental import FundamentalData +from app.models.score import DimensionScore, CompositeScore +from app.models.sr_level import SRLevel +from app.models.trade_setup import TradeSetup +from app.models.watchlist import WatchlistEntry +from app.models.settings import 
class FundamentalData(Base):
    """One fetched snapshot of fundamental metrics for a ticker.

    All metric columns are nullable: the upstream provider may not report
    every field for every ticker.
    """

    __tablename__ = "fundamental_data"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Owning ticker; rows are removed by the DB when the ticker is deleted.
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    pe_ratio: Mapped[float | None] = mapped_column(Float, nullable=True)
    revenue_growth: Mapped[float | None] = mapped_column(Float, nullable=True)
    earnings_surprise: Mapped[float | None] = mapped_column(Float, nullable=True)
    market_cap: Mapped[float | None] = mapped_column(Float, nullable=True)
    # Timestamp of the provider fetch (timezone-aware column).
    fetched_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="fundamental_data")
class OHLCVRecord(Base):
    """One daily OHLCV bar for a ticker.

    A (ticker_id, date) pair is unique; the composite index backs the
    common "bars for ticker in date range" query.
    """

    __tablename__ = "ohlcv_records"
    __table_args__ = (
        UniqueConstraint("ticker_id", "date", name="uq_ohlcv_ticker_date"),
        Index("ix_ohlcv_ticker_date", "ticker_id", "date"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    date: Mapped[date] = mapped_column(Date, nullable=False)
    open: Mapped[float] = mapped_column(Float, nullable=False)
    high: Mapped[float] = mapped_column(Float, nullable=False)
    low: Mapped[float] = mapped_column(Float, nullable=False)
    close: Mapped[float] = mapped_column(Float, nullable=False)
    # BigInteger: daily volumes can exceed 32-bit range.
    volume: Mapped[int] = mapped_column(BigInteger, nullable=False)
    # NOTE(review): datetime.utcnow is naive (and deprecated in 3.12) while
    # the column is timezone-aware — confirm the driver accepts this.
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )

    ticker = relationship("Ticker", back_populates="ohlcv_records")


class DimensionScore(Base):
    """Score for one named scoring dimension of a ticker."""

    __tablename__ = "dimension_scores"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # Name of the scoring dimension (free-form string, max 50 chars).
    dimension: Mapped[str] = mapped_column(String(50), nullable=False)
    score: Mapped[float] = mapped_column(Float, nullable=False)
    # True when the underlying data is outdated relative to the score.
    is_stale: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    computed_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="dimension_scores")
class CompositeScore(Base):
    """Aggregate score for a ticker; the weights used are kept alongside."""

    __tablename__ = "composite_scores"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    score: Mapped[float] = mapped_column(Float, nullable=False)
    is_stale: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    # JSON-serialized weight mapping used for this computation (audit trail).
    weights_json: Mapped[str] = mapped_column(Text, nullable=False)
    computed_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="composite_scores")


class SentimentScore(Base):
    """One sentiment reading for a ticker from an external provider."""

    __tablename__ = "sentiment_scores"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # Expected values: "bullish" | "bearish" | "neutral" (see providers).
    classification: Mapped[str] = mapped_column(String(20), nullable=False)
    # Provider confidence, 0-100.
    confidence: Mapped[int] = mapped_column(Integer, nullable=False)
    # Provider identifier, e.g. "gemini".
    source: Mapped[str] = mapped_column(String(100), nullable=False)
    timestamp: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="sentiment_scores")


class SystemSetting(Base):
    """Key/value store for runtime-tunable system settings."""

    __tablename__ = "system_settings"

    id: Mapped[int] = mapped_column(primary_key=True)
    key: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    # Values are stored as text; callers interpret/parse as needed.
    value: Mapped[str] = mapped_column(Text, nullable=False)
    # NOTE(review): datetime.utcnow is naive while the column is
    # timezone-aware — confirm the driver accepts this.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )
class IngestionProgress(Base):
    """Per-ticker checkpoint of how far OHLCV ingestion has progressed."""

    __tablename__ = "ingestion_progress"
    __table_args__ = (
        # At most one progress row per ticker.
        UniqueConstraint("ticker_id", name="uq_ingestion_progress_ticker"),
    )

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # Last trading date for which data was successfully ingested.
    last_ingested_date: Mapped[date] = mapped_column(Date, nullable=False)
    # NOTE(review): naive datetime.utcnow vs timezone-aware column — confirm.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )

    ticker = relationship("Ticker", back_populates="ingestion_progress")


class SRLevel(Base):
    """A detected support/resistance price level for a ticker."""

    __tablename__ = "sr_levels"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    price_level: Mapped[float] = mapped_column(Float, nullable=False)
    # Level kind (string, max 20 chars). NOTE: attribute name shadows the
    # builtin `type`; kept because the column name is part of the schema.
    type: Mapped[str] = mapped_column(String(20), nullable=False)
    strength: Mapped[int] = mapped_column(Integer, nullable=False)
    # Identifier of the algorithm that produced the level.
    detection_method: Mapped[str] = mapped_column(String(50), nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )

    ticker = relationship("Ticker", back_populates="sr_levels")
class Ticker(Base):
    """A tracked stock symbol; the root entity all market data hangs off.

    Deleting a ticker cascades to every dependent row both at the ORM
    level (delete-orphan) and at the DB level (FK ondelete="CASCADE").
    """

    __tablename__ = "tickers"

    id: Mapped[int] = mapped_column(primary_key=True)
    # Exchange symbol, e.g. up to 10 chars, unique across the table.
    symbol: Mapped[str] = mapped_column(String(10), unique=True, nullable=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )

    # Relationships (cascade deletes)
    ohlcv_records = relationship("OHLCVRecord", back_populates="ticker", cascade="all, delete-orphan")
    sentiment_scores = relationship("SentimentScore", back_populates="ticker", cascade="all, delete-orphan")
    fundamental_data = relationship("FundamentalData", back_populates="ticker", cascade="all, delete-orphan")
    sr_levels = relationship("SRLevel", back_populates="ticker", cascade="all, delete-orphan")
    dimension_scores = relationship("DimensionScore", back_populates="ticker", cascade="all, delete-orphan")
    composite_scores = relationship("CompositeScore", back_populates="ticker", cascade="all, delete-orphan")
    trade_setups = relationship("TradeSetup", back_populates="ticker", cascade="all, delete-orphan")
    watchlist_entries = relationship("WatchlistEntry", back_populates="ticker", cascade="all, delete-orphan")
    # uselist=False: one-to-one — each ticker has at most one progress row.
    ingestion_progress = relationship("IngestionProgress", back_populates="ticker", cascade="all, delete-orphan", uselist=False)
class TradeSetup(Base):
    """A detected trade opportunity with its risk/reward parameters."""

    __tablename__ = "trade_setups"

    id: Mapped[int] = mapped_column(primary_key=True)
    ticker_id: Mapped[int] = mapped_column(
        ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False
    )
    # Trade direction (string, max 10 chars).
    direction: Mapped[str] = mapped_column(String(10), nullable=False)
    entry_price: Mapped[float] = mapped_column(Float, nullable=False)
    stop_loss: Mapped[float] = mapped_column(Float, nullable=False)
    target: Mapped[float] = mapped_column(Float, nullable=False)
    # Risk/reward ratio for the setup.
    rr_ratio: Mapped[float] = mapped_column(Float, nullable=False)
    # Snapshot of the ticker's composite score at detection time.
    composite_score: Mapped[float] = mapped_column(Float, nullable=False)
    detected_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), nullable=False
    )

    ticker = relationship("Ticker", back_populates="trade_setups")


class User(Base):
    """An application account with role-based access flags."""

    __tablename__ = "users"

    id: Mapped[int] = mapped_column(primary_key=True)
    username: Mapped[str] = mapped_column(String(100), unique=True, nullable=False)
    # Hashed password only — plaintext is never stored.
    password_hash: Mapped[str] = mapped_column(String(255), nullable=False)
    # "admin" or "user"; defaults to the least-privileged role.
    role: Mapped[str] = mapped_column(String(20), nullable=False, default="user")
    # API access must be explicitly granted (defaults to False).
    has_access: Mapped[bool] = mapped_column(Boolean, nullable=False, default=False)
    created_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, nullable=False
    )
    # NOTE(review): naive datetime.utcnow vs timezone-aware column — confirm.
    updated_at: Mapped[datetime] = mapped_column(
        DateTime(timezone=True), default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False
    )

    watchlist_entries = relationship("WatchlistEntry", back_populates="user", cascade="all, delete-orphan")
"watchlist_entries" + __table_args__ = ( + UniqueConstraint("user_id", "ticker_id", name="uq_watchlist_user_ticker"), + ) + + id: Mapped[int] = mapped_column(primary_key=True) + user_id: Mapped[int] = mapped_column( + ForeignKey("users.id", ondelete="CASCADE"), nullable=False + ) + ticker_id: Mapped[int] = mapped_column( + ForeignKey("tickers.id", ondelete="CASCADE"), nullable=False + ) + entry_type: Mapped[str] = mapped_column(String(10), nullable=False) + added_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), default=datetime.utcnow, nullable=False + ) + + user = relationship("User", back_populates="watchlist_entries") + ticker = relationship("Ticker", back_populates="watchlist_entries") diff --git a/app/providers/__init__.py b/app/providers/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/app/providers/__init__.py @@ -0,0 +1 @@ + diff --git a/app/providers/alpaca.py b/app/providers/alpaca.py new file mode 100644 index 0000000..0d37008 --- /dev/null +++ b/app/providers/alpaca.py @@ -0,0 +1,63 @@ +"""Alpaca Markets OHLCV provider using the alpaca-py SDK.""" + +from __future__ import annotations + +import asyncio +import logging +from datetime import date + +from alpaca.data.historical import StockHistoricalDataClient +from alpaca.data.requests import StockBarsRequest +from alpaca.data.timeframe import TimeFrame + +from app.exceptions import ProviderError, RateLimitError +from app.providers.protocol import OHLCVData + +logger = logging.getLogger(__name__) + + +class AlpacaOHLCVProvider: + """Fetches daily OHLCV bars from Alpaca Markets Data API.""" + + def __init__(self, api_key: str, api_secret: str) -> None: + if not api_key or not api_secret: + raise ProviderError("Alpaca API key and secret are required") + self._client = StockHistoricalDataClient(api_key, api_secret) + + async def fetch_ohlcv( + self, ticker: str, start_date: date, end_date: date + ) -> list[OHLCVData]: + """Fetch daily OHLCV bars for *ticker* between 
    async def fetch_ohlcv(
        self, ticker: str, start_date: date, end_date: date
    ) -> list[OHLCVData]:
        """Fetch daily OHLCV bars for *ticker* between *start_date* and *end_date*.

        Returns:
            One OHLCVData DTO per daily bar (may be empty).

        Raises:
            RateLimitError: when the failure looks like an API rate limit.
            ProviderError: for any other failure.
        """
        try:
            request = StockBarsRequest(
                symbol_or_symbols=ticker,
                timeframe=TimeFrame.Day,
                start=start_date,
                end=end_date,
            )

            # alpaca-py's client is synchronous — run in a thread so the
            # event loop is not blocked.
            bars = await asyncio.to_thread(self._client.get_stock_bars, request)

            results: list[OHLCVData] = []
            # Tolerate differing SDK return shapes: mapping-style access when
            # available, otherwise fall back to a `.data` dict.
            # NOTE(review): confirm which shapes current alpaca-py versions
            # actually return; the fallback silently yields [] otherwise.
            bar_set = bars.get(ticker, []) if hasattr(bars, "get") else getattr(bars, "data", {}).get(ticker, [])
            for bar in bar_set:
                results.append(
                    OHLCVData(
                        ticker=ticker,
                        date=bar.timestamp.date(),
                        open=float(bar.open),
                        high=float(bar.high),
                        low=float(bar.low),
                        close=float(bar.close),
                        volume=int(bar.volume),
                    )
                )
            return results

        except Exception as exc:
            # NOTE(review): rate limits are detected by substring match on the
            # error text — fragile; consider alpaca-py's exception types.
            msg = str(exc).lower()
            if "rate" in msg and "limit" in msg:
                raise RateLimitError(f"Alpaca rate limit hit for {ticker}") from exc
            logger.error("Alpaca provider error for %s: %s", ticker, exc)
            raise ProviderError(f"Alpaca provider error for {ticker}: {exc}") from exc
profile = await self._fetch_profile(client, ticker) + earnings = await self._fetch_earnings_surprise(client, ticker) + + pe_ratio = self._safe_float(profile.get("pe")) + revenue_growth = self._safe_float(profile.get("revenueGrowth")) + market_cap = self._safe_float(profile.get("mktCap")) + earnings_surprise = self._safe_float(earnings) + + return FundamentalData( + ticker=ticker, + pe_ratio=pe_ratio, + revenue_growth=revenue_growth, + earnings_surprise=earnings_surprise, + market_cap=market_cap, + fetched_at=datetime.now(timezone.utc), + ) + + except (ProviderError, RateLimitError): + raise + except Exception as exc: + logger.error("FMP provider error for %s: %s", ticker, exc) + raise ProviderError(f"FMP provider error for {ticker}: {exc}") from exc + + async def _fetch_profile(self, client: httpx.AsyncClient, ticker: str) -> dict: + """Fetch company profile (P/E, revenue growth, market cap).""" + url = f"{_FMP_BASE_URL}/profile/{ticker}" + resp = await client.get(url, params={"apikey": self._api_key}) + self._check_response(resp, ticker, "profile") + data = resp.json() + if isinstance(data, list) and data: + return data[0] + return data if isinstance(data, dict) else {} + + async def _fetch_earnings_surprise( + self, client: httpx.AsyncClient, ticker: str + ) -> float | None: + """Fetch the most recent earnings surprise percentage.""" + url = f"{_FMP_BASE_URL}/earnings-surprises/{ticker}" + resp = await client.get(url, params={"apikey": self._api_key}) + self._check_response(resp, ticker, "earnings-surprises") + data = resp.json() + if isinstance(data, list) and data: + return self._safe_float(data[0].get("actualEarningResult")) + return None + + def _check_response( + self, resp: httpx.Response, ticker: str, endpoint: str + ) -> None: + """Raise appropriate errors for non-200 responses.""" + if resp.status_code == 429: + raise RateLimitError(f"FMP rate limit hit for {ticker} ({endpoint})") + if resp.status_code != 200: + raise ProviderError( + f"FMP {endpoint} 
error for {ticker}: HTTP {resp.status_code}" + ) + + @staticmethod + def _safe_float(value: object) -> float | None: + """Convert a value to float, returning None on failure.""" + if value is None: + return None + try: + return float(value) + except (TypeError, ValueError): + return None diff --git a/app/providers/gemini_sentiment.py b/app/providers/gemini_sentiment.py new file mode 100644 index 0000000..25e04d0 --- /dev/null +++ b/app/providers/gemini_sentiment.py @@ -0,0 +1,90 @@ +"""Gemini sentiment provider using google-genai with search grounding.""" + +from __future__ import annotations + +import json +import logging +from datetime import datetime, timezone + +from google import genai +from google.genai import types + +from app.exceptions import ProviderError, RateLimitError +from app.providers.protocol import SentimentData + +logger = logging.getLogger(__name__) + +_SENTIMENT_PROMPT = """\ +Analyze the current market sentiment for the stock ticker {ticker}. +Search the web for recent news articles, social media mentions, and analyst opinions. 
class GeminiSentimentProvider:
    """Fetches sentiment analysis from Gemini with search grounding."""

    def __init__(self, api_key: str, model: str = "gemini-2.0-flash") -> None:
        """Create a Gemini client.

        Raises:
            ProviderError: when *api_key* is empty.
        """
        if not api_key:
            raise ProviderError("Gemini API key is required")
        self._client = genai.Client(api_key=api_key)
        self._model = model

    async def fetch_sentiment(self, ticker: str) -> SentimentData:
        """Send a structured prompt to Gemini and parse the JSON response.

        Returns:
            SentimentData with a validated classification and a confidence
            clamped to [0, 100].

        Raises:
            RateLimitError: when the failure looks like a quota/rate limit.
            ProviderError: on invalid JSON, invalid classification, or any
                other failure.
        """
        try:
            response = await self._client.aio.models.generate_content(
                model=self._model,
                contents=_SENTIMENT_PROMPT.format(ticker=ticker),
                config=types.GenerateContentConfig(
                    # Search grounding + JSON-only output.
                    tools=[types.Tool(google_search=types.GoogleSearch())],
                    response_mime_type="application/json",
                ),
            )

            raw_text = response.text.strip()
            logger.debug("Gemini raw response for %s: %s", ticker, raw_text)
            parsed = json.loads(raw_text)

            # Reject anything outside the prompt's closed vocabulary.
            classification = parsed.get("classification", "").lower()
            if classification not in VALID_CLASSIFICATIONS:
                raise ProviderError(
                    f"Invalid classification '{classification}' from Gemini for {ticker}"
                )

            # Clamp model-supplied confidence into the documented 0-100 range.
            confidence = int(parsed.get("confidence", 50))
            confidence = max(0, min(100, confidence))

            reasoning = parsed.get("reasoning", "")
            if reasoning:
                logger.info("Gemini sentiment for %s: %s (confidence=%d) — %s",
                            ticker, classification, confidence, reasoning)

            return SentimentData(
                ticker=ticker,
                classification=classification,
                confidence=confidence,
                source="gemini",
                timestamp=datetime.now(timezone.utc),
            )

        except json.JSONDecodeError as exc:
            logger.error("Failed to parse Gemini JSON for %s: %s", ticker, exc)
            raise ProviderError(f"Invalid JSON from Gemini for {ticker}") from exc
        except ProviderError:
            # Re-raise our own errors untouched (don't re-wrap below).
            raise
        except Exception as exc:
            # NOTE(review): rate limits detected via substring match — fragile.
            msg = str(exc).lower()
            if "rate" in msg or "quota" in msg or "429" in msg:
                raise RateLimitError(f"Gemini rate limit hit for {ticker}") from exc
            logger.error("Gemini provider error for %s: %s", ticker, exc)
            raise ProviderError(f"Gemini provider error for {ticker}: {exc}") from exc
@dataclass(frozen=True, slots=True)
class OHLCVData:
    """Lightweight OHLCV record returned by market data providers."""

    ticker: str
    # Trading day of the bar.
    date: date
    open: float
    high: float
    low: float
    close: float
    volume: int


@dataclass(frozen=True, slots=True)
class SentimentData:
    """Sentiment analysis result returned by sentiment providers."""

    ticker: str
    classification: str  # "bullish" | "bearish" | "neutral"
    confidence: int  # 0-100
    # Provider identifier, e.g. "gemini".
    source: str
    timestamp: datetime


@dataclass(frozen=True, slots=True)
class FundamentalData:
    """Fundamental metrics returned by fundamental providers.

    Metric fields are Optional: providers may omit any of them.
    """

    ticker: str
    pe_ratio: float | None
    revenue_growth: float | None
    earnings_surprise: float | None
    market_cap: float | None
    fetched_at: datetime
class MarketDataProvider(Protocol):
    """Structural interface for OHLCV market data providers."""

    async def fetch_ohlcv(
        self, ticker: str, start_date: date, end_date: date
    ) -> list[OHLCVData]:
        """Fetch OHLCV data for a ticker in a date range."""
        ...


class SentimentProvider(Protocol):
    """Structural interface for sentiment analysis providers."""

    async def fetch_sentiment(self, ticker: str) -> SentimentData:
        """Fetch current sentiment analysis for a ticker."""
        ...


class FundamentalProvider(Protocol):
    """Structural interface for fundamental data providers."""

    async def fetch_fundamentals(self, ticker: str) -> FundamentalData:
        """Fetch fundamental data for a ticker."""
        ...
+""" + +from fastapi import APIRouter, Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from app.dependencies import get_db, require_admin +from app.models.user import User +from app.schemas.admin import ( + CreateUserRequest, + DataCleanupRequest, + JobToggle, + PasswordReset, + RegistrationToggle, + SystemSettingUpdate, + UserManagement, +) +from app.schemas.common import APIEnvelope +from app.services import admin_service + +router = APIRouter(tags=["admin"]) + + +def _user_dict(user: User) -> dict: + return { + "id": user.id, + "username": user.username, + "role": user.role, + "has_access": user.has_access, + "created_at": user.created_at.isoformat() if user.created_at else None, + "updated_at": user.updated_at.isoformat() if user.updated_at else None, + } + + +# --------------------------------------------------------------------------- +# User management +# --------------------------------------------------------------------------- + +@router.get("/admin/users", response_model=APIEnvelope) +async def list_users( + _admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """List all user accounts.""" + users = await admin_service.list_users(db) + return APIEnvelope(status="success", data=[_user_dict(u) for u in users]) + + +@router.post("/admin/users", response_model=APIEnvelope, status_code=201) +async def create_user( + body: CreateUserRequest, + _admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """Create a new user account.""" + user = await admin_service.create_user( + db, body.username, body.password, body.role, body.has_access + ) + return APIEnvelope(status="success", data=_user_dict(user)) + + +@router.put("/admin/users/{user_id}/access", response_model=APIEnvelope) +async def set_user_access( + user_id: int, + body: UserManagement, + _admin: User = Depends(require_admin), + db: AsyncSession = Depends(get_db), +): + """Grant or revoke API access for a user.""" + user = await 
@router.put("/admin/users/{user_id}/password", response_model=APIEnvelope)
async def reset_password(
    user_id: int,
    body: PasswordReset,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Reset a user's password (admin only)."""
    user = await admin_service.reset_password(db, user_id, body.new_password)
    return APIEnvelope(status="success", data=_user_dict(user))


# ---------------------------------------------------------------------------
# Registration toggle
# ---------------------------------------------------------------------------

@router.put("/admin/settings/registration", response_model=APIEnvelope)
async def toggle_registration(
    body: RegistrationToggle,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Enable or disable user registration (admin only)."""
    setting = await admin_service.toggle_registration(db, body.enabled)
    return APIEnvelope(
        status="success",
        data={"key": setting.key, "value": setting.value},
    )


# ---------------------------------------------------------------------------
# System settings
# ---------------------------------------------------------------------------

@router.get("/admin/settings", response_model=APIEnvelope)
async def list_settings(
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """List all system settings (admin only)."""
    settings_list = await admin_service.list_settings(db)
    return APIEnvelope(
        status="success",
        data=[
            {"key": s.key, "value": s.value, "updated_at": s.updated_at.isoformat() if s.updated_at else None}
            for s in settings_list
        ],
    )
# ---------------------------------------------------------------------------
# Data cleanup
# ---------------------------------------------------------------------------

@router.post("/admin/data/cleanup", response_model=APIEnvelope)
async def cleanup_data(
    body: DataCleanupRequest,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Delete OHLCV, sentiment, and fundamental data older than N days.

    Returns the per-table deletion counts produced by the service layer.
    """
    counts = await admin_service.cleanup_data(db, body.older_than_days)
    return APIEnvelope(status="success", data=counts)


# ---------------------------------------------------------------------------
# Job control
# ---------------------------------------------------------------------------

@router.get("/admin/jobs", response_model=APIEnvelope)
async def list_jobs(
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """List all scheduled jobs with their current status (admin only)."""
    jobs = await admin_service.list_jobs(db)
    return APIEnvelope(status="success", data=jobs)


@router.post("/admin/jobs/{job_name}/trigger", response_model=APIEnvelope)
async def trigger_job(
    job_name: str,
    _admin: User = Depends(require_admin),
    db: AsyncSession = Depends(get_db),
):
    """Trigger a manual job run (placeholder; delegates to the service)."""
    result = await admin_service.trigger_job(db, job_name)
    return APIEnvelope(status="success", data=result)
admin_service.toggle_job(db, job_name, body.enabled) + return APIEnvelope( + status="success", + data={"key": setting.key, "value": setting.value}, + ) diff --git a/app/routers/auth.py b/app/routers/auth.py new file mode 100644 index 0000000..a08a320 --- /dev/null +++ b/app/routers/auth.py @@ -0,0 +1,34 @@ +"""Auth router: registration and login endpoints.""" + +from fastapi import APIRouter, Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from app.dependencies import get_db +from app.schemas.auth import LoginRequest, RegisterRequest, TokenResponse +from app.schemas.common import APIEnvelope +from app.services import auth_service + +router = APIRouter(tags=["auth"]) + + +@router.post("/auth/register", response_model=APIEnvelope) +async def register(body: RegisterRequest, db: AsyncSession = Depends(get_db)): + """Public endpoint — register a new user.""" + user = await auth_service.register(db, body.username, body.password) + return APIEnvelope( + status="success", + data={ + "id": user.id, + "username": user.username, + "role": user.role, + "has_access": user.has_access, + }, + ) + + +@router.post("/auth/login", response_model=APIEnvelope) +async def login(body: LoginRequest, db: AsyncSession = Depends(get_db)): + """Public endpoint — login and receive a JWT.""" + token = await auth_service.login(db, body.username, body.password) + token_resp = TokenResponse(access_token=token) + return APIEnvelope(status="success", data=token_resp.model_dump()) diff --git a/app/routers/fundamentals.py b/app/routers/fundamentals.py new file mode 100644 index 0000000..207cbf9 --- /dev/null +++ b/app/routers/fundamentals.py @@ -0,0 +1,35 @@ +"""Fundamentals router — fundamental data endpoints.""" + +from fastapi import APIRouter, Depends +from sqlalchemy.ext.asyncio import AsyncSession + +from app.dependencies import get_db, require_access +from app.schemas.common import APIEnvelope +from app.schemas.fundamental import FundamentalResponse +from 
app.services.fundamental_service import get_fundamental + +router = APIRouter(tags=["fundamentals"]) + + +@router.get("/fundamentals/{symbol}", response_model=APIEnvelope) +async def read_fundamentals( + symbol: str, + _user=Depends(require_access), + db: AsyncSession = Depends(get_db), +) -> APIEnvelope: + """Get latest fundamental data for a symbol.""" + record = await get_fundamental(db, symbol) + + if record is None: + data = FundamentalResponse(symbol=symbol.strip().upper()) + else: + data = FundamentalResponse( + symbol=symbol.strip().upper(), + pe_ratio=record.pe_ratio, + revenue_growth=record.revenue_growth, + earnings_surprise=record.earnings_surprise, + market_cap=record.market_cap, + fetched_at=record.fetched_at, + ) + + return APIEnvelope(status="success", data=data.model_dump()) diff --git a/app/routers/health.py b/app/routers/health.py new file mode 100644 index 0000000..e9b38da --- /dev/null +++ b/app/routers/health.py @@ -0,0 +1,36 @@ +"""Health check endpoint — unauthenticated.""" + +import logging + +from fastapi import APIRouter, Depends +from fastapi.responses import JSONResponse +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession + +from app.dependencies import get_db +from app.schemas.common import APIEnvelope + +logger = logging.getLogger(__name__) + +router = APIRouter(tags=["health"]) + + +@router.get("/health") +async def health_check(db: AsyncSession = Depends(get_db)) -> APIEnvelope: + """Return service health including database connectivity.""" + try: + await db.execute(text("SELECT 1")) + return APIEnvelope( + status="success", + data={"status": "healthy", "database": "connected"}, + ) + except Exception: + logger.exception("Health check: database unreachable") + return JSONResponse( + status_code=503, + content={ + "status": "error", + "data": None, + "error": "Database unreachable", + }, + ) diff --git a/app/routers/indicators.py b/app/routers/indicators.py new file mode 100644 index 0000000..086e8bc --- 
"""Indicators router — technical analysis endpoints."""

from datetime import date

from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession

from app.dependencies import get_db, require_access
from app.schemas.common import APIEnvelope
from app.schemas.indicator import (
    EMACrossResponse,
    EMACrossResult,
    IndicatorResponse,
    IndicatorResult,
)
from app.services.indicator_service import get_ema_cross, get_indicator

router = APIRouter(tags=["indicators"])


# NOTE: ema-cross must be registered BEFORE {indicator_type} so FastAPI
# does not match the literal "ema-cross" segment as an indicator_type
# path parameter.


@router.get("/indicators/{symbol}/ema-cross", response_model=APIEnvelope)
async def read_ema_cross(
    symbol: str,
    start_date: date | None = Query(None),
    end_date: date | None = Query(None),
    short_period: int = Query(20),
    long_period: int = Query(50),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Compute the EMA cross signal for *symbol* over an optional window."""
    raw = await get_ema_cross(
        db, symbol, start_date, end_date, short_period, long_period
    )
    payload = EMACrossResponse(
        symbol=symbol.upper(),
        ema_cross=EMACrossResult(**raw),
    )
    return APIEnvelope(status="success", data=payload.model_dump())


@router.get("/indicators/{symbol}/{indicator_type}", response_model=APIEnvelope)
async def read_indicator(
    symbol: str,
    indicator_type: str,
    start_date: date | None = Query(None),
    end_date: date | None = Query(None),
    period: int | None = Query(None),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Compute an arbitrary technical indicator for *symbol*."""
    raw = await get_indicator(
        db, symbol, indicator_type, start_date, end_date, period
    )
    payload = IndicatorResponse(
        symbol=symbol.upper(),
        indicator=IndicatorResult(**raw),
    )
    return APIEnvelope(status="success", data=payload.model_dump())
APIEnvelope(status="success", data=data.model_dump()) diff --git a/app/routers/ingestion.py b/app/routers/ingestion.py new file mode 100644 index 0000000..2bf244b --- /dev/null +++ b/app/routers/ingestion.py @@ -0,0 +1,127 @@ +"""Ingestion router: trigger data fetches from the market data provider. + +Provides both a single-source OHLCV endpoint and a comprehensive +fetch-all endpoint that collects OHLCV + sentiment + fundamentals +in one call with per-source status reporting. +""" + +from __future__ import annotations + +import logging +from datetime import date + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import settings +from app.dependencies import get_db, require_access +from app.exceptions import ProviderError +from app.models.user import User +from app.providers.alpaca import AlpacaOHLCVProvider +from app.providers.fmp import FMPFundamentalProvider +from app.providers.gemini_sentiment import GeminiSentimentProvider +from app.schemas.common import APIEnvelope +from app.services import fundamental_service, ingestion_service, sentiment_service + +logger = logging.getLogger(__name__) + +router = APIRouter(tags=["ingestion"]) + + +def _get_provider() -> AlpacaOHLCVProvider: + """Build the OHLCV provider from current settings.""" + if not settings.alpaca_api_key or not settings.alpaca_api_secret: + raise ProviderError("Alpaca API credentials not configured") + return AlpacaOHLCVProvider(settings.alpaca_api_key, settings.alpaca_api_secret) + + +@router.post("/ingestion/fetch/{symbol}", response_model=APIEnvelope) +async def fetch_symbol( + symbol: str, + start_date: date | None = Query(None, description="Start date (YYYY-MM-DD)"), + end_date: date | None = Query(None, description="End date (YYYY-MM-DD)"), + _user: User = Depends(require_access), + db: AsyncSession = Depends(get_db), +): + """Fetch all data sources for a ticker: OHLCV, sentiment, and fundamentals. 
+ + Returns a per-source breakdown so the frontend can show exactly what + succeeded and what failed. + """ + symbol_upper = symbol.strip().upper() + sources: dict[str, dict] = {} + + # --- OHLCV --- + try: + provider = _get_provider() + result = await ingestion_service.fetch_and_ingest( + db, provider, symbol_upper, start_date, end_date + ) + sources["ohlcv"] = { + "status": "ok" if result.status in ("complete", "partial") else "error", + "records": result.records_ingested, + "message": result.message, + } + except Exception as exc: + logger.error("OHLCV fetch failed for %s: %s", symbol_upper, exc) + sources["ohlcv"] = {"status": "error", "records": 0, "message": str(exc)} + + # --- Sentiment --- + if settings.gemini_api_key: + try: + sent_provider = GeminiSentimentProvider( + settings.gemini_api_key, settings.gemini_model + ) + data = await sent_provider.fetch_sentiment(symbol_upper) + await sentiment_service.store_sentiment( + db, + symbol=symbol_upper, + classification=data.classification, + confidence=data.confidence, + source=data.source, + timestamp=data.timestamp, + ) + sources["sentiment"] = { + "status": "ok", + "classification": data.classification, + "confidence": data.confidence, + "message": None, + } + except Exception as exc: + logger.error("Sentiment fetch failed for %s: %s", symbol_upper, exc) + sources["sentiment"] = {"status": "error", "message": str(exc)} + else: + sources["sentiment"] = { + "status": "skipped", + "message": "Gemini API key not configured", + } + + # --- Fundamentals --- + if settings.fmp_api_key: + try: + fmp_provider = FMPFundamentalProvider(settings.fmp_api_key) + fdata = await fmp_provider.fetch_fundamentals(symbol_upper) + await fundamental_service.store_fundamental( + db, + symbol=symbol_upper, + pe_ratio=fdata.pe_ratio, + revenue_growth=fdata.revenue_growth, + earnings_surprise=fdata.earnings_surprise, + market_cap=fdata.market_cap, + ) + sources["fundamentals"] = {"status": "ok", "message": None} + except Exception as 
exc: + logger.error("Fundamentals fetch failed for %s: %s", symbol_upper, exc) + sources["fundamentals"] = {"status": "error", "message": str(exc)} + else: + sources["fundamentals"] = { + "status": "skipped", + "message": "FMP API key not configured", + } + + # Always return success — per-source breakdown tells the full story + return APIEnvelope( + status="success", + data={"symbol": symbol_upper, "sources": sources}, + error=None, + ) diff --git a/app/routers/ohlcv.py b/app/routers/ohlcv.py new file mode 100644 index 0000000..405015b --- /dev/null +++ b/app/routers/ohlcv.py @@ -0,0 +1,56 @@ +"""OHLCV router: endpoints for storing and querying price data.""" + +from datetime import date + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession + +from app.dependencies import get_db, require_access +from app.models.user import User +from app.schemas.common import APIEnvelope +from app.schemas.ohlcv import OHLCVCreate, OHLCVResponse +from app.services import price_service + +router = APIRouter(tags=["ohlcv"]) + + +@router.post("/ohlcv", response_model=APIEnvelope) +async def create_ohlcv( + body: OHLCVCreate, + _user: User = Depends(require_access), + db: AsyncSession = Depends(get_db), +): + """Upsert an OHLCV record for a ticker and date.""" + record = await price_service.upsert_ohlcv( + db, + symbol=body.symbol, + record_date=body.date, + open_=body.open, + high=body.high, + low=body.low, + close=body.close, + volume=body.volume, + ) + return APIEnvelope( + status="success", + data=OHLCVResponse.model_validate(record).model_dump(mode="json"), + ) + + +@router.get("/ohlcv/{symbol}", response_model=APIEnvelope) +async def get_ohlcv( + symbol: str, + start_date: date | None = Query(None, description="Start date (YYYY-MM-DD)"), + end_date: date | None = Query(None, description="End date (YYYY-MM-DD)"), + _user: User = Depends(require_access), + db: AsyncSession = Depends(get_db), +): + """Query OHLCV records for a ticker, 
"""Scores router — scoring engine endpoints."""

from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.dependencies import get_db, require_access
from app.schemas.common import APIEnvelope
from app.schemas.score import (
    DimensionScoreResponse,
    RankingEntry,
    RankingResponse,
    ScoreResponse,
    WeightUpdateRequest,
)
from app.services.scoring_service import get_rankings, get_score, update_weights

router = APIRouter(tags=["scores"])


@router.get("/scores/{symbol}", response_model=APIEnvelope)
async def read_score(
    symbol: str,
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get composite + dimension scores for a symbol. Recomputes stale scores."""
    result = await get_score(db, symbol)

    dims = [DimensionScoreResponse(**d) for d in result["dimensions"]]
    data = ScoreResponse(
        symbol=result["symbol"],
        composite_score=result["composite_score"],
        composite_stale=result["composite_stale"],
        weights=result["weights"],
        dimensions=dims,
        missing_dimensions=result["missing_dimensions"],
        computed_at=result["computed_at"],
    )
    return APIEnvelope(status="success", data=data.model_dump(mode="json"))


@router.get("/rankings", response_model=APIEnvelope)
async def read_rankings(
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get all tickers ranked by composite score descending."""
    ranked = await get_rankings(db)

    entries = []
    for row in ranked["rankings"]:
        entries.append(
            RankingEntry(
                symbol=row["symbol"],
                composite_score=row["composite_score"],
                dimensions=[
                    DimensionScoreResponse(**d) for d in row["dimensions"]
                ],
            )
        )
    data = RankingResponse(rankings=entries, weights=ranked["weights"])
    return APIEnvelope(status="success", data=data.model_dump(mode="json"))


@router.put("/scores/weights", response_model=APIEnvelope)
async def update_score_weights(
    body: WeightUpdateRequest,
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Update dimension weights and recompute all composite scores."""
    new_weights = await update_weights(db, body.weights)
    return APIEnvelope(status="success", data={"weights": new_weights})

# === app/routers/sentiment.py (header) ===
"""Sentiment router — sentiment data endpoints."""

from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession

from app.dependencies import get_db, require_access
from app.schemas.common import APIEnvelope
from app.schemas.sentiment import SentimentResponse, SentimentScoreResult
from app.services.sentiment_service import (
    compute_sentiment_dimension_score,
    get_sentiment_scores,
)

router = APIRouter(tags=["sentiment"])


@router.get("/sentiment/{symbol}", response_model=APIEnvelope)
async def read_sentiment(
    symbol: str,
    lookback_hours: float = Query(24, gt=0, description="Lookback window in hours"),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get recent sentiment scores and computed dimension score for a symbol."""
    rows = await get_sentiment_scores(db, symbol, lookback_hours)
    dim = await compute_sentiment_dimension_score(db, symbol, lookback_hours)

    items = [
        SentimentScoreResult(
            id=row.id,
            classification=row.classification,
            confidence=row.confidence,
            source=row.source,
            timestamp=row.timestamp,
        )
        for row in rows
    ]
    data = SentimentResponse(
        symbol=symbol.strip().upper(),
        scores=items,
        count=len(items),
        dimension_score=None if dim is None else round(dim, 2),
        lookback_hours=lookback_hours,
    )
    return APIEnvelope(status="success", data=data.model_dump())

# === app/routers/sr_levels.py ===
"""S/R Levels router — support/resistance detection endpoints."""

from fastapi import APIRouter, Depends, Query
from sqlalchemy.ext.asyncio import AsyncSession

from app.dependencies import get_db, require_access
from app.schemas.common import APIEnvelope
from app.schemas.sr_level import SRLevelResponse, SRLevelResult
from app.services.sr_service import get_sr_levels

router = APIRouter(tags=["sr-levels"])


@router.get("/sr-levels/{symbol}", response_model=APIEnvelope)
async def read_sr_levels(
    symbol: str,
    tolerance: float = Query(0.005, ge=0, le=0.1, description="Merge tolerance (default 0.5%)"),
    _user=Depends(require_access),
    db: AsyncSession = Depends(get_db),
) -> APIEnvelope:
    """Get support/resistance levels for a symbol, sorted by strength descending."""
    detected = await get_sr_levels(db, symbol, tolerance)
    results = [
        SRLevelResult(
            id=level.id,
            price_level=level.price_level,
            type=level.type,
            strength=level.strength,
            detection_method=level.detection_method,
            created_at=level.created_at,
        )
        for level in detected
    ]
    data = SRLevelResponse(
        symbol=symbol.upper(),
        levels=results,
        count=len(results),
    )
    return APIEnvelope(status="success", data=data.model_dump())

# === app/routers/tickers.py ===
"""Tickers router: CRUD endpoints for the Ticker Registry."""

from fastapi import APIRouter, Depends
from sqlalchemy.ext.asyncio import AsyncSession

from app.dependencies import get_db, require_access
from app.models.user import User
from app.schemas.common import APIEnvelope
from app.schemas.ticker import TickerCreate, TickerResponse
from app.services import ticker_service

router = APIRouter(tags=["tickers"])


@router.post("/tickers", response_model=APIEnvelope)
async def create_ticker(
    body: TickerCreate,
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """Add a new ticker to the registry."""
    created = await ticker_service.add_ticker(db, body.symbol)
    return APIEnvelope(
        status="success",
        data=TickerResponse.model_validate(created).model_dump(mode="json"),
    )


@router.get("/tickers", response_model=APIEnvelope)
async def list_tickers(
    _user: User = Depends(require_access),
    db: AsyncSession = Depends(get_db),
):
    """List all tracked tickers sorted alphabetically."""
    rows = await ticker_service.list_tickers(db)
    payload = [
        TickerResponse.model_validate(t).model_dump(mode="json") for t in rows
    ]
    return APIEnvelope(status="success", data=payload)
delete_ticker( + symbol: str, + _user: User = Depends(require_access), + db: AsyncSession = Depends(get_db), +): + """Delete a ticker and all associated data.""" + await ticker_service.delete_ticker(db, symbol) + return APIEnvelope(status="success", data=None) diff --git a/app/routers/trades.py b/app/routers/trades.py new file mode 100644 index 0000000..5694df7 --- /dev/null +++ b/app/routers/trades.py @@ -0,0 +1,28 @@ +"""Trades router — R:R scanner trade setup endpoints.""" + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession + +from app.dependencies import get_db, require_access +from app.schemas.common import APIEnvelope +from app.schemas.trade_setup import TradeSetupResponse +from app.services.rr_scanner_service import get_trade_setups + +router = APIRouter(tags=["trades"]) + + +@router.get("/trades", response_model=APIEnvelope) +async def list_trade_setups( + direction: str | None = Query( + None, description="Filter by direction: long or short" + ), + _user=Depends(require_access), + db: AsyncSession = Depends(get_db), +) -> APIEnvelope: + """Get all trade setups sorted by R:R desc, secondary composite desc. + + Optional direction filter (long/short). 
+ """ + rows = await get_trade_setups(db, direction=direction) + data = [TradeSetupResponse(**r).model_dump(mode="json") for r in rows] + return APIEnvelope(status="success", data=data) diff --git a/app/routers/watchlist.py b/app/routers/watchlist.py new file mode 100644 index 0000000..3b1fc57 --- /dev/null +++ b/app/routers/watchlist.py @@ -0,0 +1,59 @@ +"""Watchlist router — manage user's curated watchlist.""" + +from fastapi import APIRouter, Depends, Query +from sqlalchemy.ext.asyncio import AsyncSession + +from app.dependencies import get_db, require_access +from app.models.user import User +from app.schemas.common import APIEnvelope +from app.schemas.watchlist import WatchlistEntryResponse +from app.services.watchlist_service import ( + add_manual_entry, + get_watchlist, + remove_entry, +) + +router = APIRouter(tags=["watchlist"]) + + +@router.get("/watchlist", response_model=APIEnvelope) +async def list_watchlist( + sort_by: str = Query( + "composite", + description=( + "Sort by: composite, rr, or a dimension name " + "(technical, sr_quality, sentiment, fundamental, momentum)" + ), + ), + user: User = Depends(require_access), + db: AsyncSession = Depends(get_db), +) -> APIEnvelope: + """Get current user's watchlist with enriched data.""" + rows = await get_watchlist(db, user.id, sort_by=sort_by) + data = [WatchlistEntryResponse(**r).model_dump(mode="json") for r in rows] + return APIEnvelope(status="success", data=data) + + +@router.post("/watchlist/{symbol}", response_model=APIEnvelope) +async def add_to_watchlist( + symbol: str, + user: User = Depends(require_access), + db: AsyncSession = Depends(get_db), +) -> APIEnvelope: + """Add a manual entry to the watchlist.""" + entry = await add_manual_entry(db, user.id, symbol) + return APIEnvelope( + status="success", + data={"symbol": symbol.strip().upper(), "entry_type": entry.entry_type}, + ) + + +@router.delete("/watchlist/{symbol}", response_model=APIEnvelope) +async def remove_from_watchlist( + symbol: 
str, + user: User = Depends(require_access), + db: AsyncSession = Depends(get_db), +) -> APIEnvelope: + """Remove an entry from the watchlist.""" + await remove_entry(db, user.id, symbol) + return APIEnvelope(status="success", data=None) diff --git a/app/scheduler.py b/app/scheduler.py new file mode 100644 index 0000000..8a40a4c --- /dev/null +++ b/app/scheduler.py @@ -0,0 +1,437 @@ +"""APScheduler job definitions and FastAPI lifespan integration. + +Defines four scheduled jobs: + - Data Collector (OHLCV fetch for all tickers) + - Sentiment Collector (sentiment for all tickers) + - Fundamental Collector (fundamentals for all tickers) + - R:R Scanner (trade setup scan for all tickers) + +Each job processes tickers independently, logs errors as structured JSON, +handles rate limits by recording the last successful ticker, and checks +SystemSetting for enabled/disabled state. +""" + +from __future__ import annotations + +import json +import logging +from datetime import date, timedelta + +from apscheduler.schedulers.asyncio import AsyncIOScheduler +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import settings +from app.database import async_session_factory +from app.models.settings import SystemSetting +from app.models.ticker import Ticker +from app.providers.alpaca import AlpacaOHLCVProvider +from app.providers.fmp import FMPFundamentalProvider +from app.providers.gemini_sentiment import GeminiSentimentProvider +from app.services import fundamental_service, ingestion_service, sentiment_service +from app.services.rr_scanner_service import scan_all_tickers + +logger = logging.getLogger(__name__) + +# Module-level scheduler instance +scheduler = AsyncIOScheduler() + +# Track last successful ticker per job for rate-limit resume +_last_successful: dict[str, str | None] = { + "data_collector": None, + "sentiment_collector": None, + "fundamental_collector": None, +} + + +# 
# ---------------------------------------------------------------------------
# Helpers
# ---------------------------------------------------------------------------

logger = logging.getLogger(__name__)

# Resume pointers: last successfully processed ticker per job (None = start
# from the top).  Rebinding here matches the top-of-module initial values.
_last_successful: dict[str, str | None] = {
    "data_collector": None,
    "sentiment_collector": None,
    "fundamental_collector": None,
}


def _log_job_error(job_name: str, ticker: str, error: Exception) -> None:
    """Emit a structured-JSON error record for a failed per-ticker job step."""
    payload = {
        "event": "job_error",
        "job": job_name,
        "ticker": ticker,
        "error_type": type(error).__name__,
        "message": str(error),
    }
    logger.error(json.dumps(payload))


async def _is_job_enabled(db: AsyncSession, job_name: str) -> bool:
    """Look up the job's enabled flag in SystemSetting; a missing key means enabled."""
    result = await db.execute(
        select(SystemSetting).where(SystemSetting.key == f"job_{job_name}_enabled")
    )
    setting = result.scalar_one_or_none()
    return True if setting is None else setting.value.lower() == "true"


async def _get_all_tickers(db: AsyncSession) -> list[str]:
    """Return every tracked ticker symbol in alphabetical order."""
    rows = await db.execute(select(Ticker.symbol).order_by(Ticker.symbol))
    return list(rows.scalars().all())


def _resume_tickers(symbols: list[str], job_name: str) -> list[str]:
    """Rotate *symbols* so processing resumes after the last successful ticker.

    If a previous run was rate-limited mid-list, start with the ticker
    right after the last successful one and wrap around.  Without a valid
    resume point the list is returned unchanged.
    """
    checkpoint = _last_successful.get(job_name)
    if checkpoint is None or checkpoint not in symbols:
        return symbols
    pivot = symbols.index(checkpoint) + 1
    return symbols[pivot:] + symbols[:pivot]


# ---------------------------------------------------------------------------
# Job: Data Collector (OHLCV)
# ---------------------------------------------------------------------------


async def collect_ohlcv() -> None:
    """Fetch latest daily OHLCV for all tracked tickers.

    Uses AlpacaOHLCVProvider. Processes each ticker independently.
    On rate limit, records last successful ticker for resume.
    """
    job_name = "data_collector"
    logger.info(json.dumps({"event": "job_start", "job": job_name}))

    async with async_session_factory() as db:
        if not await _is_job_enabled(db, job_name):
            logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"}))
            return
        symbols = await _get_all_tickers(db)

    if not symbols:
        logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": 0}))
        return

    # Reorder so a rate-limited previous run resumes where it left off.
    symbols = _resume_tickers(symbols, job_name)

    if not settings.alpaca_api_key or not settings.alpaca_api_secret:
        logger.warning(json.dumps({"event": "job_skipped", "job": job_name, "reason": "alpaca keys not configured"}))
        return

    try:
        provider = AlpacaOHLCVProvider(settings.alpaca_api_key, settings.alpaca_api_secret)
    except Exception as exc:
        logger.error(json.dumps({"event": "job_error", "job": job_name, "error_type": type(exc).__name__, "message": str(exc)}))
        return

    end_date = date.today()
    start_date = end_date - timedelta(days=5)  # small lookback to backfill gaps
    processed = 0

    for symbol in symbols:
        async with async_session_factory() as db:
            try:
                outcome = await ingestion_service.fetch_and_ingest(
                    db, provider, symbol, start_date=start_date, end_date=end_date,
                )
            except Exception as exc:
                _log_job_error(job_name, symbol, exc)
                continue

            _last_successful[job_name] = symbol
            processed += 1
            logger.info(json.dumps({
                "event": "ticker_collected",
                "job": job_name,
                "ticker": symbol,
                "status": outcome.status,
                "records": outcome.records_ingested,
            }))
            if outcome.status == "partial":
                # Rate limited — stop here and resume on the next run.
                logger.warning(json.dumps({
                    "event": "rate_limited",
                    "job": job_name,
                    "ticker": symbol,
                    "processed": processed,
                }))
                return

    # Full pass completed: clear the resume pointer.
    _last_successful[job_name] = None
    logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": processed}))
logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": processed})) + + +# --------------------------------------------------------------------------- +# Job: Sentiment Collector +# --------------------------------------------------------------------------- + + +async def collect_sentiment() -> None: + """Fetch sentiment for all tracked tickers via Gemini. + + Processes each ticker independently. On rate limit, records last + successful ticker for resume. + """ + job_name = "sentiment_collector" + logger.info(json.dumps({"event": "job_start", "job": job_name})) + + async with async_session_factory() as db: + if not await _is_job_enabled(db, job_name): + logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"})) + return + + symbols = await _get_all_tickers(db) + if not symbols: + logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": 0})) + return + + symbols = _resume_tickers(symbols, job_name) + + if not settings.gemini_api_key: + logger.warning(json.dumps({"event": "job_skipped", "job": job_name, "reason": "gemini key not configured"})) + return + + try: + provider = GeminiSentimentProvider(settings.gemini_api_key, settings.gemini_model) + except Exception as exc: + logger.error(json.dumps({"event": "job_error", "job": job_name, "error_type": type(exc).__name__, "message": str(exc)})) + return + + processed = 0 + + for symbol in symbols: + async with async_session_factory() as db: + try: + data = await provider.fetch_sentiment(symbol) + await sentiment_service.store_sentiment( + db, + symbol=symbol, + classification=data.classification, + confidence=data.confidence, + source=data.source, + timestamp=data.timestamp, + ) + _last_successful[job_name] = symbol + processed += 1 + logger.info(json.dumps({ + "event": "ticker_collected", + "job": job_name, + "ticker": symbol, + "classification": data.classification, + "confidence": data.confidence, + })) + except Exception as exc: + msg = 
str(exc).lower() + if "rate" in msg or "quota" in msg or "429" in msg: + logger.warning(json.dumps({ + "event": "rate_limited", + "job": job_name, + "ticker": symbol, + "processed": processed, + })) + return + _log_job_error(job_name, symbol, exc) + + _last_successful[job_name] = None + logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": processed})) + + +# --------------------------------------------------------------------------- +# Job: Fundamental Collector +# --------------------------------------------------------------------------- + + +async def collect_fundamentals() -> None: + """Fetch fundamentals for all tracked tickers via FMP. + + Processes each ticker independently. On rate limit, records last + successful ticker for resume. + """ + job_name = "fundamental_collector" + logger.info(json.dumps({"event": "job_start", "job": job_name})) + + async with async_session_factory() as db: + if not await _is_job_enabled(db, job_name): + logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"})) + return + + symbols = await _get_all_tickers(db) + if not symbols: + logger.info(json.dumps({"event": "job_complete", "job": job_name, "tickers": 0})) + return + + symbols = _resume_tickers(symbols, job_name) + + if not settings.fmp_api_key: + logger.warning(json.dumps({"event": "job_skipped", "job": job_name, "reason": "fmp key not configured"})) + return + + try: + provider = FMPFundamentalProvider(settings.fmp_api_key) + except Exception as exc: + logger.error(json.dumps({"event": "job_error", "job": job_name, "error_type": type(exc).__name__, "message": str(exc)})) + return + + processed = 0 + + for symbol in symbols: + async with async_session_factory() as db: + try: + data = await provider.fetch_fundamentals(symbol) + await fundamental_service.store_fundamental( + db, + symbol=symbol, + pe_ratio=data.pe_ratio, + revenue_growth=data.revenue_growth, + earnings_surprise=data.earnings_surprise, + 
async def scan_rr() -> None:
    """Scan all tickers for trade setups meeting the R:R threshold.

    Uses rr_scanner_service.scan_all_tickers which already handles
    per-ticker error isolation internally.
    """
    job_name = "rr_scanner"
    logger.info(json.dumps({"event": "job_start", "job": job_name}))

    async with async_session_factory() as db:
        if not await _is_job_enabled(db, job_name):
            logger.info(json.dumps({"event": "job_skipped", "job": job_name, "reason": "disabled"}))
            return

        try:
            setups = await scan_all_tickers(
                db, rr_threshold=settings.default_rr_threshold,
            )
            logger.info(json.dumps({
                "event": "job_complete",
                "job": job_name,
                "setups_found": len(setups),
            }))
        except Exception as exc:
            # Broad catch is deliberate: a scheduled job must never raise
            # into the scheduler — log a structured error and wait for the
            # next tick instead.
            logger.error(json.dumps({
                "event": "job_error",
                "job": job_name,
                "error_type": type(exc).__name__,
                "message": str(exc),
            }))


# ---------------------------------------------------------------------------
# Frequency helpers
# ---------------------------------------------------------------------------

# Maps a human-readable frequency keyword to APScheduler interval kwargs.
_FREQUENCY_MAP: dict[str, dict[str, int]] = {
    "hourly": {"hours": 1},
    "daily": {"hours": 24},
}
def _parse_frequency(freq: str) -> dict[str, int]:
    """Convert a frequency string to APScheduler interval kwargs.

    Unknown values fall back to daily ({"hours": 24}).
    """
    return _FREQUENCY_MAP.get(freq.lower(), {"hours": 24})


# ---------------------------------------------------------------------------
# Scheduler setup
# ---------------------------------------------------------------------------


def configure_scheduler() -> None:
    """Add all jobs to the scheduler with configured intervals.

    Call this once before scheduler.start(). Removes any existing jobs first
    to ensure idempotency.
    """
    scheduler.remove_all_jobs()

    # Data Collector — configurable frequency (unknown values fall back to daily)
    ohlcv_interval = _parse_frequency(settings.data_collector_frequency)
    scheduler.add_job(
        collect_ohlcv,
        "interval",
        **ohlcv_interval,
        id="data_collector",
        name="Data Collector (OHLCV)",
        replace_existing=True,
    )

    # Sentiment Collector — interval in minutes, driven directly by settings
    scheduler.add_job(
        collect_sentiment,
        "interval",
        minutes=settings.sentiment_poll_interval_minutes,
        id="sentiment_collector",
        name="Sentiment Collector",
        replace_existing=True,
    )

    # Fundamental Collector — configurable frequency (fallback: daily)
    fund_interval = _parse_frequency(settings.fundamental_fetch_frequency)
    scheduler.add_job(
        collect_fundamentals,
        "interval",
        **fund_interval,
        id="fundamental_collector",
        name="Fundamental Collector",
        replace_existing=True,
    )

    # R:R Scanner — configurable frequency (fallback: daily)
    rr_interval = _parse_frequency(settings.rr_scan_frequency)
    scheduler.add_job(
        scan_rr,
        "interval",
        **rr_interval,
        id="rr_scanner",
        name="R:R Scanner",
        replace_existing=True,
    )

    # Structured log of the effective schedule for each job.
    logger.info(
        json.dumps({
            "event": "scheduler_configured",
            "jobs": {
                "data_collector": ohlcv_interval,
                "sentiment_collector": {"minutes": settings.sentiment_poll_interval_minutes},
                "fundamental_collector": fund_interval,
                "rr_scanner": rr_interval,
            },
        })
    )
# --- app/schemas/admin.py ---
"""Admin request/response schemas."""

from pydantic import BaseModel, Field


class UserManagement(BaseModel):
    """Schema for user access management."""
    # True grants API access; False revokes it.
    has_access: bool


class PasswordReset(BaseModel):
    """Schema for resetting a user's password."""
    # Minimum length mirrors the self-registration rule.
    new_password: str = Field(..., min_length=6)


class CreateUserRequest(BaseModel):
    """Schema for admin-created user accounts."""
    username: str = Field(..., min_length=1)
    password: str = Field(..., min_length=6)
    # Only "user" and "admin" roles exist; enforced by the regex pattern.
    role: str = Field(default="user", pattern=r"^(user|admin)$")
    # New accounts default to no API access until explicitly granted.
    has_access: bool = False


class RegistrationToggle(BaseModel):
    """Schema for toggling registration on/off."""
    enabled: bool


class SystemSettingUpdate(BaseModel):
    """Schema for updating a system setting."""
    value: str = Field(..., min_length=1)


class DataCleanupRequest(BaseModel):
    """Schema for data cleanup — delete records older than N days."""
    older_than_days: int = Field(..., gt=0)


class JobToggle(BaseModel):
    """Schema for enabling/disabling a scheduled job."""
    enabled: bool


# --- app/schemas/auth.py ---
"""Auth request/response schemas."""


class RegisterRequest(BaseModel):
    # Self-service registration payload.
    username: str = Field(..., min_length=1)
    password: str = Field(..., min_length=6)


class LoginRequest(BaseModel):
    username: str
    password: str


class TokenResponse(BaseModel):
    # JWT bearer token; token_type is fixed per the OAuth2 convention.
    access_token: str
    token_type: str = "bearer"
# --- app/schemas/common.py ---
"""Shared API schemas used across all endpoints."""

from typing import Any, Literal

from pydantic import BaseModel


class APIEnvelope(BaseModel):
    """Standard JSON envelope for all API responses."""

    # "success" carries data; "error" carries an error message.
    status: Literal["success", "error"]
    data: Any | None = None
    error: str | None = None


# --- app/schemas/fundamental.py ---
"""Pydantic schemas for fundamental data endpoints."""

from __future__ import annotations

from datetime import datetime


class FundamentalResponse(BaseModel):
    """Envelope-ready fundamental data response."""

    symbol: str
    # All metrics optional: the provider may not report every field.
    pe_ratio: float | None = None
    revenue_growth: float | None = None
    earnings_surprise: float | None = None
    market_cap: float | None = None
    fetched_at: datetime | None = None


# --- app/schemas/indicator.py ---
"""Pydantic schemas for technical indicator endpoints."""

from pydantic import Field


class IndicatorRequest(BaseModel):
    """Query parameters for indicator computation."""

    start_date: date | None = None
    end_date: date | None = None
    # Lookback period; service falls back to a per-indicator default.
    period: int | None = None


class IndicatorResult(BaseModel):
    """Raw indicator values plus normalized score."""

    indicator_type: str
    values: dict[str, Any]
    # Normalized to the common 0-100 scoring scale.
    score: float = Field(ge=0, le=100)
    bars_used: int


class IndicatorResponse(BaseModel):
    """Envelope-ready indicator response."""

    symbol: str
    indicator: IndicatorResult


class EMACrossResult(BaseModel):
    """EMA cross signal details."""

    short_ema: float
    long_ema: float
    short_period: int
    long_period: int
    signal: Literal["bullish", "bearish", "neutral"]
class EMACrossResponse(BaseModel):
    """Envelope-ready EMA cross response."""

    symbol: str
    ema_cross: EMACrossResult


# --- app/schemas/ohlcv.py ---
"""OHLCV request/response schemas."""

from __future__ import annotations

import datetime as _dt

from pydantic import BaseModel, Field


class OHLCVCreate(BaseModel):
    symbol: str = Field(..., description="Ticker symbol (e.g. AAPL)")
    date: _dt.date = Field(..., description="Trading date (YYYY-MM-DD)")
    # Non-negative prices/volume only; no cross-field high>=low check here.
    open: float = Field(..., ge=0, description="Opening price")
    high: float = Field(..., ge=0, description="High price")
    low: float = Field(..., ge=0, description="Low price")
    close: float = Field(..., ge=0, description="Closing price")
    volume: int = Field(..., ge=0, description="Trading volume")


class OHLCVResponse(BaseModel):
    id: int
    ticker_id: int
    date: _dt.date
    open: float
    high: float
    low: float
    close: float
    volume: int
    created_at: _dt.datetime

    # Allows construction directly from ORM row objects.
    model_config = {"from_attributes": True}


# --- app/schemas/score.py ---
"""Pydantic schemas for scoring endpoints."""

from datetime import datetime


class DimensionScoreResponse(BaseModel):
    """A single dimension score."""

    dimension: str
    score: float
    # True when underlying data changed since the score was computed.
    is_stale: bool
    computed_at: datetime | None = None


class ScoreResponse(BaseModel):
    """Full score response for a ticker: composite + all dimensions."""

    symbol: str
    composite_score: float | None = None
    composite_stale: bool = False
    # Pydantic deep-copies mutable defaults, so {} / [] are safe here.
    weights: dict[str, float] = {}
    dimensions: list[DimensionScoreResponse] = []
    missing_dimensions: list[str] = []
    computed_at: datetime | None = None
class WeightUpdateRequest(BaseModel):
    """Request to update dimension weights."""

    weights: dict[str, float] = Field(
        ...,
        description="Dimension name → weight mapping. All weights must be positive.",
    )


class RankingEntry(BaseModel):
    """A single entry in the rankings list."""

    symbol: str
    composite_score: float
    dimensions: list[DimensionScoreResponse] = []


class RankingResponse(BaseModel):
    """Rankings response: tickers sorted by composite score descending."""

    rankings: list[RankingEntry] = []
    weights: dict[str, float] = {}


# --- app/schemas/sentiment.py ---
"""Pydantic schemas for sentiment endpoints."""

from __future__ import annotations

from datetime import datetime
from typing import Literal

from pydantic import BaseModel, Field


class SentimentScoreResult(BaseModel):
    """A single sentiment score record."""

    id: int
    classification: Literal["bullish", "bearish", "neutral"]
    # Provider-reported confidence on a 0-100 scale.
    confidence: int = Field(ge=0, le=100)
    source: str
    timestamp: datetime


class SentimentResponse(BaseModel):
    """Envelope-ready sentiment response."""

    symbol: str
    scores: list[SentimentScoreResult]
    count: int
    dimension_score: float | None = Field(
        None, ge=0, le=100, description="Time-decay weighted sentiment dimension score"
    )
    lookback_hours: float
# --- app/schemas/sr_level.py ---
"""Pydantic schemas for S/R level endpoints."""

from __future__ import annotations

from datetime import datetime
from typing import Literal

from pydantic import BaseModel, Field


class SRLevelResult(BaseModel):
    """A single support/resistance level."""

    id: int
    price_level: float
    type: Literal["support", "resistance"]
    # 0-100 strength, higher = more significant level.
    strength: int = Field(ge=0, le=100)
    detection_method: Literal["volume_profile", "pivot_point", "merged"]
    created_at: datetime


class SRLevelResponse(BaseModel):
    """Envelope-ready S/R levels response."""

    symbol: str
    levels: list[SRLevelResult]
    count: int


# --- app/schemas/ticker.py ---
"""Ticker request/response schemas."""


class TickerCreate(BaseModel):
    symbol: str = Field(..., description="NASDAQ ticker symbol (e.g. AAPL)")


class TickerResponse(BaseModel):
    id: int
    symbol: str
    created_at: datetime

    # Allows construction directly from ORM row objects.
    model_config = {"from_attributes": True}


# --- app/schemas/trade_setup.py ---
"""Pydantic schemas for trade setup endpoints."""


class TradeSetupResponse(BaseModel):
    """A single trade setup detected by the R:R scanner."""

    id: int
    symbol: str
    direction: str
    entry_price: float
    stop_loss: float
    target: float
    rr_ratio: float
    composite_score: float
    detected_at: datetime


# --- app/schemas/watchlist.py ---
"""Pydantic schemas for watchlist endpoints."""


class SRLevelSummary(BaseModel):
    """Compact SR level for watchlist entry."""

    price_level: float
    type: Literal["support", "resistance"]
    strength: int = Field(ge=0, le=100)
class DimensionScoreSummary(BaseModel):
    """Compact dimension score for watchlist entry."""

    dimension: str
    score: float


class WatchlistEntryResponse(BaseModel):
    """A single watchlist entry with enriched data."""

    symbol: str
    # "auto" = added by the ranking job; "manual" = user-pinned.
    entry_type: Literal["auto", "manual"]
    composite_score: float | None = None
    dimensions: list[DimensionScoreSummary] = []
    rr_ratio: float | None = None
    rr_direction: str | None = None
    sr_levels: list[SRLevelSummary] = []
    added_at: datetime


# --- app/services/admin_service.py ---
"""Admin service: user management, system settings, data cleanup, job control."""

from datetime import datetime, timedelta, timezone

from passlib.hash import bcrypt
from sqlalchemy import delete, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.exceptions import DuplicateError, NotFoundError, ValidationError
from app.models.fundamental import FundamentalData
from app.models.ohlcv import OHLCVRecord
from app.models.sentiment import SentimentScore
from app.models.settings import SystemSetting
from app.models.user import User


# ---------------------------------------------------------------------------
# User management
# ---------------------------------------------------------------------------

async def list_users(db: AsyncSession) -> list[User]:
    """Return all users ordered by id."""
    result = await db.execute(select(User).order_by(User.id))
    return list(result.scalars().all())


async def create_user(
    db: AsyncSession,
    username: str,
    password: str,
    role: str = "user",
    has_access: bool = False,
) -> User:
    """Create a new user account (admin action).

    Raises:
        DuplicateError: if the username is already taken.
    """
    # Reject duplicates before inserting (username should also be unique
    # at the DB level — this gives a friendlier error).
    result = await db.execute(select(User).where(User.username == username))
    if result.scalar_one_or_none() is not None:
        raise DuplicateError(f"Username already exists: {username}")

    user = User(
        username=username,
        password_hash=bcrypt.hash(password),
        role=role,
        has_access=has_access,
    )
    db.add(user)
    await db.commit()
    await db.refresh(user)
    return user
async def _get_user_or_404(db: AsyncSession, user_id: int) -> User:
    """Load a user by primary key or raise NotFoundError.

    Shared by the access-toggle and password-reset admin actions so the
    lookup/404 logic lives in one place.
    """
    result = await db.execute(select(User).where(User.id == user_id))
    user = result.scalar_one_or_none()
    if user is None:
        raise NotFoundError(f"User not found: {user_id}")
    return user


async def set_user_access(db: AsyncSession, user_id: int, has_access: bool) -> User:
    """Grant or revoke API access for a user.

    Raises:
        NotFoundError: if no user with *user_id* exists.
    """
    user = await _get_user_or_404(db, user_id)
    user.has_access = has_access
    await db.commit()
    await db.refresh(user)
    return user


async def reset_password(db: AsyncSession, user_id: int, new_password: str) -> User:
    """Reset a user's password.

    Raises:
        NotFoundError: if no user with *user_id* exists.
    """
    user = await _get_user_or_404(db, user_id)
    user.password_hash = bcrypt.hash(new_password)
    await db.commit()
    await db.refresh(user)
    return user


# ---------------------------------------------------------------------------
# Registration toggle
# ---------------------------------------------------------------------------

async def toggle_registration(db: AsyncSession, enabled: bool) -> SystemSetting:
    """Enable or disable user registration via SystemSetting.

    Stored under key 'registration_enabled' as the string "true"/"false";
    the auth registration flow reads this value.
    """
    result = await db.execute(
        select(SystemSetting).where(SystemSetting.key == "registration_enabled")
    )
    setting = result.scalar_one_or_none()
    value = str(enabled).lower()

    if setting is None:
        setting = SystemSetting(key="registration_enabled", value=value)
        db.add(setting)
    else:
        setting.value = value

    await db.commit()
    await db.refresh(setting)
    return setting
# ---------------------------------------------------------------------------
# System settings CRUD
# ---------------------------------------------------------------------------

async def list_settings(db: AsyncSession) -> list[SystemSetting]:
    """Return all system settings."""
    result = await db.execute(select(SystemSetting).order_by(SystemSetting.key))
    return list(result.scalars().all())


async def update_setting(db: AsyncSession, key: str, value: str) -> SystemSetting:
    """Create or update a system setting (upsert by key)."""
    result = await db.execute(
        select(SystemSetting).where(SystemSetting.key == key)
    )
    setting = result.scalar_one_or_none()

    if setting is None:
        setting = SystemSetting(key=key, value=value)
        db.add(setting)
    else:
        setting.value = value

    await db.commit()
    await db.refresh(setting)
    return setting
# ---------------------------------------------------------------------------
# Data cleanup
# ---------------------------------------------------------------------------

async def cleanup_data(db: AsyncSession, older_than_days: int) -> dict[str, int]:
    """Delete OHLCV, sentiment, and fundamental records older than N days.

    Preserves tickers, users, and latest scores.
    Returns a dict with counts of deleted records per table.
    """
    cutoff = datetime.now(timezone.utc) - timedelta(days=older_than_days)
    counts: dict[str, int] = {}

    # OHLCV — date column is a date, compare with cutoff date
    result = await db.execute(
        delete(OHLCVRecord).where(OHLCVRecord.date < cutoff.date())
    )
    counts["ohlcv"] = result.rowcount  # type: ignore[assignment]

    # Sentiment — timestamp is datetime
    result = await db.execute(
        delete(SentimentScore).where(SentimentScore.timestamp < cutoff)
    )
    counts["sentiment"] = result.rowcount  # type: ignore[assignment]

    # Fundamentals — fetched_at is datetime
    result = await db.execute(
        delete(FundamentalData).where(FundamentalData.fetched_at < cutoff)
    )
    counts["fundamentals"] = result.rowcount  # type: ignore[assignment]

    # Single commit so the cleanup is all-or-nothing.
    await db.commit()
    return counts


# ---------------------------------------------------------------------------
# Job control (placeholder — scheduler is Task 12.1)
# ---------------------------------------------------------------------------

# Canonical job identifiers; must match the scheduler's job ids.
VALID_JOB_NAMES = {"data_collector", "sentiment_collector", "fundamental_collector", "rr_scanner"}

# Human-readable labels for the admin UI.
JOB_LABELS = {
    "data_collector": "Data Collector (OHLCV)",
    "sentiment_collector": "Sentiment Collector",
    "fundamental_collector": "Fundamental Collector",
    "rr_scanner": "R:R Scanner",
}


async def list_jobs(db: AsyncSession) -> list[dict]:
    """Return status of all scheduled jobs."""
    # Imported lazily to avoid a circular import with the scheduler module.
    from app.scheduler import scheduler

    jobs_out = []
    for name in sorted(VALID_JOB_NAMES):
        # Check enabled setting; a missing setting means "enabled".
        key = f"job_{name}_enabled"
        result = await db.execute(
            select(SystemSetting).where(SystemSetting.key == key)
        )
        setting = result.scalar_one_or_none()
        enabled = setting.value == "true" if setting else True  # default enabled

        # Get scheduler job info (job may not be registered yet).
        job = scheduler.get_job(name)
        next_run = None
        if job and job.next_run_time:
            next_run = job.next_run_time.isoformat()

        jobs_out.append({
            "name": name,
            "label": JOB_LABELS.get(name, name),
            "enabled": enabled,
            "next_run_at": next_run,
            "registered": job is not None,
        })

    return jobs_out
async def trigger_job(db: AsyncSession, job_name: str) -> dict[str, str]:
    """Trigger a manual job run via the scheduler.

    Runs the job immediately (in addition to its regular schedule).

    Raises:
        ValidationError: if *job_name* is not a known job.
    """
    if job_name not in VALID_JOB_NAMES:
        raise ValidationError(f"Unknown job: {job_name}. Valid jobs: {', '.join(sorted(VALID_JOB_NAMES))}")

    # Imported lazily to avoid a circular import with the scheduler module.
    from app.scheduler import scheduler

    job = scheduler.get_job(job_name)
    if job is None:
        return {"job": job_name, "status": "not_found", "message": f"Job '{job_name}' is not registered in the scheduler"}

    # Fix: a single modify() is sufficient. The previous implementation
    # first called modify(next_run_time=None), which *pauses* the job in
    # APScheduler before the second call re-enabled it — unnecessary and
    # briefly leaves the job unscheduled. Setting next_run_time to "now"
    # makes the scheduler fire the job on its next wakeup, after which the
    # interval trigger resumes the regular cadence.
    # (datetime/timezone come from the module-level import.)
    job.modify(next_run_time=datetime.now(timezone.utc))

    return {"job": job_name, "status": "triggered", "message": f"Job '{job_name}' triggered for immediate execution"}
async def toggle_job(db: AsyncSession, job_name: str, enabled: bool) -> SystemSetting:
    """Enable or disable a scheduled job by storing state in SystemSetting.

    Actual scheduler integration happens in Task 12.1.

    Raises:
        ValidationError: if *job_name* is not a known job.
    """
    if job_name not in VALID_JOB_NAMES:
        raise ValidationError(f"Unknown job: {job_name}. Valid jobs: {', '.join(sorted(VALID_JOB_NAMES))}")

    # Stored as "true"/"false"; the job functions read this flag at start.
    key = f"job_{job_name}_enabled"
    return await update_setting(db, key, str(enabled).lower())


# --- app/services/auth_service.py ---
"""Auth service: registration, login, and JWT token generation."""

from jose import jwt

from app.config import settings
from app.dependencies import JWT_ALGORITHM
from app.exceptions import AuthenticationError, AuthorizationError, DuplicateError


async def register(db: AsyncSession, username: str, password: str) -> User:
    """Register a new user.

    Checks if registration is enabled via SystemSetting, rejects duplicates,
    and creates a user with role='user' and has_access=False.

    Raises:
        AuthorizationError: when registration is disabled.
        DuplicateError: when the username is already taken.
    """
    # Check registration toggle — absent setting means registration is open.
    result = await db.execute(
        select(SystemSetting).where(SystemSetting.key == "registration_enabled")
    )
    setting = result.scalar_one_or_none()
    if setting is not None and setting.value.lower() == "false":
        raise AuthorizationError("Registration is closed")

    # Check duplicate username
    result = await db.execute(select(User).where(User.username == username))
    if result.scalar_one_or_none() is not None:
        raise DuplicateError(f"Username already exists: {username}")

    # New accounts start without API access; an admin must grant it.
    user = User(
        username=username,
        password_hash=bcrypt.hash(password),
        role="user",
        has_access=False,
    )
    db.add(user)
    await db.commit()
    await db.refresh(user)
    return user
async def login(db: AsyncSession, username: str, password: str) -> str:
    """Authenticate user and return a JWT access token.

    Returns the same error message for wrong username or wrong password
    to avoid leaking which field is incorrect.

    Raises:
        AuthenticationError: on unknown username or wrong password.
    """
    result = await db.execute(select(User).where(User.username == username))
    user = result.scalar_one_or_none()

    if user is None:
        # Fix (timing side-channel): the original skipped bcrypt entirely
        # for unknown usernames, so response time revealed whether a
        # username exists. Burn a comparable amount of work before failing.
        bcrypt.hash(password)
        raise AuthenticationError("Invalid credentials")

    if not bcrypt.verify(password, user.password_hash):
        raise AuthenticationError("Invalid credentials")

    # Minimal claim set: subject id, role (for admin gating), and expiry.
    payload = {
        "sub": str(user.id),
        "role": user.role,
        "exp": datetime.now(timezone.utc) + timedelta(minutes=settings.jwt_expiry_minutes),
    }
    token = jwt.encode(payload, settings.jwt_secret, algorithm=JWT_ALGORITHM)
    return token
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Look up a ticker by symbol (case-insensitive; whitespace stripped).

    Raises:
        NotFoundError: if the symbol is not tracked.
    """
    normalised = symbol.strip().upper()
    result = await db.execute(select(Ticker).where(Ticker.symbol == normalised))
    ticker = result.scalar_one_or_none()
    if ticker is None:
        raise NotFoundError(f"Ticker not found: {normalised}")
    return ticker


async def store_fundamental(
    db: AsyncSession,
    symbol: str,
    pe_ratio: float | None = None,
    revenue_growth: float | None = None,
    earnings_surprise: float | None = None,
    market_cap: float | None = None,
) -> FundamentalData:
    """Store or update fundamental data for a ticker.

    Keeps a single latest snapshot per ticker. On new data, marks the
    fundamental dimension score as stale (if one exists).

    Raises:
        NotFoundError: if the ticker is not tracked.
    """
    ticker = await _get_ticker(db, symbol)

    # Check for existing record — there is at most one snapshot per ticker.
    result = await db.execute(
        select(FundamentalData).where(FundamentalData.ticker_id == ticker.id)
    )
    existing = result.scalar_one_or_none()

    now = datetime.now(timezone.utc)

    if existing is not None:
        # Overwrite in place (all fields, even with None) and refresh timestamp.
        existing.pe_ratio = pe_ratio
        existing.revenue_growth = revenue_growth
        existing.earnings_surprise = earnings_surprise
        existing.market_cap = market_cap
        existing.fetched_at = now
        record = existing
    else:
        record = FundamentalData(
            ticker_id=ticker.id,
            pe_ratio=pe_ratio,
            revenue_growth=revenue_growth,
            earnings_surprise=earnings_surprise,
            market_cap=market_cap,
            fetched_at=now,
        )
        db.add(record)

    # Mark fundamental dimension score as stale if it exists, so the
    # scoring layer knows to recompute from the fresh snapshot.
    # TODO: Use DimensionScore service when built
    dim_result = await db.execute(
        select(DimensionScore).where(
            DimensionScore.ticker_id == ticker.id,
            DimensionScore.dimension == "fundamental",
        )
    )
    dim_score = dim_result.scalar_one_or_none()
    if dim_score is not None:
        dim_score.is_stale = True

    await db.commit()
    await db.refresh(record)
    return record


async def get_fundamental(
    db: AsyncSession,
    symbol: str,
) -> FundamentalData | None:
    """Get the latest (single) fundamental snapshot for a ticker, or None.

    Raises:
        NotFoundError: if the ticker is not tracked.
    """
    ticker = await _get_ticker(db, symbol)

    result = await db.execute(
        select(FundamentalData).where(FundamentalData.ticker_id == ticker.id)
    )
    return result.scalar_one_or_none()
# ---------------------------------------------------------------------------
# Minimum data requirements per indicator
# ---------------------------------------------------------------------------

# Fewest bars each indicator needs before its math is defined.
MIN_BARS: dict[str, int] = {
    "adx": 28,
    "ema": 0,  # dynamic: period + 1
    "rsi": 15,
    "atr": 15,
    "volume_profile": 20,
    "pivot_points": 5,
}

# Default lookback period per indicator when the caller supplies none.
DEFAULT_PERIODS: dict[str, int] = {
    "adx": 14,
    "ema": 20,
    "rsi": 14,
    "atr": 14,
}


# ---------------------------------------------------------------------------
# Pure computation helpers
# ---------------------------------------------------------------------------

def _ema(values: list[float], period: int) -> list[float]:
    """Compute an EMA series seeded with the SMA of the first *period* values.

    Returns one entry per bar from the seed onward, i.e. a list of length
    ``len(values) - period + 1`` (NOT the same length as *values*), or an
    empty list when fewer than *period* values are supplied.
    """
    if len(values) < period:
        return []
    # Standard smoothing factor k = 2 / (period + 1).
    k = 2.0 / (period + 1)
    ema_vals: list[float] = [sum(values[:period]) / period]
    for v in values[period:]:
        ema_vals.append(v * k + ema_vals[-1] * (1 - k))
    return ema_vals
def compute_adx(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    period: int = 14,
) -> dict[str, Any]:
    """Compute ADX from high/low/close arrays.

    Returns dict with ``adx``, ``plus_di``, ``minus_di``, ``score``.

    Raises:
        ValidationError: when fewer than ``2 * period`` bars are supplied.
    """
    n = len(closes)
    if n < 2 * period:
        raise ValidationError(
            f"ADX requires at least {2 * period} bars, got {n}"
        )

    # True Range, +DM, -DM — one value per bar transition (n - 1 total).
    tr_list: list[float] = []
    plus_dm: list[float] = []
    minus_dm: list[float] = []
    for i in range(1, n):
        h, l, pc = highs[i], lows[i], closes[i - 1]
        tr_list.append(max(h - l, abs(h - pc), abs(l - pc)))
        up = highs[i] - highs[i - 1]
        down = lows[i - 1] - lows[i]
        # Only the dominant directional move counts, and only if positive.
        plus_dm.append(up if up > down and up > 0 else 0.0)
        minus_dm.append(down if down > up and down > 0 else 0.0)

    # Smoothed TR, +DM, -DM (Wilder smoothing). Note: this variant keeps
    # running smoothed *sums* (seeded with a plain sum), not averages —
    # the ratio pdi/mdi below is unaffected by the common scale.
    def _smooth(vals: list[float], p: int) -> list[float]:
        s = [sum(vals[:p])]
        for v in vals[p:]:
            s.append(s[-1] - s[-1] / p + v)
        return s

    s_tr = _smooth(tr_list, period)
    s_plus = _smooth(plus_dm, period)
    s_minus = _smooth(minus_dm, period)

    # +DI, -DI, DX. The *_last variables end up holding the final DI pair.
    dx_list: list[float] = []
    plus_di_last = 0.0
    minus_di_last = 0.0
    for i in range(len(s_tr)):
        # 1e-10 guards avoid division by zero on flat/degenerate data.
        tr_v = s_tr[i] if s_tr[i] != 0 else 1e-10
        pdi = 100.0 * s_plus[i] / tr_v
        mdi = 100.0 * s_minus[i] / tr_v
        denom = pdi + mdi if (pdi + mdi) != 0 else 1e-10
        dx_list.append(100.0 * abs(pdi - mdi) / denom)
        plus_di_last = pdi
        minus_di_last = mdi

    # ADX = smoothed DX; falls back to a simple mean when there are not
    # enough DX values for another full smoothing pass.
    if len(dx_list) < period:
        adx_val = sum(dx_list) / len(dx_list) if dx_list else 0.0
    else:
        adx_vals = _smooth(dx_list, period)
        adx_val = adx_vals[-1]

    # ADX is already on a 0-100 scale; just clamp.
    score = max(0.0, min(100.0, adx_val))

    return {
        "adx": round(adx_val, 4),
        "plus_di": round(plus_di_last, 4),
        "minus_di": round(minus_di_last, 4),
        "score": round(score, 4),
    }


def compute_ema(
    closes: list[float],
    period: int = 20,
) -> dict[str, Any]:
    """Compute EMA for *closes* with given *period*.

    Score: normalized position of latest close relative to EMA.
    Above EMA → higher score, below → lower.

    Raises:
        ValidationError: when fewer than ``period + 1`` bars are supplied.
    """
    min_bars = period + 1
    if len(closes) < min_bars:
        raise ValidationError(
            f"EMA({period}) requires at least {min_bars} bars, got {len(closes)}"
        )

    ema_vals = _ema(closes, period)
    latest_ema = ema_vals[-1]
    latest_close = closes[-1]

    # Score: 50 = at EMA, 100 = 5%+ above, 0 = 5%+ below
    if latest_ema == 0:
        pct = 0.0
    else:
        pct = (latest_close - latest_ema) / latest_ema * 100.0
    score = max(0.0, min(100.0, 50.0 + pct * 10.0))

    return {
        "ema": round(latest_ema, 4),
        "period": period,
        "latest_close": round(latest_close, 4),
        "score": round(score, 4),
    }
def compute_rsi(
    closes: list[float],
    period: int = 14,
) -> dict[str, Any]:
    """Compute the Relative Strength Index via Wilder smoothing.

    The score equals the RSI itself, since RSI is already on a 0-100 scale.

    Raises:
        ValidationError: when fewer than ``period + 1`` bars are supplied.
    """
    bar_count = len(closes)
    if bar_count < period + 1:
        raise ValidationError(
            f"RSI requires at least {period + 1} bars, got {bar_count}"
        )

    # Bar-to-bar moves, split into upward and downward components.
    moves = [b - a for a, b in zip(closes, closes[1:])]
    ups = [m if m > 0 else 0.0 for m in moves]
    downs = [-m if m < 0 else 0.0 for m in moves]

    # Seed with simple averages over the first window, then apply
    # Wilder smoothing across the remaining moves.
    up_avg = sum(ups[:period]) / period
    down_avg = sum(downs[:period]) / period
    for j in range(period, len(moves)):
        up_avg = (up_avg * (period - 1) + ups[j]) / period
        down_avg = (down_avg * (period - 1) + downs[j]) / period

    # All-gain windows have no losses: RSI pegs at 100 by convention.
    if down_avg == 0:
        rsi = 100.0
    else:
        rs = up_avg / down_avg
        rsi = 100.0 - 100.0 / (1.0 + rs)

    return {
        "rsi": round(rsi, 4),
        "period": period,
        "score": round(max(0.0, min(100.0, rsi)), 4),
    }
def compute_atr(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    period: int = 14,
) -> dict[str, Any]:
    """Compute the Average True Range with Wilder smoothing.

    The score is an inverse volatility measure: ATR as a percentage of
    the latest close, mapped so 0% volatility scores 100 and 10% (or
    more) scores 0.

    Raises:
        ValidationError: if fewer than ``period + 1`` bars are supplied.
    """
    bar_count = len(closes)
    if bar_count < period + 1:
        raise ValidationError(
            f"ATR requires at least {period + 1} bars, got {bar_count}"
        )

    # True range per bar: widest of the bar range and both close gaps.
    true_ranges = [
        max(
            highs[i] - lows[i],
            abs(highs[i] - closes[i - 1]),
            abs(lows[i] - closes[i - 1]),
        )
        for i in range(1, bar_count)
    ]

    # Seed with a simple average, then Wilder-smooth the remainder.
    atr = sum(true_ranges[:period]) / period
    for tr in true_ranges[period:]:
        atr = (atr * (period - 1) + tr) / period

    last_close = closes[-1]
    atr_pct = 0.0 if last_close == 0 else atr / last_close * 100.0
    score = min(100.0, max(0.0, 100.0 - atr_pct * 10.0))

    return {
        "atr": round(atr, 4),
        "period": period,
        "atr_percent": round(atr_pct, 4),
        "score": round(score, 4),
    }
def compute_volume_profile(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    volumes: list[int],
    num_bins: int = 20,
) -> dict[str, Any]:
    """Compute a volume profile: POC, value area, HVN/LVN price levels.

    Each bar's full volume is credited to every bin its high-low range
    touches. NOTE(review): volume is not apportioned across bins, so a
    wide bar counts multiple times — totals are self-consistent but not
    a true distribution; confirm this is intended.

    Score: proximity of the latest close to the point of control —
    closer means higher.

    Raises:
        ValidationError: if fewer than 20 bars are supplied.
    """
    bar_count = len(closes)
    if bar_count < 20:
        raise ValidationError(
            f"Volume Profile requires at least 20 bars, got {bar_count}"
        )

    lo_bound = min(lows)
    hi_bound = max(highs)
    if hi_bound == lo_bound:
        hi_bound = lo_bound + 1.0  # degenerate flat range: widen artificially

    width = (hi_bound - lo_bound) / num_bins
    bin_volume: list[float] = [0.0] * num_bins
    bin_centres = [lo_bound + (b + 0.5) * width for b in range(num_bins)]

    # Credit each bar's volume to every bin its range overlaps.
    for bar in range(bar_count):
        for b in range(num_bins):
            lower_edge = lo_bound + b * width
            if highs[bar] >= lower_edge and lows[bar] <= lower_edge + width:
                bin_volume[b] += volumes[bar]

    total = sum(bin_volume)
    if total == 0:
        total = 1.0  # avoid division by zero in an all-zero profile

    # POC: centre of the single highest-volume bin (first max wins ties).
    poc_idx = bin_volume.index(max(bin_volume))
    poc = round(bin_centres[poc_idx], 4)

    # Value area: smallest set of highest-volume bins covering 70% of volume.
    by_volume = sorted(range(num_bins), key=lambda b: bin_volume[b], reverse=True)
    covered = 0.0
    chosen: list[int] = []
    for b in by_volume:
        covered += bin_volume[b]
        chosen.append(b)
        if covered >= total * 0.7:
            break
    va_low = round(lo_bound + min(chosen) * width, 4)
    va_high = round(lo_bound + (max(chosen) + 1) * width, 4)

    # High/low volume nodes relative to the per-bin average.
    mean_volume = total / num_bins
    hvn = [round(bin_centres[b], 4) for b in range(num_bins) if bin_volume[b] > mean_volume]
    lvn = [round(bin_centres[b], 4) for b in range(num_bins) if bin_volume[b] < mean_volume]

    # Score: 100 at the POC, falling linearly with distance across the range.
    span = hi_bound - lo_bound
    if span == 0:
        score = 100.0
    else:
        score = min(100.0, max(0.0, 100.0 * (1.0 - abs(closes[-1] - poc) / span)))

    return {
        "poc": poc,
        "value_area_low": va_low,
        "value_area_high": va_high,
        "hvn": hvn,
        "lvn": lvn,
        "score": round(score, 4),
    }
def compute_pivot_points(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    window: int = 2,
) -> dict[str, Any]:
    """Detect swing highs/lows as pivot points.

    A swing high at index *i* means highs[i] >= every high within
    *window* bars on each side; swing lows are symmetric. The score is
    the percentage of detected pivots lying within 2% of the latest
    close.

    Raises:
        ValidationError: if fewer than 5 bars are supplied.
    """
    bar_count = len(closes)
    if bar_count < 5:
        raise ValidationError(
            f"Pivot Points requires at least 5 bars, got {bar_count}"
        )

    swing_highs: list[float] = []
    swing_lows: list[float] = []
    for i in range(window, bar_count - window):
        neighbourhood = range(i - window, i + window + 1)
        if all(highs[i] >= highs[j] for j in neighbourhood):
            swing_highs.append(round(highs[i], 4))
        if all(lows[i] <= lows[j] for j in neighbourhood):
            swing_lows.append(round(lows[i], 4))

    pivots = swing_highs + swing_lows
    last_close = closes[-1]

    # Score: fraction of pivots within 2% of the current price → 0-100.
    if not pivots or last_close == 0:
        score = 0.0
    else:
        close_by = sum(
            1 for p in pivots if abs(p - last_close) / last_close <= 0.02
        )
        score = min(100.0, close_by / len(pivots) * 100.0)

    return {
        "swing_highs": swing_highs,
        "swing_lows": swing_lows,
        "pivot_count": len(pivots),
        "score": round(score, 4),
    }
def compute_ema_cross(
    closes: list[float],
    short_period: int = 20,
    long_period: int = 50,
    tolerance: float = 1e-6,
) -> dict[str, Any]:
    """Compare a short EMA against a long EMA.

    Signal is "bullish" when short > long, "bearish" when short < long,
    and "neutral" when the two are within *tolerance* of each other.

    Raises:
        ValidationError: if fewer than ``long_period + 1`` bars are given.
    """
    required = long_period + 1
    if len(closes) < required:
        raise ValidationError(
            f"EMA Cross requires at least {required} bars, got {len(closes)}"
        )

    short_ema = _ema(closes, short_period)[-1]
    long_ema = _ema(closes, long_period)[-1]

    spread = short_ema - long_ema
    if abs(spread) <= tolerance:
        signal = "neutral"
    else:
        signal = "bullish" if spread > 0 else "bearish"

    return {
        "short_ema": round(short_ema, 4),
        "long_ema": round(long_ema, 4),
        "short_period": short_period,
        "long_period": long_period,
        "signal": signal,
    }


# ---------------------------------------------------------------------------
# Supported indicator types
# ---------------------------------------------------------------------------

INDICATOR_TYPES = {"adx", "ema", "rsi", "atr", "volume_profile", "pivot_points"}


# ---------------------------------------------------------------------------
# Service-layer functions (DB + cache + validation)
# ---------------------------------------------------------------------------

def _extract_ohlcv(records: list) -> tuple[
    list[float], list[float], list[float], list[float], list[int]
]:
    """Unzip an OHLCVRecord list into parallel O/H/L/C/volume arrays."""
    opens: list[float] = []
    highs: list[float] = []
    lows: list[float] = []
    closes: list[float] = []
    volumes: list[int] = []
    for rec in records:
        opens.append(float(rec.open))
        highs.append(float(rec.high))
        lows.append(float(rec.low))
        closes.append(float(rec.close))
        volumes.append(int(rec.volume))
    return opens, highs, lows, closes, volumes
async def get_indicator(
    db: AsyncSession,
    symbol: str,
    indicator_type: str,
    start_date: date | None = None,
    end_date: date | None = None,
    period: int | None = None,
) -> dict[str, Any]:
    """Compute a single indicator for *symbol*.

    Checks the cache first and stores the result after computing.

    Raises:
        ValidationError: for an unknown indicator type or too few bars.
        NotFoundError: (via query_ohlcv) if the ticker does not exist.
    """
    indicator_type = indicator_type.lower()
    if indicator_type not in INDICATOR_TYPES:
        raise ValidationError(
            f"Unknown indicator type: {indicator_type}. "
            f"Supported: {', '.join(sorted(INDICATOR_TYPES))}"
        )

    # Resolve the effective period BEFORE building the cache key.
    # BUG FIX: the key previously omitted the period, so e.g. RSI(7) and
    # RSI(14) for the same symbol/range shared one cache entry (note that
    # get_ema_cross already encodes its periods in the key).
    effective_period = period or DEFAULT_PERIODS.get(indicator_type)
    cache_key = (
        symbol.upper(),
        str(start_date),
        str(end_date),
        indicator_type,
        effective_period,
    )
    cached = indicator_cache.get(cache_key)
    if cached is not None:
        return cached

    records = await query_ohlcv(db, symbol, start_date, end_date)
    _, highs, lows, closes, volumes = _extract_ohlcv(records)
    n = len(records)

    if indicator_type == "adx":
        result = compute_adx(highs, lows, closes, period=effective_period)
    elif indicator_type == "ema":
        result = compute_ema(closes, period=effective_period)
    elif indicator_type == "rsi":
        result = compute_rsi(closes, period=effective_period)
    elif indicator_type == "atr":
        result = compute_atr(highs, lows, closes, period=effective_period)
    elif indicator_type == "volume_profile":
        result = compute_volume_profile(highs, lows, closes, volumes)
    else:  # pivot_points — membership was validated above
        result = compute_pivot_points(highs, lows, closes)

    response = {
        "indicator_type": indicator_type,
        "values": {k: v for k, v in result.items() if k != "score"},
        "score": result["score"],
        "bars_used": n,
    }

    indicator_cache.set(cache_key, response)
    return response


async def get_ema_cross(
    db: AsyncSession,
    symbol: str,
    start_date: date | None = None,
    end_date: date | None = None,
    short_period: int = 20,
    long_period: int = 50,
) -> dict[str, Any]:
    """Compute the EMA cross signal for *symbol*, with caching."""
    cache_key = (
        symbol.upper(),
        str(start_date),
        str(end_date),
        f"ema_cross_{short_period}_{long_period}",
    )
    cached = indicator_cache.get(cache_key)
    if cached is not None:
        return cached

    records = await query_ohlcv(db, symbol, start_date, end_date)
    _, _, _, closes, _ = _extract_ohlcv(records)

    result = compute_ema_cross(closes, short_period, long_period)
    indicator_cache.set(cache_key, result)
    return result
query_ohlcv(db, symbol, start_date, end_date) + _, _, _, closes, _ = _extract_ohlcv(records) + + result = compute_ema_cross(closes, short_period, long_period) + + indicator_cache.set(cache_key, result) + return result diff --git a/app/services/ingestion_service.py b/app/services/ingestion_service.py new file mode 100644 index 0000000..16c32cd --- /dev/null +++ b/app/services/ingestion_service.py @@ -0,0 +1,172 @@ +"""Ingestion Pipeline service: fetch from provider, validate, upsert into Price Store. + +Handles rate-limit resume via IngestionProgress and provider error isolation. +""" + +from __future__ import annotations + +import logging +from dataclasses import dataclass +from datetime import date, timedelta + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import NotFoundError, ProviderError, RateLimitError +from app.models.settings import IngestionProgress +from app.models.ticker import Ticker +from app.providers.protocol import MarketDataProvider +from app.services import price_service + +logger = logging.getLogger(__name__) + + +@dataclass +class IngestionResult: + """Result of an ingestion run.""" + + symbol: str + records_ingested: int + last_date: date | None + status: str # "complete" | "partial" | "error" + message: str | None = None + + +async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker: + """Look up ticker by symbol. 
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve *symbol* (trimmed, upper-cased) to a Ticker row.

    Raises:
        NotFoundError: if no ticker with that symbol exists.
    """
    wanted = symbol.strip().upper()
    row = await db.execute(select(Ticker).where(Ticker.symbol == wanted))
    ticker = row.scalar_one_or_none()
    if ticker is None:
        raise NotFoundError(f"Ticker not found: {wanted}")
    return ticker


async def _get_progress(db: AsyncSession, ticker_id: int) -> IngestionProgress | None:
    """Return the ticker's IngestionProgress row, or None if none exists."""
    row = await db.execute(
        select(IngestionProgress).where(IngestionProgress.ticker_id == ticker_id)
    )
    return row.scalar_one_or_none()


async def _update_progress(
    db: AsyncSession, ticker_id: int, last_date: date
) -> None:
    """Record *last_date* as the ticker's last ingested date (insert or update)."""
    progress = await _get_progress(db, ticker_id)
    if progress is None:
        db.add(IngestionProgress(ticker_id=ticker_id, last_ingested_date=last_date))
    else:
        progress.last_ingested_date = last_date
    # Commit immediately so resume state survives a mid-run interruption.
    await db.commit()
async def fetch_and_ingest(
    db: AsyncSession,
    provider: MarketDataProvider,
    symbol: str,
    start_date: date | None = None,
    end_date: date | None = None,
) -> IngestionResult:
    """Fetch OHLCV data from *provider* and upsert it into the Price Store.

    - Resolves start_date from IngestionProgress if not provided (resume).
    - Defaults end_date to today.
    - Tracks last_ingested_date after each successful upsert.
    - On RateLimitError from the provider: returns partial progress.
    - On ProviderError: returns an error result; no data is modified.
    """
    ticker = await _get_ticker(db, symbol)

    if end_date is None:
        end_date = date.today()

    if start_date is None:
        # Resume from the day after the last ingested date, or default to
        # a one-year backfill when this ticker has never been ingested.
        progress = await _get_progress(db, ticker.id)
        if progress is not None:
            start_date = progress.last_ingested_date + timedelta(days=1)
        else:
            start_date = end_date - timedelta(days=365)

    if start_date > end_date:
        # Nothing new to fetch.
        return IngestionResult(
            symbol=ticker.symbol,
            records_ingested=0,
            last_date=None,
            status="complete",
            message="Already up to date",
        )

    try:
        records = await provider.fetch_ohlcv(ticker.symbol, start_date, end_date)
    except RateLimitError:
        # Rate limited before anything was fetched; the caller can retry.
        return IngestionResult(
            symbol=ticker.symbol,
            records_ingested=0,
            last_date=None,
            status="partial",
            message="Rate limited before any records fetched. Resume available.",
        )
    except ProviderError as exc:
        logger.error("Provider error for %s: %s", ticker.symbol, exc)
        return IngestionResult(
            symbol=ticker.symbol,
            records_ingested=0,
            last_date=None,
            status="error",
            message=str(exc),
        )

    # Ingest oldest-first so progress tracking stays monotonic.
    records.sort(key=lambda r: r.date)

    ingested = 0
    newest: date | None = None
    for record in records:
        try:
            await price_service.upsert_ohlcv(
                db,
                symbol=ticker.symbol,
                record_date=record.date,
                open_=record.open,
                high=record.high,
                low=record.low,
                close=record.close,
                volume=record.volume,
            )
            ingested += 1
            newest = record.date
            # Persist resume state after every successful row.
            await _update_progress(db, ticker.id, record.date)
        except RateLimitError:
            # Mid-ingestion rate limit — report how far we got.
            logger.warning(
                "Rate limited during ingestion for %s after %d records",
                ticker.symbol,
                ingested,
            )
            return IngestionResult(
                symbol=ticker.symbol,
                records_ingested=ingested,
                last_date=newest,
                status="partial",
                message=f"Rate limited. Ingested {ingested} records. Resume available.",
            )

    return IngestionResult(
        symbol=ticker.symbol,
        records_ingested=ingested,
        last_date=newest,
        status="complete",
        message=f"Successfully ingested {ingested} records",
    )
async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve *symbol* (trimmed, upper-cased) to a Ticker row.

    Raises:
        NotFoundError: if no ticker with that symbol exists.
    """
    wanted = symbol.strip().upper()
    row = await db.execute(select(Ticker).where(Ticker.symbol == wanted))
    ticker = row.scalar_one_or_none()
    if ticker is None:
        raise NotFoundError(f"Ticker not found: {wanted}")
    return ticker


def _validate_ohlcv(
    high: float, low: float, open_: float, close: float, volume: int, record_date: date
) -> None:
    """Check OHLCV business rules; raise ValidationError on the first violation."""
    if high < low:
        raise ValidationError("Validation error: high must be >= low")
    if any(price < 0 for price in (open_, high, low, close)):
        raise ValidationError("Validation error: prices must be >= 0")
    if volume < 0:
        raise ValidationError("Validation error: volume must be >= 0")
    if record_date > date.today():
        raise ValidationError("Validation error: date must not be in the future")
async def upsert_ohlcv(
    db: AsyncSession,
    symbol: str,
    record_date: date,
    open_: float,
    high: float,
    low: float,
    close: float,
    volume: int,
) -> OHLCVRecord:
    """Insert or update the OHLCV record for (ticker, date).

    Validates business rules, resolves the ticker, then performs an
    ON CONFLICT DO UPDATE on the (ticker_id, date) unique constraint.

    Raises:
        ValidationError: if the record violates business rules.
        NotFoundError: if the ticker does not exist.
    """
    _validate_ohlcv(high, low, open_, close, volume, record_date)
    ticker = await _get_ticker(db, symbol)

    # NOTE(review): datetime.utcnow() is naive and deprecated since 3.12;
    # the rest of the codebase uses datetime.now(timezone.utc) — confirm
    # the column type before switching to an aware timestamp here.
    insert_stmt = pg_insert(OHLCVRecord).values(
        ticker_id=ticker.id,
        date=record_date,
        open=open_,
        high=high,
        low=low,
        close=close,
        volume=volume,
        created_at=datetime.utcnow(),
    )
    upsert_stmt = insert_stmt.on_conflict_do_update(
        constraint="uq_ohlcv_ticker_date",
        set_={
            "open": insert_stmt.excluded.open,
            "high": insert_stmt.excluded.high,
            "low": insert_stmt.excluded.low,
            "close": insert_stmt.excluded.close,
            "volume": insert_stmt.excluded.volume,
            "created_at": insert_stmt.excluded.created_at,
        },
    ).returning(OHLCVRecord)

    result = await db.execute(upsert_stmt)
    await db.commit()

    record = result.scalar_one()

    # TODO: Invalidate LRU cache entries for this ticker (Task 7.1)
    # TODO: Mark composite score as stale for this ticker (Task 10.1)

    return record


async def query_ohlcv(
    db: AsyncSession,
    symbol: str,
    start_date: date | None = None,
    end_date: date | None = None,
) -> list[OHLCVRecord]:
    """Fetch OHLCV records for a ticker, optionally bounded by date range.

    Returns records sorted by date ascending.

    Raises:
        NotFoundError: if the ticker does not exist.
    """
    ticker = await _get_ticker(db, symbol)

    query = select(OHLCVRecord).where(OHLCVRecord.ticker_id == ticker.id)
    if start_date is not None:
        query = query.where(OHLCVRecord.date >= start_date)
    if end_date is not None:
        query = query.where(OHLCVRecord.date <= end_date)

    rows = await db.execute(query.order_by(OHLCVRecord.date.asc()))
    return list(rows.scalars().all())
+Short: target = nearest SR below, stop = entry + ATR × multiplier. +Filters by configurable R:R threshold (default 3:1). +""" + +from __future__ import annotations + +import logging +from datetime import datetime, timezone + +from sqlalchemy import delete, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import NotFoundError +from app.models.score import CompositeScore +from app.models.sr_level import SRLevel +from app.models.ticker import Ticker +from app.models.trade_setup import TradeSetup +from app.services.indicator_service import _extract_ohlcv, compute_atr +from app.services.price_service import query_ohlcv + +logger = logging.getLogger(__name__) + + +async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker: + normalised = symbol.strip().upper() + result = await db.execute(select(Ticker).where(Ticker.symbol == normalised)) + ticker = result.scalar_one_or_none() + if ticker is None: + raise NotFoundError(f"Ticker not found: {normalised}") + return ticker + + +async def scan_ticker( + db: AsyncSession, + symbol: str, + rr_threshold: float = 3.0, + atr_multiplier: float = 1.5, +) -> list[TradeSetup]: + """Scan a single ticker for trade setups meeting the R:R threshold. + + 1. Fetch OHLCV data and compute ATR. + 2. Fetch SR levels. + 3. Compute long and short setups. + 4. Filter by R:R threshold. + 5. Delete old setups for this ticker and persist new ones. + + Returns list of persisted TradeSetup models. 
async def _clear_setups(db: AsyncSession, ticker_id: int) -> None:
    """Delete (and commit) any stored trade setups for *ticker_id*.

    BUG FIX: the early-exit paths previously issued the DELETE without a
    commit — only the success path committed — so stale setups could
    survive a skipped scan.
    """
    await db.execute(delete(TradeSetup).where(TradeSetup.ticker_id == ticker_id))
    await db.commit()


async def scan_ticker(
    db: AsyncSession,
    symbol: str,
    rr_threshold: float = 3.0,
    atr_multiplier: float = 1.5,
) -> list[TradeSetup]:
    """Scan a single ticker for trade setups meeting the R:R threshold.

    1. Fetch OHLCV data and compute ATR.
    2. Fetch SR levels.
    3. Compute long and short setups.
    4. Filter by R:R threshold.
    5. Delete old setups for this ticker and persist new ones.

    Returns the list of persisted TradeSetup models.

    Raises:
        NotFoundError: if the ticker does not exist.
    """
    ticker = await _get_ticker(db, symbol)

    records = await query_ohlcv(db, symbol)
    if not records or len(records) < 15:
        logger.info(
            "Skipping %s: insufficient OHLCV data (%d bars, need 15+)",
            symbol, len(records),
        )
        await _clear_setups(db, ticker.id)
        return []

    _, highs, lows, closes, _ = _extract_ohlcv(records)
    entry_price = closes[-1]

    try:
        atr_value = compute_atr(highs, lows, closes)["atr"]
    except Exception:
        logger.info("Skipping %s: cannot compute ATR", symbol)
        await _clear_setups(db, ticker.id)
        return []

    if atr_value <= 0:
        logger.info("Skipping %s: ATR is zero or negative", symbol)
        await _clear_setups(db, ticker.id)
        return []

    # SR levels are precomputed elsewhere; read them from the DB.
    sr_result = await db.execute(
        select(SRLevel).where(SRLevel.ticker_id == ticker.id)
    )
    sr_levels = list(sr_result.scalars().all())
    if not sr_levels:
        logger.info("Skipping %s: no SR levels available", symbol)
        await _clear_setups(db, ticker.id)
        return []

    levels_above = sorted(
        (lv for lv in sr_levels if lv.price_level > entry_price),
        key=lambda lv: lv.price_level,
    )
    levels_below = sorted(
        (lv for lv in sr_levels if lv.price_level < entry_price),
        key=lambda lv: lv.price_level,
        reverse=True,
    )

    comp_result = await db.execute(
        select(CompositeScore).where(CompositeScore.ticker_id == ticker.id)
    )
    comp = comp_result.scalar_one_or_none()
    composite_score = comp.score if comp else 0.0

    now = datetime.now(timezone.utc)
    setups: list[TradeSetup] = []

    def _maybe_add(direction: str, target: float, stop: float) -> None:
        """Append a setup when risk and reward are positive and R:R passes."""
        if direction == "long":
            reward, risk = target - entry_price, entry_price - stop
        else:
            reward, risk = entry_price - target, stop - entry_price
        if risk <= 0 or reward <= 0:
            return
        rr = reward / risk
        if rr < rr_threshold:
            return
        setups.append(TradeSetup(
            ticker_id=ticker.id,
            direction=direction,
            entry_price=round(entry_price, 4),
            stop_loss=round(stop, 4),
            target=round(target, 4),
            rr_ratio=round(rr, 4),
            composite_score=round(composite_score, 4),
            detected_at=now,
        ))

    # Long: target = nearest SR above, stop = entry - ATR × multiplier.
    if levels_above:
        _maybe_add(
            "long",
            levels_above[0].price_level,
            entry_price - atr_value * atr_multiplier,
        )
    # Short: target = nearest SR below, stop = entry + ATR × multiplier.
    if levels_below:
        _maybe_add(
            "short",
            levels_below[0].price_level,
            entry_price + atr_value * atr_multiplier,
        )

    # Replace any previously stored setups with the new batch.
    await db.execute(delete(TradeSetup).where(TradeSetup.ticker_id == ticker.id))
    for setup in setups:
        db.add(setup)
    await db.commit()

    # Refresh so auto-generated IDs are populated on the returned models.
    for setup in setups:
        await db.refresh(setup)

    return setups
async def scan_all_tickers(
    db: AsyncSession,
    rr_threshold: float = 3.0,
    atr_multiplier: float = 1.5,
) -> list[TradeSetup]:
    """Scan every tracked ticker for trade setups.

    Tickers are processed independently so one failure cannot abort the
    whole run; failures are logged and skipped. Returns all setups found.
    """
    rows = await db.execute(select(Ticker).order_by(Ticker.symbol))
    found: list[TradeSetup] = []
    for ticker in rows.scalars().all():
        try:
            found.extend(
                await scan_ticker(db, ticker.symbol, rr_threshold, atr_multiplier)
            )
        except Exception:
            logger.exception("Error scanning ticker %s", ticker.symbol)
    return found


async def get_trade_setups(
    db: AsyncSession,
    direction: str | None = None,
) -> list[dict]:
    """Return stored trade setups as dicts, optionally filtered by direction.

    Sorted by R:R ratio descending, then composite score descending;
    each dict carries the ticker symbol alongside the setup fields.
    """
    query = (
        select(TradeSetup, Ticker.symbol)
        .join(Ticker, TradeSetup.ticker_id == Ticker.id)
    )
    if direction is not None:
        query = query.where(TradeSetup.direction == direction.lower())
    query = query.order_by(
        TradeSetup.rr_ratio.desc(),
        TradeSetup.composite_score.desc(),
    )

    result = await db.execute(query)

    def _as_dict(setup: TradeSetup, symbol: str) -> dict:
        return {
            "id": setup.id,
            "symbol": symbol,
            "direction": setup.direction,
            "entry_price": setup.entry_price,
            "stop_loss": setup.stop_loss,
            "target": setup.target,
            "rr_ratio": setup.rr_ratio,
            "composite_score": setup.composite_score,
            "detected_at": setup.detected_at,
        }

    return [_as_dict(setup, symbol) for setup, symbol in result.all()]
+""" + +from __future__ import annotations + +import json +import logging +from datetime import datetime, timezone + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import NotFoundError, ValidationError +from app.models.score import CompositeScore, DimensionScore +from app.models.settings import SystemSetting +from app.models.ticker import Ticker + +logger = logging.getLogger(__name__) + +DIMENSIONS = ["technical", "sr_quality", "sentiment", "fundamental", "momentum"] + +DEFAULT_WEIGHTS: dict[str, float] = { + "technical": 0.25, + "sr_quality": 0.20, + "sentiment": 0.15, + "fundamental": 0.20, + "momentum": 0.20, +} + +SCORING_WEIGHTS_KEY = "scoring_weights" + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker: + normalised = symbol.strip().upper() + result = await db.execute(select(Ticker).where(Ticker.symbol == normalised)) + ticker = result.scalar_one_or_none() + if ticker is None: + raise NotFoundError(f"Ticker not found: {normalised}") + return ticker + + +async def _get_weights(db: AsyncSession) -> dict[str, float]: + """Load scoring weights from SystemSetting, falling back to defaults.""" + result = await db.execute( + select(SystemSetting).where(SystemSetting.key == SCORING_WEIGHTS_KEY) + ) + setting = result.scalar_one_or_none() + if setting is not None: + try: + return json.loads(setting.value) + except (json.JSONDecodeError, TypeError): + logger.warning("Invalid scoring weights in DB, using defaults") + return dict(DEFAULT_WEIGHTS) + + +async def _save_weights(db: AsyncSession, weights: dict[str, float]) -> None: + """Persist scoring weights to SystemSetting.""" + result = await db.execute( + select(SystemSetting).where(SystemSetting.key == SCORING_WEIGHTS_KEY) + ) + setting = result.scalar_one_or_none() + now = 
async def _save_weights(db: AsyncSession, weights: dict[str, float]) -> None:
    """Persist scoring weights as JSON in SystemSetting (insert or update)."""
    row = await db.execute(
        select(SystemSetting).where(SystemSetting.key == SCORING_WEIGHTS_KEY)
    )
    setting = row.scalar_one_or_none()
    now = datetime.now(timezone.utc)
    if setting is None:
        db.add(SystemSetting(
            key=SCORING_WEIGHTS_KEY,
            value=json.dumps(weights),
            updated_at=now,
        ))
    else:
        setting.value = json.dumps(weights)
        setting.updated_at = now


# ---------------------------------------------------------------------------
# Dimension score computation
# ---------------------------------------------------------------------------

async def _compute_technical_score(db: AsyncSession, symbol: str) -> float | None:
    """Blend ADX, EMA and RSI scores into one technical score (0-100).

    Indicators that cannot be computed (e.g. too few bars) are skipped
    and the remaining weights re-normalized; returns None when no price
    data exists or none of the indicators can be computed.
    """
    from app.services.indicator_service import (
        compute_adx,
        compute_ema,
        compute_rsi,
        _extract_ohlcv,
    )
    from app.services.price_service import query_ohlcv

    records = await query_ohlcv(db, symbol)
    if not records:
        return None

    _, highs, lows, closes, _ = _extract_ohlcv(records)

    weighted_parts: list[tuple[float, float]] = []  # (weight, score)
    attempts = (
        (0.4, lambda: compute_adx(highs, lows, closes)["score"]),  # needs 28+ bars
        (0.3, lambda: compute_ema(closes)["score"]),               # needs period+1 bars
        (0.3, lambda: compute_rsi(closes)["score"]),               # needs 15+ bars
    )
    for weight, attempt in attempts:
        try:
            weighted_parts.append((weight, attempt()))
        except Exception:
            # Insufficient data for this indicator — skip it.
            pass

    if not weighted_parts:
        return None
    denom = sum(w for w, _ in weighted_parts)
    if denom == 0:
        return None
    blended = sum(w * s for w, s in weighted_parts) / denom
    return max(0.0, min(100.0, blended))
async def _compute_sr_quality_score(db: AsyncSession, symbol: str) -> float | None:
    """Score S/R quality (0-100) from level count, proximity and strength.

    Three additive factors: number of strong levels (max 40 pts),
    proximity of the nearest level to the current price (max 30 pts),
    and average level strength (max 30 pts). Returns None when price
    data or levels are unavailable.
    """
    from app.services.price_service import query_ohlcv
    from app.services.sr_service import get_sr_levels

    records = await query_ohlcv(db, symbol)
    if not records:
        return None

    current_price = float(records[-1].close)
    if current_price <= 0:
        return None

    try:
        levels = await get_sr_levels(db, symbol)
    except Exception:
        return None
    if not levels:
        return None

    # Factor 1: strong levels (strength >= 50), 10 pts each, capped at 40.
    strong = sum(1 for lv in levels if lv.strength >= 50)
    count_pts = min(40.0, strong * 10.0)

    # Factor 2: nearest-level proximity — 30 pts at 0% distance, 0 at 5%+.
    nearest = min(
        abs(lv.price_level - current_price) / current_price for lv in levels
    )
    proximity_pts = max(0.0, min(30.0, 30.0 * (1.0 - nearest / 0.05)))

    # Factor 3: average strength scaled into 0-30.
    mean_strength = sum(lv.strength for lv in levels) / len(levels)
    strength_pts = min(30.0, mean_strength * 0.3)

    return max(0.0, min(100.0, count_pts + proximity_pts + strength_pts))


async def _compute_sentiment_score(db: AsyncSession, symbol: str) -> float | None:
    """Delegate to the sentiment service; None when it fails or lacks data."""
    from app.services.sentiment_service import compute_sentiment_dimension_score

    try:
        return await compute_sentiment_dimension_score(db, symbol)
    except Exception:
        return None
async def _compute_fundamental_score(db: AsyncSession, symbol: str) -> float | None:
    """Average the available fundamental sub-scores (0-100).

    Sub-scores: P/E (lower is better), revenue growth and earnings
    surprise (higher is better). Missing metrics are skipped; returns
    None when no metric is available.
    """
    from app.services.fundamental_service import get_fundamental

    fund = await get_fundamental(db, symbol)
    if fund is None:
        return None

    parts: list[float] = []

    # P/E: 15 maps to 100, 45 maps to 0, clamped in between.
    if fund.pe_ratio is not None and fund.pe_ratio > 0:
        parts.append(
            max(0.0, min(100.0, 100.0 - (fund.pe_ratio - 15.0) * (100.0 / 30.0)))
        )

    # Revenue growth: 0% → 50, +20% → 100, -20% → 0.
    if fund.revenue_growth is not None:
        parts.append(max(0.0, min(100.0, 50.0 + fund.revenue_growth * 2.5)))

    # Earnings surprise: 0% → 50, +10% → 100, -10% → 0.
    if fund.earnings_surprise is not None:
        parts.append(max(0.0, min(100.0, 50.0 + fund.earnings_surprise * 5.0)))

    return sum(parts) / len(parts) if parts else None


async def _compute_momentum_score(db: AsyncSession, symbol: str) -> float | None:
    """Score momentum (0-100) from 5-day and 20-day rates of change.

    Each available lookback contributes equally; ±10% ROC maps to the
    0/100 extremes around a 50 midpoint. Returns None with fewer than
    six bars of history.
    """
    from app.services.price_service import query_ohlcv

    records = await query_ohlcv(db, symbol)
    if not records or len(records) < 6:
        return None

    closes = [float(r.close) for r in records]
    latest = closes[-1]

    parts: list[tuple[float, float]] = []  # (weight, score)
    # Lookbacks are in bars including the reference bar: 6 → 5-day ROC,
    # 21 → 20-day ROC.
    for lookback, weight in ((6, 0.5), (21, 0.5)):
        if len(closes) >= lookback and closes[-lookback] > 0:
            roc = (latest - closes[-lookback]) / closes[-lookback] * 100.0
            parts.append((weight, max(0.0, min(100.0, 50.0 + roc * 5.0))))

    if not parts:
        return None
    denom = sum(w for w, _ in parts)
    if denom == 0:
        return None
    blended = sum(w * s for w, s in parts) / denom
    return max(0.0, min(100.0, blended))
+ "fundamental": _compute_fundamental_score, + "momentum": _compute_momentum_score, +} + + +# --------------------------------------------------------------------------- +# Public API +# --------------------------------------------------------------------------- + +async def compute_dimension_score( + db: AsyncSession, symbol: str, dimension: str +) -> float | None: + """Compute a single dimension score for a ticker. + + Returns the score (0-100) or None if insufficient data. + Persists the result to the DimensionScore table. + """ + if dimension not in _DIMENSION_COMPUTERS: + raise ValidationError( + f"Unknown dimension: {dimension}. Valid: {', '.join(DIMENSIONS)}" + ) + + ticker = await _get_ticker(db, symbol) + score_val = await _DIMENSION_COMPUTERS[dimension](db, symbol) + + now = datetime.now(timezone.utc) + + # Upsert dimension score + result = await db.execute( + select(DimensionScore).where( + DimensionScore.ticker_id == ticker.id, + DimensionScore.dimension == dimension, + ) + ) + existing = result.scalar_one_or_none() + + if score_val is not None: + score_val = max(0.0, min(100.0, score_val)) + + if existing is not None: + if score_val is not None: + existing.score = score_val + existing.is_stale = False + existing.computed_at = now + else: + # Can't compute — mark stale + existing.is_stale = True + elif score_val is not None: + dim = DimensionScore( + ticker_id=ticker.id, + dimension=dimension, + score=score_val, + is_stale=False, + computed_at=now, + ) + db.add(dim) + + return score_val + + +async def compute_all_dimensions( + db: AsyncSession, symbol: str +) -> dict[str, float | None]: + """Compute all dimension scores for a ticker. 
Returns dimension → score map.""" + results: dict[str, float | None] = {} + for dim in DIMENSIONS: + results[dim] = await compute_dimension_score(db, symbol, dim) + return results + + +async def compute_composite_score( + db: AsyncSession, + symbol: str, + weights: dict[str, float] | None = None, +) -> tuple[float | None, list[str]]: + """Compute composite score from available dimension scores. + + Returns (composite_score, missing_dimensions). + Missing dimensions are excluded and weights re-normalized. + """ + ticker = await _get_ticker(db, symbol) + + if weights is None: + weights = await _get_weights(db) + + # Get current dimension scores + result = await db.execute( + select(DimensionScore).where(DimensionScore.ticker_id == ticker.id) + ) + dim_scores = {ds.dimension: ds for ds in result.scalars().all()} + + available: list[tuple[str, float, float]] = [] # (dim, weight, score) + missing: list[str] = [] + + for dim in DIMENSIONS: + w = weights.get(dim, 0.0) + if w <= 0: + continue + ds = dim_scores.get(dim) + if ds is not None and not ds.is_stale and ds.score is not None: + available.append((dim, w, ds.score)) + else: + missing.append(dim) + + if not available: + return None, missing + + # Re-normalize weights + total_weight = sum(w for _, w, _ in available) + if total_weight == 0: + return None, missing + + composite = sum(w * s for _, w, s in available) / total_weight + composite = max(0.0, min(100.0, composite)) + + # Persist composite score + now = datetime.now(timezone.utc) + comp_result = await db.execute( + select(CompositeScore).where(CompositeScore.ticker_id == ticker.id) + ) + existing = comp_result.scalar_one_or_none() + + if existing is not None: + existing.score = composite + existing.is_stale = False + existing.weights_json = json.dumps(weights) + existing.computed_at = now + else: + comp = CompositeScore( + ticker_id=ticker.id, + score=composite, + is_stale=False, + weights_json=json.dumps(weights), + computed_at=now, + ) + db.add(comp) + + 
return composite, missing + + +async def get_score( + db: AsyncSession, symbol: str +) -> dict: + """Get composite + all dimension scores for a ticker. + + Recomputes stale dimensions on demand, then recomputes composite. + Returns a dict suitable for ScoreResponse. + """ + ticker = await _get_ticker(db, symbol) + weights = await _get_weights(db) + + # Check for stale dimension scores and recompute them + result = await db.execute( + select(DimensionScore).where(DimensionScore.ticker_id == ticker.id) + ) + dim_scores = {ds.dimension: ds for ds in result.scalars().all()} + + for dim in DIMENSIONS: + ds = dim_scores.get(dim) + if ds is None or ds.is_stale: + await compute_dimension_score(db, symbol, dim) + + # Check composite staleness + comp_result = await db.execute( + select(CompositeScore).where(CompositeScore.ticker_id == ticker.id) + ) + comp = comp_result.scalar_one_or_none() + + if comp is None or comp.is_stale: + await compute_composite_score(db, symbol, weights) + + await db.commit() + + # Re-fetch everything fresh + result = await db.execute( + select(DimensionScore).where(DimensionScore.ticker_id == ticker.id) + ) + dim_scores_list = list(result.scalars().all()) + + comp_result = await db.execute( + select(CompositeScore).where(CompositeScore.ticker_id == ticker.id) + ) + comp = comp_result.scalar_one_or_none() + + dimensions = [] + missing = [] + for dim in DIMENSIONS: + found = next((ds for ds in dim_scores_list if ds.dimension == dim), None) + if found is not None: + dimensions.append({ + "dimension": found.dimension, + "score": found.score, + "is_stale": found.is_stale, + "computed_at": found.computed_at, + }) + else: + missing.append(dim) + + return { + "symbol": ticker.symbol, + "composite_score": comp.score if comp else None, + "composite_stale": comp.is_stale if comp else False, + "weights": weights, + "dimensions": dimensions, + "missing_dimensions": missing, + "computed_at": comp.computed_at if comp else None, + } + + +async def get_rankings(db: 
AsyncSession) -> dict: + """Get all tickers ranked by composite score descending. + + Returns dict suitable for RankingResponse. + """ + weights = await _get_weights(db) + + # Get all tickers + result = await db.execute(select(Ticker).order_by(Ticker.symbol)) + tickers = list(result.scalars().all()) + + rankings: list[dict] = [] + for ticker in tickers: + # Get composite score + comp_result = await db.execute( + select(CompositeScore).where(CompositeScore.ticker_id == ticker.id) + ) + comp = comp_result.scalar_one_or_none() + + # If no composite or stale, recompute + if comp is None or comp.is_stale: + # Recompute stale dimensions first + dim_result = await db.execute( + select(DimensionScore).where( + DimensionScore.ticker_id == ticker.id + ) + ) + dim_scores = {ds.dimension: ds for ds in dim_result.scalars().all()} + for dim in DIMENSIONS: + ds = dim_scores.get(dim) + if ds is None or ds.is_stale: + await compute_dimension_score(db, ticker.symbol, dim) + + await compute_composite_score(db, ticker.symbol, weights) + + await db.commit() + + # Re-fetch + comp_result = await db.execute( + select(CompositeScore).where(CompositeScore.ticker_id == ticker.id) + ) + comp = comp_result.scalar_one_or_none() + if comp is None: + continue + + dim_result = await db.execute( + select(DimensionScore).where( + DimensionScore.ticker_id == ticker.id + ) + ) + dims = [ + { + "dimension": ds.dimension, + "score": ds.score, + "is_stale": ds.is_stale, + "computed_at": ds.computed_at, + } + for ds in dim_result.scalars().all() + ] + + rankings.append({ + "symbol": ticker.symbol, + "composite_score": comp.score, + "dimensions": dims, + }) + + # Sort by composite score descending + rankings.sort(key=lambda r: r["composite_score"], reverse=True) + + return { + "rankings": rankings, + "weights": weights, + } + + +async def update_weights( + db: AsyncSession, weights: dict[str, float] +) -> dict[str, float]: + """Update scoring weights and recompute all composite scores. 
+ + Validates that all weights are positive and dimensions are valid. + Returns the new weights. + """ + # Validate + for dim, w in weights.items(): + if dim not in DIMENSIONS: + raise ValidationError( + f"Unknown dimension: {dim}. Valid: {', '.join(DIMENSIONS)}" + ) + if w < 0: + raise ValidationError(f"Weight for {dim} must be non-negative, got {w}") + + # Ensure all dimensions have a weight (default 0 for unspecified) + full_weights = {dim: weights.get(dim, 0.0) for dim in DIMENSIONS} + + # Persist + await _save_weights(db, full_weights) + + # Recompute all composite scores + result = await db.execute(select(Ticker)) + tickers = list(result.scalars().all()) + + for ticker in tickers: + await compute_composite_score(db, ticker.symbol, full_weights) + + await db.commit() + return full_weights diff --git a/app/services/sentiment_service.py b/app/services/sentiment_service.py new file mode 100644 index 0000000..7a45d28 --- /dev/null +++ b/app/services/sentiment_service.py @@ -0,0 +1,131 @@ +"""Sentiment service. + +Stores sentiment records and computes the sentiment dimension score +using a time-decay weighted average over a configurable lookback window. 
+""" + +from __future__ import annotations + +import math +from datetime import datetime, timedelta, timezone + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import NotFoundError +from app.models.sentiment import SentimentScore +from app.models.ticker import Ticker + + +async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker: + """Look up a ticker by symbol.""" + normalised = symbol.strip().upper() + result = await db.execute(select(Ticker).where(Ticker.symbol == normalised)) + ticker = result.scalar_one_or_none() + if ticker is None: + raise NotFoundError(f"Ticker not found: {normalised}") + return ticker + + +async def store_sentiment( + db: AsyncSession, + symbol: str, + classification: str, + confidence: int, + source: str, + timestamp: datetime | None = None, +) -> SentimentScore: + """Store a new sentiment record for a ticker.""" + ticker = await _get_ticker(db, symbol) + + if timestamp is None: + timestamp = datetime.now(timezone.utc) + + record = SentimentScore( + ticker_id=ticker.id, + classification=classification, + confidence=confidence, + source=source, + timestamp=timestamp, + ) + db.add(record) + await db.commit() + await db.refresh(record) + return record + + +async def get_sentiment_scores( + db: AsyncSession, + symbol: str, + lookback_hours: float = 24, +) -> list[SentimentScore]: + """Get recent sentiment records within the lookback window.""" + ticker = await _get_ticker(db, symbol) + cutoff = datetime.now(timezone.utc) - timedelta(hours=lookback_hours) + + result = await db.execute( + select(SentimentScore) + .where( + SentimentScore.ticker_id == ticker.id, + SentimentScore.timestamp >= cutoff, + ) + .order_by(SentimentScore.timestamp.desc()) + ) + return list(result.scalars().all()) + + +def _classification_to_base_score(classification: str, confidence: int) -> float: + """Map classification + confidence to a base score (0-100). 
+ + bullish → confidence (high confidence = high score) + bearish → 100 - confidence (high confidence bearish = low score) + neutral → 50 + """ + cl = classification.lower() + if cl == "bullish": + return float(confidence) + elif cl == "bearish": + return float(100 - confidence) + else: + return 50.0 + + +async def compute_sentiment_dimension_score( + db: AsyncSession, + symbol: str, + lookback_hours: float = 24, + decay_rate: float = 0.1, +) -> float | None: + """Compute the sentiment dimension score using time-decay weighted average. + + Returns a score in [0, 100] or None if no scores exist in the window. + + Algorithm: + 1. For each score in the lookback window, compute base_score from + classification + confidence. + 2. Apply time decay: weight = exp(-decay_rate * hours_since_score). + 3. Weighted average: sum(base_score * weight) / sum(weight). + """ + scores = await get_sentiment_scores(db, symbol, lookback_hours) + if not scores: + return None + + now = datetime.now(timezone.utc) + weighted_sum = 0.0 + weight_total = 0.0 + + for score in scores: + ts = score.timestamp + if ts.tzinfo is None: + ts = ts.replace(tzinfo=timezone.utc) + hours_since = (now - ts).total_seconds() / 3600.0 + weight = math.exp(-decay_rate * hours_since) + base = _classification_to_base_score(score.classification, score.confidence) + weighted_sum += base * weight + weight_total += weight + + if weight_total == 0: + return None + + result = weighted_sum / weight_total + return max(0.0, min(100.0, result)) diff --git a/app/services/sr_service.py b/app/services/sr_service.py new file mode 100644 index 0000000..e7d3b62 --- /dev/null +++ b/app/services/sr_service.py @@ -0,0 +1,274 @@ +"""S/R Detector service. + +Detects support/resistance levels from Volume Profile (HVN/LVN) and +Pivot Points (swing highs/lows), assigns strength scores, merges nearby +levels, tags as support/resistance, and persists to DB. 
+""" + +from __future__ import annotations + +from datetime import datetime + +from sqlalchemy import delete, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import NotFoundError, ValidationError +from app.models.sr_level import SRLevel +from app.models.ticker import Ticker +from app.services.indicator_service import ( + _extract_ohlcv, + compute_pivot_points, + compute_volume_profile, +) +from app.services.price_service import query_ohlcv + +DEFAULT_TOLERANCE = 0.005 # 0.5% + + +async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker: + """Look up a ticker by symbol.""" + normalised = symbol.strip().upper() + result = await db.execute(select(Ticker).where(Ticker.symbol == normalised)) + ticker = result.scalar_one_or_none() + if ticker is None: + raise NotFoundError(f"Ticker not found: {normalised}") + return ticker + + +def _count_price_touches( + price_level: float, + highs: list[float], + lows: list[float], + closes: list[float], + tolerance: float = DEFAULT_TOLERANCE, +) -> int: + """Count how many bars touched/respected a price level within tolerance.""" + count = 0 + tol = price_level * tolerance if price_level != 0 else tolerance + for i in range(len(closes)): + # A bar "touches" the level if the level is within the bar's range + # (within tolerance) + if lows[i] - tol <= price_level <= highs[i] + tol: + count += 1 + return count + + +def _strength_from_touches(touches: int, total_bars: int) -> int: + """Convert touch count to a 0-100 strength score. + + More touches relative to total bars = higher strength. + Cap at 100. 
+ """ + if total_bars == 0: + return 0 + # Scale: each touch contributes proportionally, with a multiplier + # so that a level touched ~20% of bars gets score ~100 + raw = (touches / total_bars) * 500.0 + return max(0, min(100, int(round(raw)))) + + +def _extract_candidate_levels( + highs: list[float], + lows: list[float], + closes: list[float], + volumes: list[int], +) -> list[tuple[float, str]]: + """Extract candidate S/R levels from Volume Profile and Pivot Points. + + Returns list of (price_level, detection_method) tuples. + """ + candidates: list[tuple[float, str]] = [] + + # Volume Profile: HVN and LVN as candidate levels + try: + vp = compute_volume_profile(highs, lows, closes, volumes) + for price in vp.get("hvn", []): + candidates.append((price, "volume_profile")) + for price in vp.get("lvn", []): + candidates.append((price, "volume_profile")) + except ValidationError: + pass # Not enough data for volume profile + + # Pivot Points: swing highs and lows + try: + pp = compute_pivot_points(highs, lows, closes) + for price in pp.get("swing_highs", []): + candidates.append((price, "pivot_point")) + for price in pp.get("swing_lows", []): + candidates.append((price, "pivot_point")) + except ValidationError: + pass # Not enough data for pivot points + + return candidates + + +def _merge_levels( + levels: list[dict], + tolerance: float = DEFAULT_TOLERANCE, +) -> list[dict]: + """Merge levels within tolerance into consolidated levels. + + Levels from different methods within tolerance are merged. + Merged levels combine strength scores (capped at 100) and get + detection_method = "merged". 
+ """ + if not levels: + return [] + + # Sort by price + sorted_levels = sorted(levels, key=lambda x: x["price_level"]) + merged: list[dict] = [] + + for level in sorted_levels: + if not merged: + merged.append(dict(level)) + continue + + last = merged[-1] + ref_price = last["price_level"] + tol = ref_price * tolerance if ref_price != 0 else tolerance + + if abs(level["price_level"] - ref_price) <= tol: + # Merge: average price, combine strength, mark as merged + combined_strength = min(100, last["strength"] + level["strength"]) + avg_price = (last["price_level"] + level["price_level"]) / 2.0 + method = ( + "merged" + if last["detection_method"] != level["detection_method"] + else last["detection_method"] + ) + last["price_level"] = round(avg_price, 4) + last["strength"] = combined_strength + last["detection_method"] = method + else: + merged.append(dict(level)) + + return merged + + +def _tag_levels( + levels: list[dict], + current_price: float, +) -> list[dict]: + """Tag each level as 'support' or 'resistance' relative to current price.""" + for level in levels: + if level["price_level"] < current_price: + level["type"] = "support" + else: + level["type"] = "resistance" + return levels + + +def detect_sr_levels( + highs: list[float], + lows: list[float], + closes: list[float], + volumes: list[int], + tolerance: float = DEFAULT_TOLERANCE, +) -> list[dict]: + """Detect, score, merge, and tag S/R levels from OHLCV data. + + Returns list of dicts with keys: price_level, type, strength, + detection_method — sorted by strength descending. 
+ """ + if not closes: + return [] + + candidates = _extract_candidate_levels(highs, lows, closes, volumes) + if not candidates: + return [] + + total_bars = len(closes) + current_price = closes[-1] + + # Build level dicts with strength scores + raw_levels: list[dict] = [] + for price, method in candidates: + touches = _count_price_touches(price, highs, lows, closes, tolerance) + strength = _strength_from_touches(touches, total_bars) + raw_levels.append({ + "price_level": price, + "strength": strength, + "detection_method": method, + "type": "", # will be tagged after merge + }) + + # Merge nearby levels + merged = _merge_levels(raw_levels, tolerance) + + # Tag as support/resistance + tagged = _tag_levels(merged, current_price) + + # Sort by strength descending + tagged.sort(key=lambda x: x["strength"], reverse=True) + + return tagged + + +async def recalculate_sr_levels( + db: AsyncSession, + symbol: str, + tolerance: float = DEFAULT_TOLERANCE, +) -> list[SRLevel]: + """Recalculate S/R levels for a ticker and persist to DB. + + 1. Fetch OHLCV data + 2. Detect levels + 3. Delete old levels for ticker + 4. Insert new levels + 5. 
Return new levels sorted by strength desc + """ + ticker = await _get_ticker(db, symbol) + + records = await query_ohlcv(db, symbol) + if not records: + # No OHLCV data — clear any existing levels + await db.execute( + delete(SRLevel).where(SRLevel.ticker_id == ticker.id) + ) + await db.commit() + return [] + + _, highs, lows, closes, volumes = _extract_ohlcv(records) + + levels = detect_sr_levels(highs, lows, closes, volumes, tolerance) + + # Delete old levels + await db.execute( + delete(SRLevel).where(SRLevel.ticker_id == ticker.id) + ) + + # Insert new levels + now = datetime.utcnow() + new_models: list[SRLevel] = [] + for lvl in levels: + model = SRLevel( + ticker_id=ticker.id, + price_level=lvl["price_level"], + type=lvl["type"], + strength=lvl["strength"], + detection_method=lvl["detection_method"], + created_at=now, + ) + db.add(model) + new_models.append(model) + + await db.commit() + + # Refresh to get IDs + for m in new_models: + await db.refresh(m) + + return new_models + + +async def get_sr_levels( + db: AsyncSession, + symbol: str, + tolerance: float = DEFAULT_TOLERANCE, +) -> list[SRLevel]: + """Get S/R levels for a ticker, recalculating on every request (MVP). + + Returns levels sorted by strength descending. + """ + return await recalculate_sr_levels(db, symbol, tolerance) diff --git a/app/services/ticker_service.py b/app/services/ticker_service.py new file mode 100644 index 0000000..e083253 --- /dev/null +++ b/app/services/ticker_service.py @@ -0,0 +1,57 @@ +"""Ticker Registry service: add, delete, and list tracked tickers.""" + +import re + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import DuplicateError, NotFoundError, ValidationError +from app.models.ticker import Ticker + + +async def add_ticker(db: AsyncSession, symbol: str) -> Ticker: + """Add a new ticker after validation. + + Validates: non-empty, uppercase alphanumeric. Auto-uppercases input. 
+ Raises DuplicateError if symbol already tracked. + """ + stripped = symbol.strip() + if not stripped: + raise ValidationError("Ticker symbol must not be empty or whitespace-only") + + normalised = stripped.upper() + if not re.fullmatch(r"[A-Z0-9]+", normalised): + raise ValidationError( + f"Ticker symbol must be alphanumeric: {normalised}" + ) + + result = await db.execute(select(Ticker).where(Ticker.symbol == normalised)) + if result.scalar_one_or_none() is not None: + raise DuplicateError(f"Ticker already exists: {normalised}") + + ticker = Ticker(symbol=normalised) + db.add(ticker) + await db.commit() + await db.refresh(ticker) + return ticker + + +async def delete_ticker(db: AsyncSession, symbol: str) -> None: + """Delete a ticker and cascade all associated data. + + Raises NotFoundError if the symbol is not tracked. + """ + normalised = symbol.strip().upper() + result = await db.execute(select(Ticker).where(Ticker.symbol == normalised)) + ticker = result.scalar_one_or_none() + if ticker is None: + raise NotFoundError(f"Ticker not found: {normalised}") + + await db.delete(ticker) + await db.commit() + + +async def list_tickers(db: AsyncSession) -> list[Ticker]: + """Return all tracked tickers sorted alphabetically by symbol.""" + result = await db.execute(select(Ticker).order_by(Ticker.symbol.asc())) + return list(result.scalars().all()) diff --git a/app/services/watchlist_service.py b/app/services/watchlist_service.py new file mode 100644 index 0000000..a8fa4a8 --- /dev/null +++ b/app/services/watchlist_service.py @@ -0,0 +1,288 @@ +"""Watchlist service. + +Auto-populates top-X tickers by composite score (default 10), supports +manual add/remove (tagged, not subject to auto-population), enforces +cap (auto + 10 manual, default max 20), and updates auto entries on +score recomputation. 
+""" + +from __future__ import annotations + +import logging +from datetime import datetime, timezone + +from sqlalchemy import delete, func, select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.exceptions import DuplicateError, NotFoundError, ValidationError +from app.models.score import CompositeScore, DimensionScore +from app.models.sr_level import SRLevel +from app.models.ticker import Ticker +from app.models.trade_setup import TradeSetup +from app.models.watchlist import WatchlistEntry + +logger = logging.getLogger(__name__) + +DEFAULT_AUTO_SIZE = 10 +MAX_MANUAL = 10 + + +async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker: + normalised = symbol.strip().upper() + result = await db.execute(select(Ticker).where(Ticker.symbol == normalised)) + ticker = result.scalar_one_or_none() + if ticker is None: + raise NotFoundError(f"Ticker not found: {normalised}") + return ticker + + +async def auto_populate( + db: AsyncSession, + user_id: int, + top_x: int = DEFAULT_AUTO_SIZE, +) -> None: + """Auto-populate watchlist with top-X tickers by composite score. + + Replaces existing auto entries. Manual entries are untouched. 
+ """ + # Get top-X tickers by composite score (non-stale, descending) + stmt = ( + select(CompositeScore) + .where(CompositeScore.is_stale == False) # noqa: E712 + .order_by(CompositeScore.score.desc()) + .limit(top_x) + ) + result = await db.execute(stmt) + top_scores = list(result.scalars().all()) + top_ticker_ids = {cs.ticker_id for cs in top_scores} + + # Delete existing auto entries for this user + await db.execute( + delete(WatchlistEntry).where( + WatchlistEntry.user_id == user_id, + WatchlistEntry.entry_type == "auto", + ) + ) + + # Get manual ticker_ids so we don't duplicate + manual_result = await db.execute( + select(WatchlistEntry.ticker_id).where( + WatchlistEntry.user_id == user_id, + WatchlistEntry.entry_type == "manual", + ) + ) + manual_ticker_ids = {row[0] for row in manual_result.all()} + + now = datetime.now(timezone.utc) + for ticker_id in top_ticker_ids: + if ticker_id in manual_ticker_ids: + continue # Already on watchlist as manual + entry = WatchlistEntry( + user_id=user_id, + ticker_id=ticker_id, + entry_type="auto", + added_at=now, + ) + db.add(entry) + + await db.flush() + + +async def add_manual_entry( + db: AsyncSession, + user_id: int, + symbol: str, +) -> WatchlistEntry: + """Add a manual watchlist entry. + + Raises DuplicateError if already on watchlist. + Raises ValidationError if manual cap exceeded. 
+ """ + ticker = await _get_ticker(db, symbol) + + # Check if already on watchlist + existing = await db.execute( + select(WatchlistEntry).where( + WatchlistEntry.user_id == user_id, + WatchlistEntry.ticker_id == ticker.id, + ) + ) + if existing.scalar_one_or_none() is not None: + raise DuplicateError(f"Ticker already on watchlist: {ticker.symbol}") + + # Count current manual entries + count_result = await db.execute( + select(func.count()).select_from(WatchlistEntry).where( + WatchlistEntry.user_id == user_id, + WatchlistEntry.entry_type == "manual", + ) + ) + manual_count = count_result.scalar() or 0 + + if manual_count >= MAX_MANUAL: + raise ValidationError( + f"Manual watchlist cap reached ({MAX_MANUAL}). " + "Remove an entry before adding a new one." + ) + + # Check total cap + total_result = await db.execute( + select(func.count()).select_from(WatchlistEntry).where( + WatchlistEntry.user_id == user_id, + ) + ) + total_count = total_result.scalar() or 0 + max_total = DEFAULT_AUTO_SIZE + MAX_MANUAL + + if total_count >= max_total: + raise ValidationError( + f"Watchlist cap reached ({max_total}). " + "Remove an entry before adding a new one." 
+ ) + + entry = WatchlistEntry( + user_id=user_id, + ticker_id=ticker.id, + entry_type="manual", + added_at=datetime.now(timezone.utc), + ) + db.add(entry) + await db.commit() + await db.refresh(entry) + return entry + + +async def remove_entry( + db: AsyncSession, + user_id: int, + symbol: str, +) -> None: + """Remove a watchlist entry (manual or auto).""" + ticker = await _get_ticker(db, symbol) + + result = await db.execute( + select(WatchlistEntry).where( + WatchlistEntry.user_id == user_id, + WatchlistEntry.ticker_id == ticker.id, + ) + ) + entry = result.scalar_one_or_none() + if entry is None: + raise NotFoundError(f"Ticker not on watchlist: {ticker.symbol}") + + await db.delete(entry) + await db.commit() + + +async def _enrich_entry( + db: AsyncSession, + entry: WatchlistEntry, + symbol: str, +) -> dict: + """Build enriched watchlist entry dict with scores, R:R, and SR levels.""" + ticker_id = entry.ticker_id + + # Composite score + comp_result = await db.execute( + select(CompositeScore).where(CompositeScore.ticker_id == ticker_id) + ) + comp = comp_result.scalar_one_or_none() + + # Dimension scores + dim_result = await db.execute( + select(DimensionScore).where(DimensionScore.ticker_id == ticker_id) + ) + dims = [ + {"dimension": ds.dimension, "score": ds.score} + for ds in dim_result.scalars().all() + ] + + # Best trade setup (highest R:R) for this ticker + setup_result = await db.execute( + select(TradeSetup) + .where(TradeSetup.ticker_id == ticker_id) + .order_by(TradeSetup.rr_ratio.desc()) + .limit(1) + ) + setup = setup_result.scalar_one_or_none() + + # Active SR levels + sr_result = await db.execute( + select(SRLevel) + .where(SRLevel.ticker_id == ticker_id) + .order_by(SRLevel.strength.desc()) + ) + sr_levels = [ + { + "price_level": lv.price_level, + "type": lv.type, + "strength": lv.strength, + } + for lv in sr_result.scalars().all() + ] + + return { + "symbol": symbol, + "entry_type": entry.entry_type, + "composite_score": comp.score if comp 
else None, + "dimensions": dims, + "rr_ratio": setup.rr_ratio if setup else None, + "rr_direction": setup.direction if setup else None, + "sr_levels": sr_levels, + "added_at": entry.added_at, + } + + +async def get_watchlist( + db: AsyncSession, + user_id: int, + sort_by: str = "composite", +) -> list[dict]: + """Get user's watchlist with enriched data. + + Runs auto_populate first to ensure auto entries are current, + then enriches each entry with scores, R:R, and SR levels. + + sort_by: "composite", "rr", or a dimension name + (e.g. "technical", "sr_quality", "sentiment", "fundamental", "momentum"). + """ + # Auto-populate to refresh auto entries + await auto_populate(db, user_id) + await db.commit() + + # Fetch all entries with ticker symbol + stmt = ( + select(WatchlistEntry, Ticker.symbol) + .join(Ticker, WatchlistEntry.ticker_id == Ticker.id) + .where(WatchlistEntry.user_id == user_id) + ) + result = await db.execute(stmt) + rows = result.all() + + entries: list[dict] = [] + for entry, symbol in rows: + enriched = await _enrich_entry(db, entry, symbol) + entries.append(enriched) + + # Sort + if sort_by == "composite": + entries.sort( + key=lambda e: e["composite_score"] if e["composite_score"] is not None else -1, + reverse=True, + ) + elif sort_by == "rr": + entries.sort( + key=lambda e: e["rr_ratio"] if e["rr_ratio"] is not None else -1, + reverse=True, + ) + else: + # Sort by a specific dimension score + def _dim_sort_key(e: dict) -> float: + for d in e["dimensions"]: + if d["dimension"] == sort_by: + return d["score"] + return -1.0 + + entries.sort(key=_dim_sort_key, reverse=True) + + return entries diff --git a/deploy/nginx.conf b/deploy/nginx.conf new file mode 100644 index 0000000..c4835aa --- /dev/null +++ b/deploy/nginx.conf @@ -0,0 +1,30 @@ +# Nginx reverse proxy configuration for stock-data-backend +# Domain: signal.thiessen.io → localhost:8000 (uvicorn) +# +# Installation: +# sudo cp deploy/nginx.conf /etc/nginx/sites-available/stock-data-backend 
+# sudo ln -s /etc/nginx/sites-available/stock-data-backend /etc/nginx/sites-enabled/ +# sudo nginx -t && sudo systemctl reload nginx +# +# SSL/TLS (recommended): +# sudo apt install certbot python3-certbot-nginx +# sudo certbot --nginx -d signal.thiessen.io +# Certbot will automatically modify this file to add SSL directives. + +server { + listen 80; + server_name signal.thiessen.io; + + # Redirect all HTTP to HTTPS (uncomment after certbot setup) + # return 301 https://$host$request_uri; + + location / { + proxy_pass http://127.0.0.1:8000; + proxy_set_header Host $host; + proxy_set_header X-Real-IP $remote_addr; + proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + proxy_set_header X-Forwarded-Proto $scheme; + proxy_read_timeout 120s; + proxy_connect_timeout 10s; + } +} diff --git a/deploy/setup_db.sh b/deploy/setup_db.sh new file mode 100755 index 0000000..080e9eb --- /dev/null +++ b/deploy/setup_db.sh @@ -0,0 +1,44 @@ +#!/bin/bash +# Idempotent database setup script for stock-data-backend +# Creates PostgreSQL user and database if they don't exist, then runs migrations. +# Safe to run multiple times. +# +# Usage: +# chmod +x deploy/setup_db.sh +# ./deploy/setup_db.sh +# +# Customize these via environment variables: +# DB_NAME=stock_data_backend DB_USER=stock_backend DB_PASS=changeme ./deploy/setup_db.sh + +set -e + +DB_NAME="${DB_NAME:-stock_data_backend}" +DB_USER="${DB_USER:-stock_backend}" +DB_PASS="${DB_PASS:-changeme}" + +echo "Setting up database: ${DB_NAME} with user: ${DB_USER}" + +# Create role and database if they don't exist +sudo -u postgres psql < + + + + + Signal Dashboard + + + + + +
+ + + diff --git a/frontend/package-lock.json b/frontend/package-lock.json new file mode 100644 index 0000000..aaf6ebc --- /dev/null +++ b/frontend/package-lock.json @@ -0,0 +1,3392 @@ +{ + "name": "signal-dashboard", + "version": "0.1.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "signal-dashboard", + "version": "0.1.0", + "dependencies": { + "@tanstack/react-query": "^5.62.0", + "axios": "^1.7.9", + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-router-dom": "^6.28.0", + "recharts": "^2.14.1", + "zustand": "^5.0.1" + }, + "devDependencies": { + "@types/react": "^18.3.12", + "@types/react-dom": "^18.3.1", + "@vitejs/plugin-react": "^4.3.4", + "autoprefixer": "^10.4.20", + "postcss": "^8.4.49", + "tailwindcss": "^3.4.16", + "typescript": "~5.6.3", + "vite": "^5.4.11" + } + }, + "node_modules/@alloc/quick-lru": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/@alloc/quick-lru/-/quick-lru-5.2.0.tgz", + "integrity": "sha512-UrcABB+4bUrFABwbluTIBErXwvbsU/V7TZWfmbgJfbkwiBuziS9gxdODUyuiecfdGQ85jglMW6juS3+z5TsKLw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.29.0.tgz", + "integrity": "sha512-9NhCeYjq9+3uxgdtp20LSiJXJvN0FeCtNGpJxuMFZ1Kv3cWUNb6DOhJwUvcVCzKGR66cw4njwM6hrJLqgOwbcw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.28.5", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.29.0.tgz", + "integrity": "sha512-T1NCJqT/j9+cn8fvkt7jtwbLBfLC/1y1c7NtCeXFRgzGTsafi68MRv8yzkYSapBnFA6L3U2VSc02ciDzoAJhJg==", + "dev": true, + "license": "MIT", 
+ "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.29.0.tgz", + "integrity": "sha512-CGOfOJqWjg2qW/Mb6zNsDm+u5vFQ8DxXfbM09z69p5Z6+mE1ikP2jUXw+j42Pf1XTYED2Rni5f95npYeuwMDQA==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-compilation-targets": "^7.28.6", + "@babel/helper-module-transforms": "^7.28.6", + "@babel/helpers": "^7.28.6", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/traverse": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/remapping": "^2.3.5", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/generator": { + "version": "7.29.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.29.1.tgz", + "integrity": "sha512-qsaF+9Qcm2Qv8SRIMMscAvG4O3lJ0F1GuMo5HR/Bp02LopNgnZBC/EkbevHFeGs4ls/oPz9v+Bsmzbkbe+0dUw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.29.0", + "@babel/types": "^7.29.0", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.28.6.tgz", + "integrity": "sha512-JYtls3hqi15fcx5GaSNL7SCTJ2MNmjrkHXg4FSpOA/grxK8KwyZ5bubHsCq8FXCkua6xhuaaBit+3b7+VZRfcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.28.6", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": 
"^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@babel/helper-globals/-/helper-globals-7.28.0.tgz", + "integrity": "sha512-+W6cISkXFa1jXsDEdYA8HeevQT/FULhxzR99pxphltZcVaugps53THCeiWA8SguxxpSp3gKPiuYfSWopkLQ4hw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.28.6.tgz", + "integrity": "sha512-l5XkZK7r7wa9LucGw9LwZyyCUscb4x37JWTPz7swwFE/0FMQAGpiWUZn8u9DzkSBWEcK25jmvubfpw2dnAMdbw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.28.6.tgz", + "integrity": "sha512-67oXFAYr2cDLDVGLXTEABjdBJZ6drElUSI7WKp70NrpyISso3plG9SAGEF6y7zbha/wOzUByWWTJvEDVNIUGcA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.28.6", + "@babel/helper-validator-identifier": "^7.28.5", + "@babel/traverse": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.28.6.tgz", + "integrity": "sha512-S9gzZ/bz83GRysI7gAD4wPT/AI3uCnY+9xn+Mx/KPs2JwHJIz1W8PZkg2cqyt3RNOBM8ejcXhV6y8Og7ly/Dug==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.27.1.tgz", + "integrity": "sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.28.5", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.28.5.tgz", + "integrity": "sha512-qSs4ifwzKJSV39ucNjsvc6WVHs6b7S03sOh2OcHF9UHfVPqWWALUsNUVzhSBiItjRZoLHx7nIarVjqKVusUZ1Q==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.27.1.tgz", + "integrity": "sha512-YvjJow9FxbhFFKDSuFnVCe2WxXk1zWc22fFePVNEaWJEu8IrZVlda6N0uHwzZrUM1il7NC9Mlp4MaJYbYd9JSg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helpers": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.6.tgz", + "integrity": "sha512-xOBvwq86HHdB7WUDTfKfT/Vuxh7gElQ+Sfti2Cy6yIWNW05P8iUslOVcZ4/sKbE+/jQaukQAdz/gf3724kYdqw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/parser": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.29.0.tgz", + "integrity": "sha512-IyDgFV5GeDUVX4YdF/3CPULtVGSXXMLh1xVIgdCgxApktqnQV0r7/8Nqthg+8YLGaAtdyIlo2qIdZrbCv4+7ww==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.29.0" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-self": { + "version": "7.27.1", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-react-jsx-self/-/plugin-transform-react-jsx-self-7.27.1.tgz", + "integrity": "sha512-6UzkCs+ejGdZ5mFFC/OCUrv028ab2fp1znZmCZjAOBKiBK2jXD1O+BPSfX8X2qjJ75fZBMSnQn3Rq2mrBJK2mw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-react-jsx-source": { + "version": "7.27.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-react-jsx-source/-/plugin-transform-react-jsx-source-7.27.1.tgz", + "integrity": "sha512-zbwoTsBruTeKB9hSq73ha66iFeJHuaFkUbwvqElnygoNbj/jHRsSeokowZFN3CZ64IvEqcmmkVe89OPXc7ldAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/runtime": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.28.6.tgz", + "integrity": "sha512-05WQkdpL9COIMz4LjTxGpPNCdlpyimKppYNoJ5Di5EUObifl8t4tuLuUBBZEpoLYOmfvIWrsp9fCl0HoPRVTdA==", + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.28.6", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.28.6.tgz", + "integrity": "sha512-YA6Ma2KsCdGb+WC6UpBVFJGXL58MDA6oyONbjyF/+5sBgxY/dwkhLogbMT2GXXyU84/IhRw/2D1Os1B/giz+BQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.28.6", + "@babel/parser": "^7.28.6", + "@babel/types": "^7.28.6" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.29.0.tgz", + "integrity": "sha512-4HPiQr0X7+waHfyXPZpWPfWL/J7dcN1mx9gL6WdQVMbPnF3+ZhSMs8tCxN7oHddJE9fhNE7+lxdnlyemKfJRuA==", + "dev": 
true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.29.0", + "@babel/generator": "^7.29.0", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.29.0", + "@babel/template": "^7.28.6", + "@babel/types": "^7.29.0", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.29.0", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.29.0.tgz", + "integrity": "sha512-LwdZHpScM4Qz8Xw2iKSzS+cfglZzJGvofQICy7W7v4caru4EaAmyUuO6BGrbyQ2mYV11W0U8j5mBhd14dd3B0A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.28.5" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, 
+ "node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + 
"optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": 
"sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": 
"https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", + "integrity": "sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/remapping": { + "version": "2.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/remapping/-/remapping-2.3.5.tgz", + "integrity": "sha512-LI9u/+laYG4Ds1TDKSJW2YPrIlcVYOwi2fUC6xB43lueCjgxV4lffOCZCtYFiH6TNOX+tQKXx97T4IKHbhyHEQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz", + "integrity": "sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og==", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz", + "integrity": "sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@remix-run/router": { + "version": "1.23.2", + "resolved": "https://registry.npmjs.org/@remix-run/router/-/router-1.23.2.tgz", + "integrity": "sha512-Ic6m2U/rMjTkhERIa/0ZtXJP17QUi2CbWE7cqx4J58M8aA3QTfW+2UlQ4psvTX9IO1RfNVhK3pcpdjej7L+t2w==", + "license": "MIT", + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.27", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.27.tgz", + 
"integrity": "sha512-+d0F4MKMCbeVUJwG96uQ4SgAznZNSq93I3V+9NHA4OpvqG8mRCpGdKmK8l/dl02h2CCDHwW2FqilnTyDcAnqjA==", + "dev": true, + "license": "MIT" + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.57.1.tgz", + "integrity": "sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.57.1.tgz", + "integrity": "sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.57.1.tgz", + "integrity": "sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.57.1.tgz", + "integrity": "sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.57.1.tgz", + "integrity": 
"sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.57.1.tgz", + "integrity": "sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.57.1.tgz", + "integrity": "sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.57.1.tgz", + "integrity": "sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.57.1.tgz", + "integrity": "sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.57.1", + "resolved": 
"https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.57.1.tgz", + "integrity": "sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.57.1.tgz", + "integrity": "sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-loong64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-musl/-/rollup-linux-loong64-musl-4.57.1.tgz", + "integrity": "sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==", + "cpu": [ + "loong64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.57.1.tgz", + "integrity": "sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-ppc64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-musl/-/rollup-linux-ppc64-musl-4.57.1.tgz", + "integrity": "sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.57.1.tgz", + "integrity": "sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.57.1.tgz", + "integrity": "sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.57.1.tgz", + "integrity": "sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==", + "cpu": [ + "s390x" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.57.1.tgz", + "integrity": "sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.57.1.tgz", + "integrity": "sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", 
+ "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-openbsd-x64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openbsd-x64/-/rollup-openbsd-x64-4.57.1.tgz", + "integrity": "sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openbsd" + ] + }, + "node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.57.1.tgz", + "integrity": "sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.57.1.tgz", + "integrity": "sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.57.1.tgz", + "integrity": "sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==", + "cpu": [ + "ia32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.57.1.tgz", + "integrity": "sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==", + "cpu": [ + "x64" 
+ ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.57.1.tgz", + "integrity": "sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@tanstack/query-core": { + "version": "5.90.20", + "resolved": "https://registry.npmjs.org/@tanstack/query-core/-/query-core-5.90.20.tgz", + "integrity": "sha512-OMD2HLpNouXEfZJWcKeVKUgQ5n+n3A2JFmBaScpNDUqSrQSjiveC7dKMe53uJUg1nDG16ttFPz2xfilz6i2uVg==", + "license": "MIT", + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + } + }, + "node_modules/@tanstack/react-query": { + "version": "5.90.21", + "resolved": "https://registry.npmjs.org/@tanstack/react-query/-/react-query-5.90.21.tgz", + "integrity": "sha512-0Lu6y5t+tvlTJMTO7oh5NSpJfpg/5D41LlThfepTixPYkJ0sE2Jj0m0f6yYqujBwIXlId87e234+MxG3D3g7kg==", + "license": "MIT", + "dependencies": { + "@tanstack/query-core": "5.90.20" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/tannerlinsley" + }, + "peerDependencies": { + "react": "^18 || ^19" + } + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.27.0", + "resolved": 
"https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.27.0.tgz", + "integrity": "sha512-ufFd2Xi92OAVPYsy+P4n7/U7e68fex0+Ee8gSG9KX7eo084CWiQ4sdxktvdl0bOPupXtVJPY19zk6EwWqUQ8lg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.28.0", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.28.0.tgz", + "integrity": "sha512-8PvcXf70gTDZBgt9ptxJ8elBeBjcLOAcOtoO/mPJjtji1+CdGbHgm77om1GrsPxsiE+uXIpNSK64UYaIwQXd4Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.28.2" + } + }, + "node_modules/@types/d3-array": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/@types/d3-array/-/d3-array-3.2.2.tgz", + "integrity": "sha512-hOLWVbm7uRza0BYXpIIW5pxfrKe0W+D5lrFiAEYR+pb6w3N2SwSMaJbXdUfSEv+dT4MfHBLtn5js0LAWaO6otw==", + "license": "MIT" + }, + "node_modules/@types/d3-color": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/@types/d3-color/-/d3-color-3.1.3.tgz", + "integrity": "sha512-iO90scth9WAbmgv7ogoq57O9YpKmFBbmoEoCHDB2xMBY0+/KVrqAaCDyCE16dUspeOvIxFFRI+0sEtqDqy2b4A==", + "license": "MIT" + }, + "node_modules/@types/d3-ease": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-ease/-/d3-ease-3.0.2.tgz", + "integrity": "sha512-NcV1JjO5oDzoK26oMzbILE6HW7uVXOHLQvHshBUW4UMdZGfiY6v5BeQwh9a9tCzv+CeefZQHJt5SRgK154RtiA==", + "license": "MIT" + }, + "node_modules/@types/d3-interpolate": { + "version": "3.0.4", + "resolved": 
"https://registry.npmjs.org/@types/d3-interpolate/-/d3-interpolate-3.0.4.tgz", + "integrity": "sha512-mgLPETlrpVV1YRJIglr4Ez47g7Yxjl1lj7YKsiMCb27VJH9W8NVM6Bb9d8kkpG/uAQS5AmbA48q2IAolKKo1MA==", + "license": "MIT", + "dependencies": { + "@types/d3-color": "*" + } + }, + "node_modules/@types/d3-path": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/@types/d3-path/-/d3-path-3.1.1.tgz", + "integrity": "sha512-VMZBYyQvbGmWyWVea0EHs/BwLgxc+MKi1zLDCONksozI4YJMcTt8ZEuIR4Sb1MMTE8MMW49v0IwI5+b7RmfWlg==", + "license": "MIT" + }, + "node_modules/@types/d3-scale": { + "version": "4.0.9", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.9.tgz", + "integrity": "sha512-dLmtwB8zkAeO/juAMfnV+sItKjlsw2lKdZVVy6LRr0cBmegxSABiLEpGVmSJJ8O08i4+sGR6qQtb6WtuwJdvVw==", + "license": "MIT", + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-shape": { + "version": "3.1.8", + "resolved": "https://registry.npmjs.org/@types/d3-shape/-/d3-shape-3.1.8.tgz", + "integrity": "sha512-lae0iWfcDeR7qt7rA88BNiqdvPS5pFVPpo5OfjElwNaT2yyekbM0C9vK+yqBqEmHr6lDkRnYNoTBYlAgJa7a4w==", + "license": "MIT", + "dependencies": { + "@types/d3-path": "*" + } + }, + "node_modules/@types/d3-time": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.4.tgz", + "integrity": "sha512-yuzZug1nkAAaBlBBikKZTgzCeA+k1uy4ZFwWANOfKw5z5LRhV0gNA7gNkKm7HoK+HRN0wX3EkxGk0fpbWhmB7g==", + "license": "MIT" + }, + "node_modules/@types/d3-timer": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/@types/d3-timer/-/d3-timer-3.0.2.tgz", + "integrity": "sha512-Ps3T8E8dZDam6fUyNiMkekK3XUsaUEik+idO9/YjPtfj2qruF8tFBXS7XhtE4iIXBLxhmLjP3SXpLhVf21I9Lw==", + "license": "MIT" + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz", + "integrity": "sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==", + "dev": 
true, + "license": "MIT" + }, + "node_modules/@types/prop-types": { + "version": "15.7.15", + "resolved": "https://registry.npmjs.org/@types/prop-types/-/prop-types-15.7.15.tgz", + "integrity": "sha512-F6bEyamV9jKGAFBEmlQnesRPGOQqS2+Uwi0Em15xenOxHaf2hv6L8YCVn3rPdPJOiJfPiCnLIRyvwVaqMY3MIw==", + "devOptional": true, + "license": "MIT" + }, + "node_modules/@types/react": { + "version": "18.3.28", + "resolved": "https://registry.npmjs.org/@types/react/-/react-18.3.28.tgz", + "integrity": "sha512-z9VXpC7MWrhfWipitjNdgCauoMLRdIILQsAEV+ZesIzBq/oUlxk0m3ApZuMFCXdnS4U7KrI+l3WRUEGQ8K1QKw==", + "devOptional": true, + "license": "MIT", + "peer": true, + "dependencies": { + "@types/prop-types": "*", + "csstype": "^3.2.2" + } + }, + "node_modules/@types/react-dom": { + "version": "18.3.7", + "resolved": "https://registry.npmjs.org/@types/react-dom/-/react-dom-18.3.7.tgz", + "integrity": "sha512-MEe3UeoENYVFXzoXEWsvcpg6ZvlrFNlOQ7EOsvhI3CfAXwzPfO8Qwuxd40nepsYKqyyVQnTdEfv68q91yLcKrQ==", + "dev": true, + "license": "MIT", + "peerDependencies": { + "@types/react": "^18.0.0" + } + }, + "node_modules/@vitejs/plugin-react": { + "version": "4.7.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-react/-/plugin-react-4.7.0.tgz", + "integrity": "sha512-gUu9hwfWvvEDBBmgtAowQCojwZmJ5mcLn3aufeCsitijs3+f2NsrPtlAWIR6OPiqljl96GVCUbLe0HyqIpVaoA==", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/core": "^7.28.0", + "@babel/plugin-transform-react-jsx-self": "^7.27.1", + "@babel/plugin-transform-react-jsx-source": "^7.27.1", + "@rolldown/pluginutils": "1.0.0-beta.27", + "@types/babel__core": "^7.20.5", + "react-refresh": "^0.17.0" + }, + "engines": { + "node": "^14.18.0 || >=16.0.0" + }, + "peerDependencies": { + "vite": "^4.2.0 || ^5.0.0 || ^6.0.0 || ^7.0.0" + } + }, + "node_modules/any-promise": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/any-promise/-/any-promise-1.3.0.tgz", + "integrity": 
"sha512-7UvmKalWRt1wgjL1RrGxoSJW/0QZFIegpeGvZG9kjp8vrRu55XTHbwnqq2GpXm9uLbcuhxm3IqX9OB4MZR1b2A==", + "dev": true, + "license": "MIT" + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true, + "license": "MIT" + }, + "node_modules/asynckit": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", + "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==", + "license": "MIT" + }, + "node_modules/autoprefixer": { + "version": "10.4.24", + "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.24.tgz", + "integrity": "sha512-uHZg7N9ULTVbutaIsDRoUkoS8/h3bdsmVJYZ5l3wv8Cp/6UIIoRDm90hZ+BwxUj/hGBEzLxdHNSKuFpn8WOyZw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "browserslist": "^4.28.1", + "caniuse-lite": "^1.0.30001766", + "fraction.js": "^5.3.4", + "picocolors": "^1.1.1", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/axios": { + "version": 
"1.13.5", + "resolved": "https://registry.npmjs.org/axios/-/axios-1.13.5.tgz", + "integrity": "sha512-cz4ur7Vb0xS4/KUN0tPWe44eqxrIu31me+fbang3ijiNscE129POzipJJA6zniq2C/Z6sJCjMimjS8Lc/GAs8Q==", + "license": "MIT", + "dependencies": { + "follow-redirects": "^1.15.11", + "form-data": "^4.0.5", + "proxy-from-env": "^1.1.0" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.10.0", + "resolved": "https://registry.npmjs.org/baseline-browser-mapping/-/baseline-browser-mapping-2.10.0.tgz", + "integrity": "sha512-lIyg0szRfYbiy67j9KN8IyeD7q7hcmqnJ1ddWmNt19ItGpNN64mnllmxUNFIOdOm6by97jlL6wfpTTJrmnjWAA==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.cjs" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz", + "integrity": "sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.28.1", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.28.1.tgz", + "integrity": "sha512-ZC5Bd0LgJXgwGqUknZY/vkUQ04r8NXnJZ3yYi4vDmSiZmC/pdSN0NbNRPxZpbtO4uAfDUAFffO8IZoM3Gj8IkA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + 
}, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "baseline-browser-mapping": "^2.9.0", + "caniuse-lite": "^1.0.30001759", + "electron-to-chromium": "^1.5.263", + "node-releases": "^2.0.27", + "update-browserslist-db": "^1.2.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz", + "integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/camelcase-css": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/camelcase-css/-/camelcase-css-2.0.1.tgz", + "integrity": "sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001770", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001770.tgz", + "integrity": "sha512-x/2CLQ1jHENRbHg5PSId2sXq1CIO1CISvwWAj027ltMVG2UNgW+w9oH2+HzgEIRFembL8bUlXtfbBHR1fCg2xw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "CC-BY-4.0" + }, + "node_modules/chokidar": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz", + "integrity": 
"sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/clsx": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz", + "integrity": "sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==", + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/combined-stream": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", + "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", + "license": "MIT", + "dependencies": { + "delayed-stream": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/commander": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/commander/-/commander-4.1.1.tgz", + "integrity": "sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true, + "license": "MIT" + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "license": "MIT", + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/csstype": { + "version": "3.2.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.2.3.tgz", + "integrity": "sha512-z1HGKcYy2xA8AGQfwrn0PAy+PB7X/GSj3UVJW9qKyn43xWa+gl5nXmU4qqLMRzWVLFC8KusUX8T/0kCiOYpAIQ==", + "license": "MIT" + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "license": "ISC", + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": "sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "license": "BSD-3-Clause", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.2.tgz", + "integrity": 
"sha512-AJDdYOdnyRDV5b6ArilzCPPwc1ejkHcoyFarqlPqT7zRYjhavcT3uSrqcMvsgh2CgoPbK3RCwyHaVyxYcP2Arg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "license": "ISC", + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "license": "ISC", + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "license": "ISC", + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "license": "ISC", + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + 
"node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "license": "ISC", + "dependencies": { + "d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.3.tgz", + "integrity": "sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA==", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decimal.js-light": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/decimal.js-light/-/decimal.js-light-2.5.1.tgz", + "integrity": "sha512-qIMFpTMZmny+MMIitAB6D7iVPEorVw6YQRWkvarTkT4tBeSLLiHzcwj6q0MmYSFCiVpiqPJTJEYIrpcPzVEIvg==", + "license": "MIT" + }, + "node_modules/delayed-stream": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", + "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", + "license": "MIT", + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/didyoumean": { + "version": "1.2.2", + "resolved": "https://registry.npmjs.org/didyoumean/-/didyoumean-1.2.2.tgz", + "integrity": "sha512-gxtyfqMg7GKyhQmb056K7M3xszy/myH8w+B4RT+QXBQsvAOdc3XymqDDPHx1BgPgsdAA5SIifona89YtRATDzw==", + "dev": true, + "license": "Apache-2.0" + }, + 
"node_modules/dlv": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/dlv/-/dlv-1.1.3.tgz", + "integrity": "sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA==", + "dev": true, + "license": "MIT" + }, + "node_modules/dom-helpers": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/dom-helpers/-/dom-helpers-5.2.1.tgz", + "integrity": "sha512-nRCa7CK3VTrM2NmGkIy4cbK7IZlgBE/PYMn55rrXefr5xXDP0LdtfPnblFDoVdcAfslJ7or6iqAUnx0CCGIWQA==", + "license": "MIT", + "dependencies": { + "@babel/runtime": "^7.8.7", + "csstype": "^3.0.2" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz", + "integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.302", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.302.tgz", + "integrity": "sha512-sM6HAN2LyK82IyPBpznDRqlTQAtuSaO+ShzFiWTvoMJLHyZ+Y39r8VMfHzwbU8MVBzQ4Wdn85+wlZl2TLGIlwg==", + "dev": true, + "license": "ISC" + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz", + "integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz", + "integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + 
"node_modules/es-object-atoms": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz", + "integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-set-tostringtag": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz", + "integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==", + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.6", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", 
+ "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/eventemitter3/-/eventemitter3-4.0.7.tgz", + "integrity": "sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw==", + "license": "MIT" + }, + "node_modules/fast-equals": { + "version": "5.4.0", + "resolved": "https://registry.npmjs.org/fast-equals/-/fast-equals-5.4.0.tgz", + "integrity": "sha512-jt2DW/aNFNwke7AUd+Z+e6pz39KO5rzdbbFCg2sGafS4mk13MI7Z8O5z9cADNn5lhGODIgLwug6TZO2ctf7kcw==", + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/fast-glob": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz", + "integrity": "sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg==", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.8" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-glob/node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fastq": { + "version": "1.20.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.20.1.tgz", + "integrity": 
"sha512-GGToxJ/w1x32s/D2EKND7kTil4n8OVk/9mycTc4VDza13lOvpUZTGX3mFSCtV9ksdGBVzvsyAVLM6mHFThxXxw==", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz", + "integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==", + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/form-data": { + "version": "4.0.5", + "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.5.tgz", + "integrity": "sha512-8RipRLol37bNs2bhoV67fiTEvdTrbMUYcFTiy3+wuuOnUog2QBHCZWXDRijWQfAkhBj2Uf5UnVaiWwA5vdd82w==", + "license": "MIT", + "dependencies": { + "asynckit": "^0.4.0", + "combined-stream": "^1.0.8", + "es-set-tostringtag": "^2.1.0", + "hasown": "^2.0.2", + "mime-types": "^2.1.12" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/fraction.js": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/fraction.js/-/fraction.js-5.3.4.tgz", + "integrity": "sha512-1X1NTtiJphryn/uLQz3whtY6jK3fTqoE3ohKs0tT+Ujr1W59oopxmoEh7Lu5p6vBaPbgoM0bzveAW4Qi5RyWDQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": 
"https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz", + "integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==", + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz", + "integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==", + "license": "MIT", + "dependencies": { + 
"dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/glob-parent": { + "version": "6.0.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz", + "integrity": "sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz", + "integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz", + "integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz", + "integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==", + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + 
}, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz", + "integrity": "sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.16.1.tgz", + "integrity": "sha512-UfoeMA6fIJ8wTYFEUjelnaGI67v6+N7qXJEvQuIGa99l4xsCruSYOVSQ0uPANn4dAzm8lkYPaKLrrijLq7x23w==", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": 
"sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.1.0.tgz", + "integrity": "sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json5": { + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": "sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/lilconfig": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-3.1.3.tgz", + "integrity": "sha512-/vlFKAoH5Cgt3Ie+JLhRbwOsCQePABiU3tJ1egGvyQ+33R/vcwM2Zl2QR/LzjsBeItPt3oSVXapn+m4nQDvpzw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/antonk52" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "resolved": "https://registry.npmjs.org/lines-and-columns/-/lines-and-columns-1.2.4.tgz", + 
"integrity": "sha512-7ylylesZQ/PV29jhEDl3Ufjo6ZX7gCqJr5F7PKrqc93v7fzSymt1BpwEU8nAUXs8qzzvqhbjhK5QZg6Mt/HkBg==", + "dev": true, + "license": "MIT" + }, + "node_modules/lodash": { + "version": "4.17.23", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz", + "integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==", + "license": "MIT" + }, + "node_modules/loose-envify": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", + "integrity": "sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==", + "license": "MIT", + "dependencies": { + "js-tokens": "^3.0.0 || ^4.0.0" + }, + "bin": { + "loose-envify": "cli.js" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz", + "integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==", + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + 
"dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", + "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", + "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/mz": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/mz/-/mz-2.7.0.tgz", + "integrity": "sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0", + "object-assign": "^4.0.1", + "thenify-all": "^1.0.0" + } + }, + "node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/node-releases": { + "version": "2.0.27", + "resolved": 
"https://registry.npmjs.org/node-releases/-/node-releases-2.0.27.tgz", + "integrity": "sha512-nmh3lCkYZ3grZvqcCH+fjmQ7X+H0OeZgP40OierEaAptX4XofMh5kwNbWh7lBduUzCcV/8kZ+NDLCwm2iorIlA==", + "dev": true, + "license": "MIT" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", + "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/object-hash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/object-hash/-/object-hash-3.0.0.tgz", + "integrity": "sha512-RSn9F68PjH9HqtltsSnqYC1XXoWe9Bju5+213R98cNGttag9q9yAOTzdbsqvIa7aNm5WffBZFpWYr2aWrklWAw==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==", + "dev": true, + "license": "MIT" + }, + "node_modules/picocolors": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz", + "integrity": "sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": 
"sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/pirates": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.7.tgz", + "integrity": "sha512-TfySrs/5nm8fQJDcBDuUng3VOUKsd7S+zqvbOTiGXHfxX4wK31ard+hoNuvkicM/2YFzlpDgABOevKSsB4G/FA==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6" + } + }, + "node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "peer": true, + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-import": { + "version": "15.1.0", + "resolved": "https://registry.npmjs.org/postcss-import/-/postcss-import-15.1.0.tgz", + "integrity": "sha512-hpr+J05B2FVYUAXHeK1YyI267J/dDDhMU6B6civm8hSY1jYJnBXxzKDKDswzJmtLHryrjhnDjqqp/49t8FALew==", + "dev": true, + "license": "MIT", + "dependencies": { + "postcss-value-parser": "^4.0.0", + "read-cache": "^1.0.0", + "resolve": "^1.1.7" + }, + "engines": { + 
"node": ">=14.0.0" + }, + "peerDependencies": { + "postcss": "^8.0.0" + } + }, + "node_modules/postcss-js": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/postcss-js/-/postcss-js-4.1.0.tgz", + "integrity": "sha512-oIAOTqgIo7q2EOwbhb8UalYePMvYoIeRY2YKntdpFQXNosSu3vLrniGgmH9OKs/qAkfoj5oB3le/7mINW1LCfw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "camelcase-css": "^2.0.1" + }, + "engines": { + "node": "^12 || ^14 || >= 16" + }, + "peerDependencies": { + "postcss": "^8.4.21" + } + }, + "node_modules/postcss-load-config": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/postcss-load-config/-/postcss-load-config-6.0.1.tgz", + "integrity": "sha512-oPtTM4oerL+UXmx+93ytZVN82RrlY/wPUV8IeDxFrzIjXOLF1pN+EmKPLbubvKHT2HC20xXsCAH2Z+CKV6Oz/g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "lilconfig": "^3.1.1" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "jiti": ">=1.21.0", + "postcss": ">=8.0.9", + "tsx": "^4.8.1", + "yaml": "^2.4.2" + }, + "peerDependenciesMeta": { + "jiti": { + "optional": true + }, + "postcss": { + "optional": true + }, + "tsx": { + "optional": true + }, + "yaml": { + "optional": true + } + } + }, + "node_modules/postcss-nested": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/postcss-nested/-/postcss-nested-6.2.0.tgz", + "integrity": "sha512-HQbt28KulC5AJzG+cZtj9kvKB93CFCdLvog1WFLf1D+xmMvPGlBstkpTEZfK5+AN9hfJocyBFCNiqyS48bpgzQ==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + 
], + "license": "MIT", + "dependencies": { + "postcss-selector-parser": "^6.1.1" + }, + "engines": { + "node": ">=12.0" + }, + "peerDependencies": { + "postcss": "^8.2.14" + } + }, + "node_modules/postcss-selector-parser": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-6.1.2.tgz", + "integrity": "sha512-Q8qQfPiZ+THO/3ZrOrO0cJJKfpYCagtMUkXbnEfmgUjwXg6z/WBeOyS9APBBPCTSiDV+s4SwQGu8yFsiMRIudg==", + "dev": true, + "license": "MIT", + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/postcss-value-parser/-/postcss-value-parser-4.2.0.tgz", + "integrity": "sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==", + "dev": true, + "license": "MIT" + }, + "node_modules/prop-types": { + "version": "15.8.1", + "resolved": "https://registry.npmjs.org/prop-types/-/prop-types-15.8.1.tgz", + "integrity": "sha512-oj87CgZICdulUohogVAR7AjlC0327U4el4L6eAvOqCeudMDVU0NThNaV+b9Df4dXgSP1gXMTnPdhfe/2qDH5cg==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.4.0", + "object-assign": "^4.1.1", + "react-is": "^16.13.1" + } + }, + "node_modules/prop-types/node_modules/react-is": { + "version": "16.13.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-16.13.1.tgz", + "integrity": "sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==", + "license": "MIT" + }, + "node_modules/proxy-from-env": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz", + "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==", + "license": "MIT" + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": 
"https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/react": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react/-/react-18.3.1.tgz", + "integrity": "sha512-wS+hAgJShR0KhEvPJArfuPVN1+Hz1t0Y6n5jLrGQbkb4urgPE/0Rve+1kMB1v/oWgHgm4WIcV+i7F2pTVj+2iQ==", + "license": "MIT", + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-dom": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-dom/-/react-dom-18.3.1.tgz", + "integrity": "sha512-5m4nQKp+rZRb09LNH59GM4BxTh9251/ylbKIbpe7TpGxfJ+9kv6BLkLBXIjjspbgbnIBNqlI23tRnTWT0snUIw==", + "license": "MIT", + "peer": true, + "dependencies": { + "loose-envify": "^1.1.0", + "scheduler": "^0.23.2" + }, + "peerDependencies": { + "react": "^18.3.1" + } + }, + "node_modules/react-is": { + "version": "18.3.1", + "resolved": "https://registry.npmjs.org/react-is/-/react-is-18.3.1.tgz", + "integrity": "sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==", + "license": "MIT" + }, + "node_modules/react-refresh": { + "version": "0.17.0", + "resolved": "https://registry.npmjs.org/react-refresh/-/react-refresh-0.17.0.tgz", + "integrity": "sha512-z6F7K9bV85EfseRCp2bzrpyQ0Gkw1uLoCel9XBVWPg/TjRj94SkJzUTGfOa4bs7iJvBWtQG0Wq7wnI0syw3EBQ==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/react-router": { + "version": "6.30.3", + "resolved": "https://registry.npmjs.org/react-router/-/react-router-6.30.3.tgz", + "integrity": 
"sha512-XRnlbKMTmktBkjCLE8/XcZFlnHvr2Ltdr1eJX4idL55/9BbORzyZEaIkBFDhFGCEWBBItsVrDxwx3gnisMitdw==", + "license": "MIT", + "dependencies": { + "@remix-run/router": "1.23.2" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8" + } + }, + "node_modules/react-router-dom": { + "version": "6.30.3", + "resolved": "https://registry.npmjs.org/react-router-dom/-/react-router-dom-6.30.3.tgz", + "integrity": "sha512-pxPcv1AczD4vso7G4Z3TKcvlxK7g7TNt3/FNGMhfqyntocvYKj+GCatfigGDjbLozC4baguJ0ReCigoDJXb0ag==", + "license": "MIT", + "dependencies": { + "@remix-run/router": "1.23.2", + "react-router": "6.30.3" + }, + "engines": { + "node": ">=14.0.0" + }, + "peerDependencies": { + "react": ">=16.8", + "react-dom": ">=16.8" + } + }, + "node_modules/react-smooth": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/react-smooth/-/react-smooth-4.0.4.tgz", + "integrity": "sha512-gnGKTpYwqL0Iii09gHobNolvX4Kiq4PKx6eWBCYYix+8cdw+cGo3do906l1NBPKkSWx1DghC1dlWG9L2uGd61Q==", + "license": "MIT", + "dependencies": { + "fast-equals": "^5.0.1", + "prop-types": "^15.8.1", + "react-transition-group": "^4.4.5" + }, + "peerDependencies": { + "react": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.8.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/react-transition-group": { + "version": "4.4.5", + "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz", + "integrity": "sha512-pZcd1MCJoiKiBR2NRxeCRg13uCXbydPnmB4EOeRrY7480qNWO8IIgQG6zlDkm6uRMsURXPuKq0GWtiM59a5Q6g==", + "license": "BSD-3-Clause", + "dependencies": { + "@babel/runtime": "^7.5.5", + "dom-helpers": "^5.0.1", + "loose-envify": "^1.4.0", + "prop-types": "^15.6.2" + }, + "peerDependencies": { + "react": ">=16.6.0", + "react-dom": ">=16.6.0" + } + }, + "node_modules/read-cache": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/read-cache/-/read-cache-1.0.0.tgz", + "integrity": 
"sha512-Owdv/Ft7IjOgm/i0xvNDZ1LrRANRfew4b2prF3OWMQLxLfu3bS8FVhCsrSCMK4lR56Y9ya+AThoTpDCTxCmpRA==", + "dev": true, + "license": "MIT", + "dependencies": { + "pify": "^2.3.0" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz", + "integrity": "sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/recharts": { + "version": "2.15.4", + "resolved": "https://registry.npmjs.org/recharts/-/recharts-2.15.4.tgz", + "integrity": "sha512-UT/q6fwS3c1dHbXv2uFgYJ9BMFHu3fwnd7AYZaEQhXuYQ4hgsxLvsUXzGdKeZrW5xopzDCvuA2N41WJ88I7zIw==", + "license": "MIT", + "dependencies": { + "clsx": "^2.0.0", + "eventemitter3": "^4.0.1", + "lodash": "^4.17.21", + "react-is": "^18.3.1", + "react-smooth": "^4.0.4", + "recharts-scale": "^0.4.4", + "tiny-invariant": "^1.3.1", + "victory-vendor": "^36.6.8" + }, + "engines": { + "node": ">=14" + }, + "peerDependencies": { + "react": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0", + "react-dom": "^16.0.0 || ^17.0.0 || ^18.0.0 || ^19.0.0" + } + }, + "node_modules/recharts-scale": { + "version": "0.4.5", + "resolved": "https://registry.npmjs.org/recharts-scale/-/recharts-scale-0.4.5.tgz", + "integrity": "sha512-kivNFO+0OcUNu7jQquLXAxz1FIwZj8nrj+YkOKc5694NbjCvcT6aSZiIzNzd2Kul4o4rTto8QVR9lMNtxD4G1w==", + "license": "MIT", + "dependencies": { + "decimal.js-light": "^2.4.1" + } + }, + "node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": 
"bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz", + "integrity": "sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rollup": { + "version": "4.57.1", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.57.1.tgz", + "integrity": "sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.57.1", + "@rollup/rollup-android-arm64": "4.57.1", + "@rollup/rollup-darwin-arm64": "4.57.1", + "@rollup/rollup-darwin-x64": "4.57.1", + "@rollup/rollup-freebsd-arm64": "4.57.1", + "@rollup/rollup-freebsd-x64": "4.57.1", + "@rollup/rollup-linux-arm-gnueabihf": "4.57.1", + "@rollup/rollup-linux-arm-musleabihf": "4.57.1", + "@rollup/rollup-linux-arm64-gnu": "4.57.1", + "@rollup/rollup-linux-arm64-musl": "4.57.1", + "@rollup/rollup-linux-loong64-gnu": "4.57.1", + "@rollup/rollup-linux-loong64-musl": "4.57.1", + "@rollup/rollup-linux-ppc64-gnu": "4.57.1", + "@rollup/rollup-linux-ppc64-musl": "4.57.1", + "@rollup/rollup-linux-riscv64-gnu": "4.57.1", + "@rollup/rollup-linux-riscv64-musl": "4.57.1", + "@rollup/rollup-linux-s390x-gnu": "4.57.1", + "@rollup/rollup-linux-x64-gnu": "4.57.1", + "@rollup/rollup-linux-x64-musl": "4.57.1", + "@rollup/rollup-openbsd-x64": "4.57.1", + "@rollup/rollup-openharmony-arm64": "4.57.1", + "@rollup/rollup-win32-arm64-msvc": "4.57.1", + "@rollup/rollup-win32-ia32-msvc": "4.57.1", + 
"@rollup/rollup-win32-x64-gnu": "4.57.1", + "@rollup/rollup-win32-x64-msvc": "4.57.1", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/scheduler": { + "version": "0.23.2", + "resolved": "https://registry.npmjs.org/scheduler/-/scheduler-0.23.2.tgz", + "integrity": "sha512-UOShsPwz7NrMUqhR6t0hWjFduvOzbtv7toDH1/hIrfRNIDBnnBWd0CwJTGvTpngVlmwGCdP9/Zl/tVrDqcuYzQ==", + "license": "MIT", + "dependencies": { + "loose-envify": "^1.1.0" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.1.tgz", + "integrity": "sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/sucrase": { + "version": "3.35.1", + "resolved": "https://registry.npmjs.org/sucrase/-/sucrase-3.35.1.tgz", + "integrity": "sha512-DhuTmvZWux4H1UOnWMB3sk0sbaCVOoQZjv8u1rDoTV0HTdGem9hkAZtl4JZy8P2z4Bg0nT+YMeOFyVr4zcG5Tw==", + "dev": true, + "license": "MIT", + "dependencies": { + 
"@jridgewell/gen-mapping": "^0.3.2", + "commander": "^4.0.0", + "lines-and-columns": "^1.1.6", + "mz": "^2.7.0", + "pirates": "^4.0.1", + "tinyglobby": "^0.2.11", + "ts-interface-checker": "^0.1.9" + }, + "bin": { + "sucrase": "bin/sucrase", + "sucrase-node": "bin/sucrase-node" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/tailwindcss": { + "version": "3.4.19", + "resolved": "https://registry.npmjs.org/tailwindcss/-/tailwindcss-3.4.19.tgz", + "integrity": "sha512-3ofp+LL8E+pK/JuPLPggVAIaEuhvIz4qNcf3nA1Xn2o/7fb7s/TYpHhwGDv1ZU3PkBluUVaF8PyCHcm48cKLWQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "@alloc/quick-lru": "^5.2.0", + "arg": "^5.0.2", + "chokidar": "^3.6.0", + "didyoumean": "^1.2.2", + "dlv": "^1.1.3", + "fast-glob": "^3.3.2", + "glob-parent": "^6.0.2", + "is-glob": "^4.0.3", + "jiti": "^1.21.7", + "lilconfig": "^3.1.3", + "micromatch": "^4.0.8", + "normalize-path": "^3.0.0", + "object-hash": "^3.0.0", + "picocolors": "^1.1.1", + "postcss": "^8.4.47", + "postcss-import": "^15.1.0", + "postcss-js": "^4.0.1", + "postcss-load-config": "^4.0.2 || ^5.0 || ^6.0", + "postcss-nested": "^6.2.0", + "postcss-selector-parser": "^6.1.2", + "resolve": "^1.22.8", + "sucrase": "^3.35.0" + }, + "bin": { + "tailwind": "lib/cli.js", + "tailwindcss": "lib/cli.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/thenify": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/thenify/-/thenify-3.3.1.tgz", + "integrity": 
"sha512-RVZSIV5IG10Hk3enotrhvz0T9em6cyHBLkH/YAZuKqd8hRkKhSfCGIcP2KUY0EPxndzANBmNllzWPwak+bheSw==", + "dev": true, + "license": "MIT", + "dependencies": { + "any-promise": "^1.0.0" + } + }, + "node_modules/thenify-all": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/thenify-all/-/thenify-all-1.6.0.tgz", + "integrity": "sha512-RNxQH/qI8/t3thXJDwcstUO4zeqo64+Uy/+sNVRBx4Xn2OX+OZ9oP+iJnNFqplFra2ZUVeKCSa2oVWi3T4uVmA==", + "dev": true, + "license": "MIT", + "dependencies": { + "thenify": ">= 3.1.0 < 4" + }, + "engines": { + "node": ">=0.8" + } + }, + "node_modules/tiny-invariant": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/tiny-invariant/-/tiny-invariant-1.3.3.tgz", + "integrity": "sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==", + "license": "MIT" + }, + "node_modules/tinyglobby": { + "version": "0.2.15", + "resolved": "https://registry.npmjs.org/tinyglobby/-/tinyglobby-0.2.15.tgz", + "integrity": "sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "fdir": "^6.5.0", + "picomatch": "^4.0.3" + }, + "engines": { + "node": ">=12.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/SuperchupuDev" + } + }, + "node_modules/tinyglobby/node_modules/fdir": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/fdir/-/fdir-6.5.0.tgz", + "integrity": "sha512-tIbYtZbucOs0BRGqPJkshJUYdL+SDH7dVM8gjy+ERp3WAUjLEFJE+02kanyHtwjWOnwrKYBiwAmM0p4kLJAnXg==", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "picomatch": "^3 || ^4" + }, + "peerDependenciesMeta": { + "picomatch": { + "optional": true + } + } + }, + "node_modules/tinyglobby/node_modules/picomatch": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz", + "integrity": 
"sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==", + "dev": true, + "license": "MIT", + "peer": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/ts-interface-checker": { + "version": "0.1.13", + "resolved": "https://registry.npmjs.org/ts-interface-checker/-/ts-interface-checker-0.1.13.tgz", + "integrity": "sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/typescript": { + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", + "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", + "dev": true, + "license": "Apache-2.0", + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.2.3.tgz", + "integrity": "sha512-Js0m9cx+qOgDxo0eMiFGEueWztz+d4+M3rGlmKPT+T4IS/jP4ylw3Nwpu6cpTTP8R1MAC1kF4VbdLt3ARf209w==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", 
+ "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", + "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==", + "dev": true, + "license": "MIT" + }, + "node_modules/victory-vendor": { + "version": "36.9.2", + "resolved": "https://registry.npmjs.org/victory-vendor/-/victory-vendor-36.9.2.tgz", + "integrity": "sha512-PnpQQMuxlwYdocC8fIJqVXvkeViHYzotI+NJrCuav0ZYFoq912ZHBk3mCeuj+5/VpodOjPe1z0Fk2ihgzlXqjQ==", + "license": "MIT AND ISC", + "dependencies": { + "@types/d3-array": "^3.0.3", + "@types/d3-ease": "^3.0.0", + "@types/d3-interpolate": "^3.0.1", + "@types/d3-scale": "^4.0.2", + "@types/d3-shape": "^3.1.0", + "@types/d3-time": "^3.0.0", + "@types/d3-timer": "^3.0.0", + "d3-array": "^3.1.6", + "d3-ease": "^3.0.1", + "d3-interpolate": "^3.0.1", + "d3-scale": "^4.0.2", + "d3-shape": "^3.1.0", + "d3-time": "^3.0.0", + "d3-timer": "^3.0.1" + } + }, + "node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { 
+ "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==", + "dev": true, + "license": "ISC" + }, + "node_modules/zustand": { + "version": "5.0.11", + "resolved": "https://registry.npmjs.org/zustand/-/zustand-5.0.11.tgz", + "integrity": "sha512-fdZY+dk7zn/vbWNCYmzZULHRrss0jx5pPFiOuMZ/5HJN6Yv3u+1Wswy/4MpZEkEGhtNH+pwxZB8OKgUBPzYAGg==", + "license": "MIT", + "engines": { + "node": ">=12.20.0" + }, + "peerDependencies": { + "@types/react": ">=18.0.0", + "immer": ">=9.0.6", + "react": ">=18.0.0", + "use-sync-external-store": ">=1.2.0" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "immer": { + "optional": true + }, + "react": { + "optional": true + }, + "use-sync-external-store": { + "optional": true + } + } + } + } +} diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..b77dc98 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,32 @@ +{ + "name": "signal-dashboard", + "private": true, + "version": "0.1.0", + "type": "module", + "scripts": { + "dev": "vite", + "build": "tsc -b && vite build", + "preview": "vite preview", + "test": "vitest --run", + "test:watch": "vitest" + }, + "dependencies": { + "react": "^18.3.1", + "react-dom": "^18.3.1", + "react-router-dom": "^6.28.0", + "@tanstack/react-query": "^5.62.0", + "zustand": "^5.0.1", + "axios": "^1.7.9", + "recharts": "^2.14.1" + }, + "devDependencies": { + "@types/react": "^18.3.12", + "@types/react-dom": "^18.3.1", + "@vitejs/plugin-react": "^4.3.4", + "typescript": 
"~5.6.3", + "vite": "^5.4.11", + "tailwindcss": "^3.4.16", + "postcss": "^8.4.49", + "autoprefixer": "^10.4.20" + } +} diff --git a/frontend/postcss.config.js b/frontend/postcss.config.js new file mode 100644 index 0000000..2aa7205 --- /dev/null +++ b/frontend/postcss.config.js @@ -0,0 +1,6 @@ +export default { + plugins: { + tailwindcss: {}, + autoprefixer: {}, + }, +}; diff --git a/frontend/src/App.tsx b/frontend/src/App.tsx new file mode 100644 index 0000000..3c2ea7f --- /dev/null +++ b/frontend/src/App.tsx @@ -0,0 +1,31 @@ +import { Routes, Route, Navigate } from 'react-router-dom'; +import ProtectedRoute from './components/auth/ProtectedRoute'; +import AppShell from './components/layout/AppShell'; +import LoginPage from './pages/LoginPage'; +import RegisterPage from './pages/RegisterPage'; +import WatchlistPage from './pages/WatchlistPage'; +import TickerDetailPage from './pages/TickerDetailPage'; +import ScannerPage from './pages/ScannerPage'; +import RankingsPage from './pages/RankingsPage'; +import AdminPage from './pages/AdminPage'; + +export default function App() { + return ( + + } /> + } /> + }> + }> + } /> + } /> + } /> + } /> + } /> + }> + } /> + + + + + ); +} diff --git a/frontend/src/api/admin.ts b/frontend/src/api/admin.ts new file mode 100644 index 0000000..45dee1d --- /dev/null +++ b/frontend/src/api/admin.ts @@ -0,0 +1,83 @@ +import apiClient from './client'; +import type { AdminUser, SystemSetting } from '../lib/types'; + +// Users +export function listUsers() { + return apiClient.get('admin/users').then((r) => r.data); +} + +export function createUser(data: { + username: string; + password: string; + role: string; + has_access: boolean; +}) { + return apiClient.post('admin/users', data).then((r) => r.data); +} + +export function updateAccess(userId: number, hasAccess: boolean) { + return apiClient + .put<{ message: string }>(`admin/users/${userId}/access`, { + has_access: hasAccess, + }) + .then((r) => r.data); +} + +export function 
resetPassword(userId: number, password: string) { + return apiClient + .put<{ message: string }>(`admin/users/${userId}/password`, { password }) + .then((r) => r.data); +} + +// Settings +export function listSettings() { + return apiClient + .get('admin/settings') + .then((r) => r.data); +} + +export function updateSetting(key: string, value: string) { + return apiClient + .put<{ message: string }>(`admin/settings/${key}`, { value }) + .then((r) => r.data); +} + +export function updateRegistration(enabled: boolean) { + return apiClient + .put<{ message: string }>('admin/settings/registration', { enabled }) + .then((r) => r.data); +} + +// Jobs +export interface JobStatus { + name: string; + label: string; + enabled: boolean; + next_run_at: string | null; + registered: boolean; +} + +export function listJobs() { + return apiClient.get('admin/jobs').then((r) => r.data); +} + +export function toggleJob(jobName: string, enabled: boolean) { + return apiClient + .put<{ message: string }>(`admin/jobs/${jobName}/toggle`, { enabled }) + .then((r) => r.data); +} + +export function triggerJob(jobName: string) { + return apiClient + .post<{ message: string }>(`admin/jobs/${jobName}/trigger`) + .then((r) => r.data); +} + +// Data cleanup +export function cleanupData(olderThanDays: number) { + return apiClient + .post<{ message: string }>('admin/data/cleanup', { + older_than_days: olderThanDays, + }) + .then((r) => r.data); +} diff --git a/frontend/src/api/auth.ts b/frontend/src/api/auth.ts new file mode 100644 index 0000000..136afbe --- /dev/null +++ b/frontend/src/api/auth.ts @@ -0,0 +1,14 @@ +import apiClient from './client'; +import type { TokenResponse } from '../lib/types'; + +export function login(username: string, password: string) { + return apiClient + .post('auth/login', { username, password }) + .then((r) => r.data); +} + +export function register(username: string, password: string) { + return apiClient + .post<{ message: string }>('auth/register', { username, 
password }) + .then((r) => r.data); +} diff --git a/frontend/src/api/client.ts b/frontend/src/api/client.ts new file mode 100644 index 0000000..5204007 --- /dev/null +++ b/frontend/src/api/client.ts @@ -0,0 +1,69 @@ +import axios from 'axios'; +import type { APIEnvelope } from '../lib/types'; +import { useAuthStore } from '../stores/authStore'; + +/** + * Typed error class for API errors, providing structured error handling + * across the application. + */ +export class ApiError extends Error { + constructor(message: string) { + super(message); + this.name = 'ApiError'; + } +} + +/** + * Central Axios instance configured for the Stock Data Backend API. + * - Base URL: /api/v1/ + * - Timeout: 30 seconds + * - JSON content type + */ +const apiClient = axios.create({ + baseURL: '/api/v1/', + timeout: 30_000, + headers: { 'Content-Type': 'application/json' }, +}); + +/** + * Request interceptor: attaches JWT Bearer token from the auth store + * to every outgoing request when a token is available. + */ +apiClient.interceptors.request.use((config) => { + const token = useAuthStore.getState().token; + if (token) { + config.headers.Authorization = `Bearer ${token}`; + } + return config; +}); + +/** + * Response interceptor: + * - Success path: unwraps the { status, data, error } envelope, returning + * only the `data` field. Throws ApiError if envelope status is 'error'. + * - Error path: handles 401 by clearing auth and redirecting to login. + * All other errors are wrapped in ApiError with a descriptive message. + */ +apiClient.interceptors.response.use( + (response) => { + const envelope = response.data as APIEnvelope; + if (envelope.status === 'error') { + throw new ApiError(envelope.error ?? 'Unknown API error'); + } + // Return unwrapped data — callers receive the inner payload directly. + // We override the response shape here; downstream API functions cast as needed. 
+ response.data = envelope.data; + return response; + }, + (error) => { + if (axios.isAxiosError(error) && error.response?.status === 401) { + useAuthStore.getState().logout(); + window.location.href = '/login'; + } + const msg = + error.response?.data?.error ?? error.message ?? 'Network error'; + throw new ApiError(msg); + }, +); + +export default apiClient; diff --git a/frontend/src/api/fundamentals.ts b/frontend/src/api/fundamentals.ts new file mode 100644 index 0000000..95599a4 --- /dev/null +++ b/frontend/src/api/fundamentals.ts @@ -0,0 +1,8 @@ +import apiClient from './client'; +import type { FundamentalResponse } from '../lib/types'; + +export function getFundamentals(symbol: string) { + return apiClient + .get(`fundamentals/${symbol}`) + .then((r) => r.data); +} diff --git a/frontend/src/api/health.ts b/frontend/src/api/health.ts new file mode 100644 index 0000000..c29b1ad --- /dev/null +++ b/frontend/src/api/health.ts @@ -0,0 +1,5 @@ +import apiClient from './client'; + +export function check() { + return apiClient.get<{ status: string }>('health').then((r) => r.data); +} diff --git a/frontend/src/api/indicators.ts b/frontend/src/api/indicators.ts new file mode 100644 index 0000000..b5518bf --- /dev/null +++ b/frontend/src/api/indicators.ts @@ -0,0 +1,24 @@ +import apiClient from './client'; +import type { IndicatorResult, EMACrossResult } from '../lib/types'; + +interface IndicatorEnvelopeData { + symbol: string; + indicator: IndicatorResult; +} + +interface EMACrossEnvelopeData { + symbol: string; + ema_cross: EMACrossResult; +} + +export function getIndicator(symbol: string, indicatorType: string) { + return apiClient + .get(`indicators/${symbol}/${indicatorType}`) + .then((r) => (r.data as unknown as IndicatorEnvelopeData).indicator); +} + +export function getEMACross(symbol: string) { + return apiClient + .get(`indicators/${symbol}/ema-cross`) + .then((r) => (r.data as unknown as EMACrossEnvelopeData).ema_cross); +} diff --git 
a/frontend/src/api/ingestion.ts b/frontend/src/api/ingestion.ts new file mode 100644 index 0000000..771111f --- /dev/null +++ b/frontend/src/api/ingestion.ts @@ -0,0 +1,7 @@ +import apiClient from './client'; + +export function fetchData(symbol: string) { + return apiClient + .post<{ message: string }>(`ingestion/fetch/${symbol}`) + .then((r) => r.data); +} diff --git a/frontend/src/api/ohlcv.ts b/frontend/src/api/ohlcv.ts new file mode 100644 index 0000000..fa204a7 --- /dev/null +++ b/frontend/src/api/ohlcv.ts @@ -0,0 +1,6 @@ +import apiClient from './client'; +import type { OHLCVBar } from '../lib/types'; + +export function getOHLCV(symbol: string) { + return apiClient.get(`ohlcv/${symbol}`).then((r) => r.data); +} diff --git a/frontend/src/api/scores.ts b/frontend/src/api/scores.ts new file mode 100644 index 0000000..16cda16 --- /dev/null +++ b/frontend/src/api/scores.ts @@ -0,0 +1,18 @@ +import apiClient from './client'; +import type { ScoreResponse, RankingsResponse } from '../lib/types'; + +export function getScores(symbol: string) { + return apiClient + .get(`scores/${symbol}`) + .then((r) => r.data); +} + +export function getRankings() { + return apiClient.get('rankings').then((r) => r.data); +} + +export function updateWeights(weights: Record) { + return apiClient + .put<{ message: string }>('scores/weights', weights) + .then((r) => r.data); +} diff --git a/frontend/src/api/sentiment.ts b/frontend/src/api/sentiment.ts new file mode 100644 index 0000000..5289189 --- /dev/null +++ b/frontend/src/api/sentiment.ts @@ -0,0 +1,8 @@ +import apiClient from './client'; +import type { SentimentResponse } from '../lib/types'; + +export function getSentiment(symbol: string) { + return apiClient + .get(`sentiment/${symbol}`) + .then((r) => r.data); +} diff --git a/frontend/src/api/sr-levels.ts b/frontend/src/api/sr-levels.ts new file mode 100644 index 0000000..3a1d3b7 --- /dev/null +++ b/frontend/src/api/sr-levels.ts @@ -0,0 +1,8 @@ +import apiClient from './client'; 
+import type { SRLevelResponse } from '../lib/types'; + +export function getLevels(symbol: string) { + return apiClient + .get(`sr-levels/${symbol}`) + .then((r) => r.data); +} diff --git a/frontend/src/api/tickers.ts b/frontend/src/api/tickers.ts new file mode 100644 index 0000000..33da8a6 --- /dev/null +++ b/frontend/src/api/tickers.ts @@ -0,0 +1,16 @@ +import apiClient from './client'; +import type { Ticker } from '../lib/types'; + +export function list() { + return apiClient.get('tickers').then((r) => r.data); +} + +export function create(symbol: string) { + return apiClient.post('tickers', { symbol }).then((r) => r.data); +} + +export function deleteTicker(symbol: string) { + return apiClient + .delete<{ message: string }>(`tickers/${symbol}`) + .then((r) => r.data); +} diff --git a/frontend/src/api/trades.ts b/frontend/src/api/trades.ts new file mode 100644 index 0000000..6afa4de --- /dev/null +++ b/frontend/src/api/trades.ts @@ -0,0 +1,6 @@ +import apiClient from './client'; +import type { TradeSetup } from '../lib/types'; + +export function list() { + return apiClient.get('trades').then((r) => r.data); +} diff --git a/frontend/src/api/watchlist.ts b/frontend/src/api/watchlist.ts new file mode 100644 index 0000000..a4f06c4 --- /dev/null +++ b/frontend/src/api/watchlist.ts @@ -0,0 +1,18 @@ +import apiClient from './client'; +import type { WatchlistEntry } from '../lib/types'; + +export function list() { + return apiClient.get('watchlist').then((r) => r.data); +} + +export function add(symbol: string) { + return apiClient + .post(`watchlist/${symbol}`) + .then((r) => r.data); +} + +export function remove(symbol: string) { + return apiClient + .delete<{ message: string }>(`watchlist/${symbol}`) + .then((r) => r.data); +} diff --git a/frontend/src/components/admin/DataCleanup.tsx b/frontend/src/components/admin/DataCleanup.tsx new file mode 100644 index 0000000..335b482 --- /dev/null +++ b/frontend/src/components/admin/DataCleanup.tsx @@ -0,0 +1,36 @@ +import { 
useState } from 'react'; +import { useCleanupData } from '../../hooks/useAdmin'; + +export function DataCleanup() { + const [days, setDays] = useState(90); + const cleanup = useCleanupData(); + + function handleSubmit(e: React.FormEvent) { + e.preventDefault(); + if (days < 1) return; + cleanup.mutate(days); + } + + return ( +
+
+ + setDays(Number(e.target.value))} + className="w-28 input-glass px-3 py-2 text-sm" + /> +
+ +
+ ); +} diff --git a/frontend/src/components/admin/JobControls.tsx b/frontend/src/components/admin/JobControls.tsx new file mode 100644 index 0000000..f07c09c --- /dev/null +++ b/frontend/src/components/admin/JobControls.tsx @@ -0,0 +1,82 @@ +import { useJobs, useToggleJob, useTriggerJob } from '../../hooks/useAdmin'; +import { SkeletonTable } from '../ui/Skeleton'; + +function formatNextRun(iso: string | null): string { + if (!iso) return '—'; + const d = new Date(iso); + const now = new Date(); + const diffMs = d.getTime() - now.getTime(); + if (diffMs < 0) return 'imminent'; + const mins = Math.round(diffMs / 60_000); + if (mins < 60) return `in ${mins}m`; + const hrs = Math.round(mins / 60); + return `in ${hrs}h`; +} + +export function JobControls() { + const { data: jobs, isLoading } = useJobs(); + const toggleJob = useToggleJob(); + const triggerJob = useTriggerJob(); + + if (isLoading) return ; + + return ( +
+ {jobs?.map((job) => ( +
+
+
+ {/* Status dot */} + +
+ {job.label} +
+ + {job.enabled ? 'Active' : 'Inactive'} + + {job.enabled && job.next_run_at && ( + + Next run {formatNextRun(job.next_run_at)} + + )} + {!job.registered && ( + Not registered + )} +
+
+
+ +
+ + +
+
+
+ ))} +
+ ); +} diff --git a/frontend/src/components/admin/SettingsForm.tsx b/frontend/src/components/admin/SettingsForm.tsx new file mode 100644 index 0000000..040ec73 --- /dev/null +++ b/frontend/src/components/admin/SettingsForm.tsx @@ -0,0 +1,78 @@ +import { useState } from 'react'; +import { useSettings, useUpdateSetting } from '../../hooks/useAdmin'; +import { SkeletonTable } from '../ui/Skeleton'; +import type { SystemSetting } from '../../lib/types'; + +export function SettingsForm() { + const { data: settings, isLoading, isError, error } = useSettings(); + const updateSetting = useUpdateSetting(); + const [edits, setEdits] = useState>({}); + + function currentValue(setting: SystemSetting) { + return edits[setting.key] ?? setting.value; + } + + function handleChange(key: string, value: string) { + setEdits((prev) => ({ ...prev, [key]: value })); + } + + function handleSave(key: string) { + const value = edits[key]; + if (value === undefined) return; + updateSetting.mutate( + { key, value }, + { onSuccess: () => setEdits((prev) => { const next = { ...prev }; delete next[key]; return next; }) }, + ); + } + + function handleToggleRegistration(current: string) { + updateSetting.mutate({ key: 'registration', value: current === 'true' ? 'false' : 'true' }); + } + + if (isLoading) return ; + if (isError) return

{(error as Error)?.message || 'Failed to load settings'}

; + if (!settings || settings.length === 0) return

No settings found.

; + + return ( +
+ {settings.map((setting) => ( +
+ + {setting.key === 'registration' ? ( + + ) : ( +
+ handleChange(setting.key, e.target.value)} + className="flex-1 input-glass px-3 py-2 text-sm" + /> + {edits[setting.key] !== undefined && edits[setting.key] !== setting.value && ( + + )} +
+ )} +
+ ))} +
+ ); +} diff --git a/frontend/src/components/admin/TickerManagement.tsx b/frontend/src/components/admin/TickerManagement.tsx new file mode 100644 index 0000000..b4d5750 --- /dev/null +++ b/frontend/src/components/admin/TickerManagement.tsx @@ -0,0 +1,92 @@ +import { useState } from 'react'; +import { useTickers, useAddTicker, useDeleteTicker } from '../../hooks/useTickers'; +import { ConfirmDialog } from '../ui/ConfirmDialog'; +import { SkeletonTable } from '../ui/Skeleton'; +import { formatDateTime } from '../../lib/format'; + +export function TickerManagement() { + const { data: tickers, isLoading, isError, error } = useTickers(); + const addTicker = useAddTicker(); + const deleteTicker = useDeleteTicker(); + const [newSymbol, setNewSymbol] = useState(''); + const [deleteTarget, setDeleteTarget] = useState(null); + + function handleAdd(e: React.FormEvent) { + e.preventDefault(); + const symbol = newSymbol.trim().toUpperCase(); + if (!symbol) return; + addTicker.mutate(symbol, { onSuccess: () => setNewSymbol('') }); + } + + function handleConfirmDelete() { + if (!deleteTarget) return; + deleteTicker.mutate(deleteTarget, { onSuccess: () => setDeleteTarget(null) }); + } + + return ( +
+
+ setNewSymbol(e.target.value)} + placeholder="Enter ticker symbol (e.g. AAPL)" + className="flex-1 input-glass px-3 py-2.5 text-sm" + /> + +
+ + {isLoading && } + {isError &&

{(error as Error)?.message || 'Failed to load tickers'}

} + + {tickers && tickers.length > 0 && ( +
+ + + + + + + + + + {tickers.map((ticker) => ( + + + + + + ))} + +
SymbolAddedActions
{ticker.symbol}{formatDateTime(ticker.created_at)} + +
+
+ )} + + {tickers && tickers.length === 0 && ( +

No tickers registered yet. Add one above.

+ )} + + setDeleteTarget(null)} + /> +
+ ); +} diff --git a/frontend/src/components/admin/UserTable.tsx b/frontend/src/components/admin/UserTable.tsx new file mode 100644 index 0000000..81bd812 --- /dev/null +++ b/frontend/src/components/admin/UserTable.tsx @@ -0,0 +1,132 @@ +import { useState } from 'react'; +import { useUsers, useCreateUser, useUpdateAccess, useResetPassword } from '../../hooks/useAdmin'; +import { SkeletonTable } from '../ui/Skeleton'; +import type { AdminUser } from '../../lib/types'; + +export function UserTable() { + const { data: users, isLoading, isError, error } = useUsers(); + const createUser = useCreateUser(); + const updateAccess = useUpdateAccess(); + const resetPassword = useResetPassword(); + + const [newUsername, setNewUsername] = useState(''); + const [newPassword, setNewPassword] = useState(''); + const [newRole, setNewRole] = useState('user'); + const [newAccess, setNewAccess] = useState(true); + const [resetTarget, setResetTarget] = useState(null); + const [resetPw, setResetPw] = useState(''); + + function handleCreate(e: React.FormEvent) { + e.preventDefault(); + if (!newUsername.trim() || !newPassword.trim()) return; + createUser.mutate( + { username: newUsername.trim(), password: newPassword, role: newRole, has_access: newAccess }, + { onSuccess: () => { setNewUsername(''); setNewPassword(''); setNewRole('user'); setNewAccess(true); } }, + ); + } + + function handleToggleAccess(user: AdminUser) { + updateAccess.mutate({ userId: user.id, hasAccess: !user.has_access }); + } + + function handleResetPassword(userId: number) { + if (!resetPw.trim()) return; + resetPassword.mutate( + { userId, password: resetPw }, + { onSuccess: () => { setResetTarget(null); setResetPw(''); } }, + ); + } + + return ( +
+ {/* Create user form */} +
+
+ + setNewUsername(e.target.value)} + placeholder="username" className="input-glass px-3 py-2 text-sm" /> +
+
+ + setNewPassword(e.target.value)} + placeholder="password" className="input-glass px-3 py-2 text-sm" /> +
+
+ + +
+ + +
+ + {isLoading && } + {isError &&

{(error as Error)?.message || 'Failed to load users'}

} + + {users && users.length > 0 && ( +
+ + + + + + + + + + + {users.map((user) => ( + + + + + + + ))} + +
UsernameRoleAccessActions
{user.username}{user.role} + + +
+ + {resetTarget === user.id ? ( + + setResetPw(e.target.value)} + placeholder="new password" className="w-32 input-glass px-2 py-1 text-xs" /> + + + + ) : ( + + )} +
+
+
+ )} + + {users && users.length === 0 &&

No users found.

} +
+ ); +} diff --git a/frontend/src/components/auth/ProtectedRoute.tsx b/frontend/src/components/auth/ProtectedRoute.tsx new file mode 100644 index 0000000..968f655 --- /dev/null +++ b/frontend/src/components/auth/ProtectedRoute.tsx @@ -0,0 +1,21 @@ +import { Navigate, Outlet } from 'react-router-dom'; +import { useAuthStore } from '../../stores/authStore'; + +interface ProtectedRouteProps { + requireAdmin?: boolean; +} + +export default function ProtectedRoute({ requireAdmin }: ProtectedRouteProps) { + const token = useAuthStore((s) => s.token); + const role = useAuthStore((s) => s.role); + + if (!token) { + return ; + } + + if (requireAdmin && role !== 'admin') { + return ; + } + + return ; +} diff --git a/frontend/src/components/charts/CandlestickChart.tsx b/frontend/src/components/charts/CandlestickChart.tsx new file mode 100644 index 0000000..86b757b --- /dev/null +++ b/frontend/src/components/charts/CandlestickChart.tsx @@ -0,0 +1,232 @@ +import { useMemo, useRef, useEffect, useCallback } from 'react'; +import type { OHLCVBar, SRLevel } from '../../lib/types'; +import { formatPrice, formatDate } from '../../lib/format'; + +interface CandlestickChartProps { + data: OHLCVBar[]; + srLevels?: SRLevel[]; + maxSRLevels?: number; +} + +function filterTopSRLevels(levels: SRLevel[], max: number): SRLevel[] { + if (levels.length <= max) return levels; + return [...levels].sort((a, b) => b.strength - a.strength).slice(0, max); +} + +interface TooltipState { + visible: boolean; + x: number; + y: number; + bar: OHLCVBar | null; +} + +export function CandlestickChart({ data, srLevels = [], maxSRLevels = 6 }: CandlestickChartProps) { + const canvasRef = useRef(null); + const containerRef = useRef(null); + const tooltipRef = useRef(null); + const tooltipState = useRef({ visible: false, x: 0, y: 0, bar: null }); + const animFrame = useRef(0); + + const topLevels = useMemo(() => filterTopSRLevels(srLevels, maxSRLevels), [srLevels, maxSRLevels]); + + const draw = useCallback(() 
=> { + const canvas = canvasRef.current; + const container = containerRef.current; + if (!canvas || !container || data.length === 0) return; + + const dpr = window.devicePixelRatio || 1; + const rect = container.getBoundingClientRect(); + const W = rect.width; + const H = 400; + + canvas.width = W * dpr; + canvas.height = H * dpr; + canvas.style.width = `${W}px`; + canvas.style.height = `${H}px`; + + const ctx = canvas.getContext('2d'); + if (!ctx) return; + ctx.scale(dpr, dpr); + ctx.clearRect(0, 0, W, H); + + // Margins + const ml = 12, mr = 70, mt = 12, mb = 32; + const cw = W - ml - mr; + const ch = H - mt - mb; + + // Price range + const allPrices = data.flatMap((b) => [b.high, b.low]); + const srPrices = topLevels.map((l) => l.price_level); + const allVals = [...allPrices, ...srPrices]; + const minP = Math.min(...allVals); + const maxP = Math.max(...allVals); + const pad = (maxP - minP) * 0.06 || 1; + const lo = minP - pad; + const hi = maxP + pad; + + const yScale = (v: number) => mt + ch - ((v - lo) / (hi - lo)) * ch; + const barW = cw / data.length; + const candleW = Math.max(barW * 0.65, 1); + + // Grid lines (horizontal) + const nTicks = 6; + ctx.strokeStyle = 'rgba(255,255,255,0.04)'; + ctx.lineWidth = 1; + ctx.fillStyle = '#6b7280'; + ctx.font = '11px Inter, system-ui, sans-serif'; + ctx.textAlign = 'right'; + for (let i = 0; i <= nTicks; i++) { + const v = lo + ((hi - lo) * i) / nTicks; + const y = yScale(v); + ctx.beginPath(); + ctx.moveTo(ml, y); + ctx.lineTo(ml + cw, y); + ctx.stroke(); + ctx.fillText(formatPrice(v), W - 8, y + 4); + } + + // X-axis labels + ctx.textAlign = 'center'; + const labelInterval = Math.max(Math.floor(data.length / 8), 1); + for (let i = 0; i < data.length; i += labelInterval) { + const x = ml + i * barW + barW / 2; + ctx.fillStyle = '#6b7280'; + ctx.fillText(formatDate(data[i].date), x, H - 6); + } + + // S/R levels + topLevels.forEach((level) => { + const y = yScale(level.price_level); + const isSupport = level.type === 
'support'; + const color = isSupport ? '#10b981' : '#ef4444'; + + ctx.strokeStyle = color; + ctx.lineWidth = 1.5; + ctx.globalAlpha = 0.55; + ctx.setLineDash([6, 3]); + ctx.beginPath(); + ctx.moveTo(ml, y); + ctx.lineTo(ml + cw, y); + ctx.stroke(); + ctx.setLineDash([]); + ctx.globalAlpha = 1; + + // Label + ctx.fillStyle = color; + ctx.font = '10px Inter, system-ui, sans-serif'; + ctx.textAlign = 'left'; + ctx.fillText( + `${level.type[0].toUpperCase()} ${formatPrice(level.price_level)}`, + ml + cw + 4, + y + 3 + ); + }); + + // Candles + data.forEach((bar, i) => { + const x = ml + i * barW + barW / 2; + const bullish = bar.close >= bar.open; + const color = bullish ? '#10b981' : '#ef4444'; + + const yHigh = yScale(bar.high); + const yLow = yScale(bar.low); + const yOpen = yScale(bar.open); + const yClose = yScale(bar.close); + + // Wick + ctx.strokeStyle = color; + ctx.lineWidth = 1; + ctx.beginPath(); + ctx.moveTo(x, yHigh); + ctx.lineTo(x, yLow); + ctx.stroke(); + + // Body + const bodyTop = Math.min(yOpen, yClose); + const bodyH = Math.max(Math.abs(yOpen - yClose), 1); + ctx.fillStyle = color; + ctx.fillRect(x - candleW / 2, bodyTop, candleW, bodyH); + }); + + // Store geometry for hit testing + (canvas as any).__chartMeta = { ml, mr, mt, mb, cw, ch, barW, lo, hi, yScale }; + }, [data, topLevels]); + + useEffect(() => { + draw(); + const onResize = () => { + cancelAnimationFrame(animFrame.current); + animFrame.current = requestAnimationFrame(draw); + }; + window.addEventListener('resize', onResize); + return () => { + window.removeEventListener('resize', onResize); + cancelAnimationFrame(animFrame.current); + }; + }, [draw]); + + const handleMouseMove = useCallback( + (e: React.MouseEvent) => { + const canvas = canvasRef.current; + const tip = tooltipRef.current; + if (!canvas || !tip || data.length === 0) return; + + const meta = (canvas as any).__chartMeta; + if (!meta) return; + + const rect = canvas.getBoundingClientRect(); + const mx = e.clientX - 
rect.left; + const idx = Math.floor((mx - meta.ml) / meta.barW); + + if (idx >= 0 && idx < data.length) { + const bar = data[idx]; + tooltipState.current = { visible: true, x: e.clientX - rect.left, y: e.clientY - rect.top, bar }; + tip.style.display = 'block'; + tip.style.left = `${Math.min(mx + 14, rect.width - 180)}px`; + tip.style.top = `${Math.max(e.clientY - rect.top - 80, 8)}px`; + tip.innerHTML = ` +
${formatDate(bar.date)}
+
+ Open${formatPrice(bar.open)} + High${formatPrice(bar.high)} + Low${formatPrice(bar.low)} + Close${formatPrice(bar.close)} + Vol${bar.volume.toLocaleString()} +
`; + } else { + tip.style.display = 'none'; + } + }, + [data] + ); + + const handleMouseLeave = useCallback(() => { + const tip = tooltipRef.current; + if (tip) tip.style.display = 'none'; + }, []); + + if (data.length === 0) { + return ( +
+ No OHLCV data available +
+ ); + } + + return ( +
+ +
+
+ ); +} diff --git a/frontend/src/components/layout/AppShell.tsx b/frontend/src/components/layout/AppShell.tsx new file mode 100644 index 0000000..7bf8e59 --- /dev/null +++ b/frontend/src/components/layout/AppShell.tsx @@ -0,0 +1,17 @@ +import { Outlet } from 'react-router-dom'; +import Sidebar from './Sidebar'; +import MobileNav from './MobileNav'; + +export default function AppShell() { + return ( +
+ +
+ +
+ +
+
+
+ ); +} diff --git a/frontend/src/components/layout/MobileNav.tsx b/frontend/src/components/layout/MobileNav.tsx new file mode 100644 index 0000000..575a17f --- /dev/null +++ b/frontend/src/components/layout/MobileNav.tsx @@ -0,0 +1,88 @@ +import { useState } from 'react'; +import { NavLink } from 'react-router-dom'; +import { useAuthStore } from '../../stores/authStore'; + +const navItems = [ + { to: '/watchlist', label: 'Watchlist' }, + { to: '/scanner', label: 'Scanner' }, + { to: '/rankings', label: 'Rankings' }, +]; + +export default function MobileNav() { + const [open, setOpen] = useState(false); + const { role, username, logout } = useAuthStore(); + + return ( +
+
+

Signal Dashboard

+ +
+ +
+ +
+ {username && ( +

{username}

+ )} + +
+
+
+ ); +} diff --git a/frontend/src/components/layout/Sidebar.tsx b/frontend/src/components/layout/Sidebar.tsx new file mode 100644 index 0000000..51f6829 --- /dev/null +++ b/frontend/src/components/layout/Sidebar.tsx @@ -0,0 +1,90 @@ +import { NavLink } from 'react-router-dom'; +import { useQuery } from '@tanstack/react-query'; +import { useAuthStore } from '../../stores/authStore'; +import { check as healthCheck } from '../../api/health'; + +const navItems = [ + { to: '/watchlist', label: 'Watchlist', icon: '◈' }, + { to: '/scanner', label: 'Scanner', icon: '⬡' }, + { to: '/rankings', label: 'Rankings', icon: '△' }, +]; + +export default function Sidebar() { + const { role, username, logout } = useAuthStore(); + + const health = useQuery({ + queryKey: ['health'], + queryFn: healthCheck, + refetchInterval: 30_000, + retry: 1, + }); + + const isBackendUp = health.isSuccess; + + return ( + + ); +} diff --git a/frontend/src/components/rankings/RankingsTable.tsx b/frontend/src/components/rankings/RankingsTable.tsx new file mode 100644 index 0000000..735c33c --- /dev/null +++ b/frontend/src/components/rankings/RankingsTable.tsx @@ -0,0 +1,57 @@ +import { Link } from 'react-router-dom'; +import type { RankingEntry } from '../../lib/types'; + +interface RankingsTableProps { + rankings: RankingEntry[]; +} + +function scoreColor(score: number): string { + if (score > 70) return 'text-emerald-400'; + if (score >= 40) return 'text-amber-400'; + return 'text-red-400'; +} + +export function RankingsTable({ rankings }: RankingsTableProps) { + if (rankings.length === 0) { + return

No rankings available.

; + } + + const dimensionNames = rankings.length > 0 ? rankings[0].dimensions.map((d) => d.dimension) : []; + + return ( +
+ + + + + + + {dimensionNames.map((dim) => ( + + ))} + + + + {rankings.map((entry, index) => ( + + + + + {entry.dimensions.map((dim) => ( + + ))} + + ))} + +
RankSymbolComposite{dim}
{index + 1} + + {entry.symbol} + + + {Math.round(entry.composite_score)} + + {Math.round(dim.score)} +
+
+ ); +} diff --git a/frontend/src/components/rankings/WeightsForm.tsx b/frontend/src/components/rankings/WeightsForm.tsx new file mode 100644 index 0000000..95eaeda --- /dev/null +++ b/frontend/src/components/rankings/WeightsForm.tsx @@ -0,0 +1,50 @@ +import { useState, type FormEvent } from 'react'; +import { useUpdateWeights } from '../../hooks/useScores'; + +interface WeightsFormProps { + weights: Record; +} + +export function WeightsForm({ weights }: WeightsFormProps) { + const [localWeights, setLocalWeights] = useState>(weights); + const updateWeights = useUpdateWeights(); + + const handleChange = (key: string, value: string) => { + const num = parseFloat(value); + if (!isNaN(num)) setLocalWeights((prev) => ({ ...prev, [key]: num })); + }; + + const handleSubmit = (e: FormEvent) => { + e.preventDefault(); + updateWeights.mutate(localWeights); + }; + + return ( +
+

+ Scoring Weights +

+
+ {Object.keys(weights).map((key) => ( + + ))} +
+ +
+ ); +} diff --git a/frontend/src/components/scanner/TradeTable.tsx b/frontend/src/components/scanner/TradeTable.tsx new file mode 100644 index 0000000..0935dc8 --- /dev/null +++ b/frontend/src/components/scanner/TradeTable.tsx @@ -0,0 +1,81 @@ +import { Link } from 'react-router-dom'; +import type { TradeSetup } from '../../lib/types'; +import { formatPrice, formatDateTime } from '../../lib/format'; + +export type SortColumn = 'symbol' | 'direction' | 'entry_price' | 'stop_loss' | 'target' | 'rr_ratio' | 'composite_score' | 'detected_at'; +export type SortDirection = 'asc' | 'desc'; + +interface TradeTableProps { + trades: TradeSetup[]; + sortColumn: SortColumn; + sortDirection: SortDirection; + onSort: (column: SortColumn) => void; +} + +const columns: { key: SortColumn; label: string }[] = [ + { key: 'symbol', label: 'Symbol' }, + { key: 'direction', label: 'Direction' }, + { key: 'entry_price', label: 'Entry' }, + { key: 'stop_loss', label: 'Stop Loss' }, + { key: 'target', label: 'Target' }, + { key: 'rr_ratio', label: 'R:R' }, + { key: 'composite_score', label: 'Score' }, + { key: 'detected_at', label: 'Detected' }, +]; + +function sortIndicator(column: SortColumn, active: SortColumn, dir: SortDirection) { + if (column !== active) return ''; + return dir === 'asc' ? ' ▲' : ' ▼'; +} + +export function TradeTable({ trades, sortColumn, sortDirection, onSort }: TradeTableProps) { + if (trades.length === 0) { + return

No trade setups match the current filters.

; + } + + return ( +
+ + + + {columns.map((col) => ( + + ))} + + + + {trades.map((trade) => ( + + + + + + + + + + + ))} + +
onSort(col.key)} + > + {col.label}{sortIndicator(col.key, sortColumn, sortDirection)} +
+ + {trade.symbol} + + + + {trade.direction} + + {formatPrice(trade.entry_price)}{formatPrice(trade.stop_loss)}{formatPrice(trade.target)}{trade.rr_ratio.toFixed(2)} + 70 ? 'text-emerald-400' : trade.composite_score >= 40 ? 'text-amber-400' : 'text-red-400'}`}> + {Math.round(trade.composite_score)} + + {formatDateTime(trade.detected_at)}
+
+ ); +} diff --git a/frontend/src/components/ticker/FundamentalsPanel.tsx b/frontend/src/components/ticker/FundamentalsPanel.tsx new file mode 100644 index 0000000..e74ea99 --- /dev/null +++ b/frontend/src/components/ticker/FundamentalsPanel.tsx @@ -0,0 +1,34 @@ +import { formatPercent, formatLargeNumber } from '../../lib/format'; +import type { FundamentalResponse } from '../../lib/types'; + +interface FundamentalsPanelProps { + data: FundamentalResponse; +} + +export function FundamentalsPanel({ data }: FundamentalsPanelProps) { + const items = [ + { label: 'P/E Ratio', value: data.pe_ratio !== null ? data.pe_ratio.toFixed(2) : '—' }, + { label: 'Revenue Growth', value: data.revenue_growth !== null ? formatPercent(data.revenue_growth) : '—' }, + { label: 'Earnings Surprise', value: data.earnings_surprise !== null ? formatPercent(data.earnings_surprise) : '—' }, + { label: 'Market Cap', value: data.market_cap !== null ? formatLargeNumber(data.market_cap) : '—' }, + ]; + + return ( +
+

Fundamentals

+
+ {items.map((item) => ( +
+ {item.label} + {item.value} +
+ ))} + {data.fetched_at && ( +

+ Updated {new Date(data.fetched_at).toLocaleDateString()} +

+ )} +
+
+ ); +} diff --git a/frontend/src/components/ticker/IndicatorSelector.tsx b/frontend/src/components/ticker/IndicatorSelector.tsx new file mode 100644 index 0000000..276b040 --- /dev/null +++ b/frontend/src/components/ticker/IndicatorSelector.tsx @@ -0,0 +1,133 @@ +import { useState } from 'react'; +import { useQuery } from '@tanstack/react-query'; +import { getIndicator, getEMACross } from '../../api/indicators'; +import type { IndicatorResult, EMACrossResult } from '../../lib/types'; + +const INDICATOR_TYPES = ['ADX', 'EMA', 'RSI', 'ATR', 'volume_profile', 'pivot_points'] as const; + +interface IndicatorSelectorProps { + symbol: string; +} + +const signalColors: Record = { + bullish: 'text-emerald-400', + bearish: 'text-red-400', + neutral: 'text-gray-300', +}; + +function IndicatorResultDisplay({ result }: { result: IndicatorResult }) { + return ( +
+
+ Type + {result.indicator_type} +
+
+ Normalized Score + {result.score.toFixed(2)} +
+
+ Bars Used + {result.bars_used} +
+ {Object.keys(result.values).length > 0 && ( +
+

Values

+ {Object.entries(result.values).map(([key, val]) => ( +
+ {key} + {typeof val === 'number' ? val.toFixed(4) : String(val)} +
+ ))} +
+ )} +
+ ); +} + +function EMACrossDisplay({ result }: { result: EMACrossResult }) { + return ( +
+
+ Signal + {result.signal} +
+
+ Short EMA ({result.short_period}) + {result.short_ema.toFixed(2)} +
+
+ Long EMA ({result.long_period}) + {result.long_ema.toFixed(2)} +
+
+ ); +} + +export function IndicatorSelector({ symbol }: IndicatorSelectorProps) { + const [selectedType, setSelectedType] = useState(''); + const [showEMACross, setShowEMACross] = useState(false); + + const indicatorQuery = useQuery({ + queryKey: ['indicator', symbol, selectedType], + queryFn: () => getIndicator(symbol, selectedType), + enabled: !!symbol && !!selectedType, + }); + + const emaCrossQuery = useQuery({ + queryKey: ['ema-cross', symbol], + queryFn: () => getEMACross(symbol), + enabled: !!symbol && showEMACross, + }); + + return ( +
+

Indicators

+ +
+ +
+ + {selectedType && indicatorQuery.isLoading && ( +
+
+
+
+
+ )} + {selectedType && indicatorQuery.isError && ( +

+ {indicatorQuery.error instanceof Error ? indicatorQuery.error.message : 'Failed to load indicator'} +

+ )} + {indicatorQuery.data && } + +
+ + {emaCrossQuery.isError && ( +

+ {emaCrossQuery.error instanceof Error ? emaCrossQuery.error.message : 'Failed to load EMA cross'} +

+ )} + {emaCrossQuery.data && ( +
+ )} +
+
+ ); +} diff --git a/frontend/src/components/ticker/SROverlay.tsx b/frontend/src/components/ticker/SROverlay.tsx new file mode 100644 index 0000000..229b64d --- /dev/null +++ b/frontend/src/components/ticker/SROverlay.tsx @@ -0,0 +1,32 @@ +import { ReferenceLine } from 'recharts'; +import type { SRLevel } from '../../lib/types'; +import { formatPrice } from '../../lib/format'; + +interface SROverlayProps { + levels: SRLevel[]; +} + +export function SROverlay({ levels }: SROverlayProps) { + return ( + <> + {levels.map((level) => { + const isSupport = level.type === 'support'; + return ( + + ); + })} + + ); +} diff --git a/frontend/src/components/ticker/SentimentPanel.tsx b/frontend/src/components/ticker/SentimentPanel.tsx new file mode 100644 index 0000000..bf3e6d8 --- /dev/null +++ b/frontend/src/components/ticker/SentimentPanel.tsx @@ -0,0 +1,46 @@ +import { formatPercent } from '../../lib/format'; +import type { SentimentResponse } from '../../lib/types'; + +interface SentimentPanelProps { + data: SentimentResponse; +} + +const classificationColors: Record = { + bullish: 'text-emerald-400', + bearish: 'text-red-400', + neutral: 'text-gray-300', +}; + +export function SentimentPanel({ data }: SentimentPanelProps) { + const latest = data.scores[0]; + + return ( +
+

Sentiment

+ {latest ? ( +
+
+ Classification + + {latest.classification} + +
+
+ Confidence + {formatPercent(latest.confidence)} +
+
+ Dimension Score + {data.dimension_score !== null ? Math.round(data.dimension_score) : '—'} +
+
+ Sources + {data.count} +
+
+ ) : ( +

No sentiment data available

+ )} +
+ ); +} diff --git a/frontend/src/components/ui/Badge.tsx b/frontend/src/components/ui/Badge.tsx new file mode 100644 index 0000000..6618911 --- /dev/null +++ b/frontend/src/components/ui/Badge.tsx @@ -0,0 +1,18 @@ +const variantStyles: Record = { + auto: 'bg-blue-500/15 text-blue-400 border-blue-500/20', + manual: 'bg-violet-500/15 text-violet-400 border-violet-500/20', + default: 'bg-white/[0.06] text-gray-400 border-white/[0.08]', +}; + +interface BadgeProps { + label: string; + variant?: 'auto' | 'manual' | 'default'; +} + +export function Badge({ label, variant = 'default' }: BadgeProps) { + return ( + + {label} + + ); +} diff --git a/frontend/src/components/ui/ConfirmDialog.tsx b/frontend/src/components/ui/ConfirmDialog.tsx new file mode 100644 index 0000000..1236a32 --- /dev/null +++ b/frontend/src/components/ui/ConfirmDialog.tsx @@ -0,0 +1,35 @@ +interface ConfirmDialogProps { + open: boolean; + title: string; + message: string; + onConfirm: () => void; + onCancel: () => void; +} + +export function ConfirmDialog({ open, title, message, onConfirm, onCancel }: ConfirmDialogProps) { + if (!open) return null; + + return ( +
+
+
+

{title}

+

{message}

+
+ + +
+
+
+ ); +} diff --git a/frontend/src/components/ui/ScoreCard.tsx b/frontend/src/components/ui/ScoreCard.tsx new file mode 100644 index 0000000..078d12d --- /dev/null +++ b/frontend/src/components/ui/ScoreCard.tsx @@ -0,0 +1,89 @@ +interface ScoreCardProps { + compositeScore: number | null; + dimensions: { dimension: string; score: number }[]; +} + +function scoreColor(score: number): string { + if (score > 70) return 'text-emerald-400'; + if (score >= 40) return 'text-amber-400'; + return 'text-red-400'; +} + +function ringGradient(score: number): string { + if (score > 70) return '#10b981'; + if (score >= 40) return '#f59e0b'; + return '#ef4444'; +} + +function barGradient(score: number): string { + if (score > 70) return 'from-emerald-500 to-emerald-400'; + if (score >= 40) return 'from-amber-500 to-amber-400'; + return 'from-red-500 to-red-400'; +} + +function ScoreRing({ score }: { score: number }) { + const radius = 36; + const circumference = 2 * Math.PI * radius; + const clamped = Math.max(0, Math.min(100, score)); + const offset = circumference - (clamped / 100) * circumference; + const color = ringGradient(score); + + return ( +
+ + + + + + {Math.round(score)} + +
+ ); +} + +export function ScoreCard({ compositeScore, dimensions }: ScoreCardProps) { + return ( +
+
+ {compositeScore !== null ? ( + + ) : ( +
N/A
+ )} +
+

Composite Score

+

+ {compositeScore !== null ? Math.round(compositeScore) : '—'} +

+
+
+ + {dimensions.length > 0 && ( +
+

Dimensions

+ {dimensions.map((d) => ( +
+ {d.dimension} +
+
+
+
+ + {Math.round(d.score)} + +
+
+ ))} +
+ )} +
+ ); +} diff --git a/frontend/src/components/ui/Skeleton.tsx b/frontend/src/components/ui/Skeleton.tsx new file mode 100644 index 0000000..0fbc57b --- /dev/null +++ b/frontend/src/components/ui/Skeleton.tsx @@ -0,0 +1,23 @@ +const pulse = 'animate-pulse rounded-lg bg-white/[0.05]'; + +export function SkeletonLine({ className = '' }: { className?: string }) { + return
; +} + +export function SkeletonCard({ className = '' }: { className?: string }) { + return
; +} + +export function SkeletonTable({ rows = 5, cols = 4, className = '' }: { rows?: number; cols?: number; className?: string }) { + return ( +
+ {Array.from({ length: rows }, (_, r) => ( +
+ {Array.from({ length: cols }, (_, c) => ( +
+ ))} +
+ ))} +
+ ); +} diff --git a/frontend/src/components/ui/Toast.tsx b/frontend/src/components/ui/Toast.tsx new file mode 100644 index 0000000..68f603b --- /dev/null +++ b/frontend/src/components/ui/Toast.tsx @@ -0,0 +1,87 @@ +import { createContext, useCallback, useContext, useEffect, useRef, useState } from 'react'; +import { createPortal } from 'react-dom'; + +type ToastType = 'success' | 'error' | 'info'; + +interface Toast { + id: string; + type: ToastType; + message: string; +} + +interface ToastContextValue { + addToast: (type: ToastType, message: string) => void; +} + +const ToastContext = createContext(null); + +const MAX_VISIBLE = 3; +const AUTO_DISMISS_MS = 8000; + +const typeStyles: Record = { + error: 'border-red-500/30 bg-red-500/10 text-red-300', + success: 'border-emerald-500/30 bg-emerald-500/10 text-emerald-300', + info: 'border-blue-500/30 bg-blue-500/10 text-blue-300', +}; + +export function ToastProvider({ children }: { children: React.ReactNode }) { + const [toasts, setToasts] = useState([]); + const nextId = useRef(0); + + const removeToast = useCallback((id: string) => { + setToasts((prev) => prev.filter((t) => t.id !== id)); + }, []); + + const addToast = useCallback((type: ToastType, message: string) => { + const id = String(nextId.current++); + setToasts((prev) => { + const next = [...prev, { id, type, message }]; + return next.length > MAX_VISIBLE ? next.slice(next.length - MAX_VISIBLE) : next; + }); + }, []); + + return ( + + {children} + {createPortal( +
+ {toasts.map((toast) => ( + + ))} +
, + document.body, + )} +
+ ); +} + +function ToastItem({ toast, onDismiss }: { toast: Toast; onDismiss: (id: string) => void }) { + useEffect(() => { + const timer = setTimeout(() => onDismiss(toast.id), AUTO_DISMISS_MS); + return () => clearTimeout(timer); + }, [toast.id, onDismiss]); + + return ( +
+
+

{toast.message}

+ +
+
+ ); +} + +export function useToast(): ToastContextValue { + const ctx = useContext(ToastContext); + if (!ctx) throw new Error('useToast must be used within a ToastProvider'); + return ctx; +} diff --git a/frontend/src/components/watchlist/AddTickerForm.tsx b/frontend/src/components/watchlist/AddTickerForm.tsx new file mode 100644 index 0000000..13244f6 --- /dev/null +++ b/frontend/src/components/watchlist/AddTickerForm.tsx @@ -0,0 +1,33 @@ +import { FormEvent, useState } from 'react'; +import { useAddToWatchlist } from '../../hooks/useWatchlist'; + +export function AddTickerForm() { + const [symbol, setSymbol] = useState(''); + const addMutation = useAddToWatchlist(); + + function handleSubmit(e: FormEvent) { + e.preventDefault(); + const trimmed = symbol.trim().toUpperCase(); + if (!trimmed) return; + addMutation.mutate(trimmed, { onSuccess: () => setSymbol('') }); + } + + return ( +
+ setSymbol(e.target.value)} + placeholder="Add symbol (e.g. AAPL)" + className="input-glass px-3 py-2 text-sm" + /> + +
+ ); +} diff --git a/frontend/src/components/watchlist/WatchlistTable.tsx b/frontend/src/components/watchlist/WatchlistTable.tsx new file mode 100644 index 0000000..799c570 --- /dev/null +++ b/frontend/src/components/watchlist/WatchlistTable.tsx @@ -0,0 +1,131 @@ +import { Link } from 'react-router-dom'; +import type { WatchlistEntry } from '../../lib/types'; +import { formatPrice } from '../../lib/format'; +import { Badge } from '../ui/Badge'; +import { useRemoveFromWatchlist } from '../../hooks/useWatchlist'; + +function scoreColor(score: number): string { + if (score > 70) return 'text-emerald-400'; + if (score >= 40) return 'text-amber-400'; + return 'text-red-400'; +} + +interface WatchlistTableProps { + entries: WatchlistEntry[]; +} + +export function WatchlistTable({ entries }: WatchlistTableProps) { + const removeMutation = useRemoveFromWatchlist(); + + if (entries.length === 0) { + return ( +

+ No watchlist entries yet. Add a symbol above to get started. +

+ ); + } + + return ( +
+ + + + + + + + + + + + + + + {entries.map((entry) => ( + + + + + + + + + + + ))} + +
SymbolTypeScoreDimensionsR:RDirectionS/R Levels
+ + {entry.symbol} + + + + + {entry.composite_score !== null ? ( + + {Math.round(entry.composite_score)} + + ) : ( + + )} + + {entry.dimensions.length > 0 ? ( +
+ {entry.dimensions.map((d) => ( + + {d.dimension.slice(0, 3).toUpperCase()} {Math.round(d.score)} + + ))} +
+ ) : ( + + )} +
+ {entry.rr_ratio !== null ? ( + {entry.rr_ratio.toFixed(2)} + ) : ( + + )} + + {entry.rr_direction ? ( + + {entry.rr_direction} + + ) : ( + + )} + + {entry.sr_levels.length > 0 ? ( +
+ {entry.sr_levels.map((level, i) => ( + + {formatPrice(level.price_level)} + + ))} +
+ ) : ( + + )} +
+ +
+
+ ); +} diff --git a/frontend/src/hooks/useAdmin.ts b/frontend/src/hooks/useAdmin.ts new file mode 100644 index 0000000..660d922 --- /dev/null +++ b/frontend/src/hooks/useAdmin.ts @@ -0,0 +1,148 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import * as adminApi from '../api/admin'; +import { useToast } from '../components/ui/Toast'; + +// ── Users ── + +export function useUsers() { + return useQuery({ + queryKey: ['admin', 'users'], + queryFn: () => adminApi.listUsers(), + }); +} + +export function useCreateUser() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: (data: { + username: string; + password: string; + role: string; + has_access: boolean; + }) => adminApi.createUser(data), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['admin', 'users'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to create user'); + }, + }); +} + +export function useUpdateAccess() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: ({ userId, hasAccess }: { userId: number; hasAccess: boolean }) => + adminApi.updateAccess(userId, hasAccess), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['admin', 'users'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to update access'); + }, + }); +} + +export function useResetPassword() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: ({ userId, password }: { userId: number; password: string }) => + adminApi.resetPassword(userId, password), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['admin', 'users'] }); + addToast('success', 'Password reset successfully'); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to reset password'); + }, + }); +} + +// ── Settings ── + +export function useSettings() { + return 
useQuery({ + queryKey: ['admin', 'settings'], + queryFn: () => adminApi.listSettings(), + }); +} + +export function useUpdateSetting() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: ({ key, value }: { key: string; value: string }) => + adminApi.updateSetting(key, value), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['admin', 'settings'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to update setting'); + }, + }); +} + +// ── Jobs ── + +export function useJobs() { + return useQuery({ + queryKey: ['admin', 'jobs'], + queryFn: () => adminApi.listJobs(), + refetchInterval: 15_000, + }); +} + +export function useToggleJob() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: ({ jobName, enabled }: { jobName: string; enabled: boolean }) => + adminApi.toggleJob(jobName, enabled), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['admin', 'jobs'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to toggle job'); + }, + }); +} + +export function useTriggerJob() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: (jobName: string) => adminApi.triggerJob(jobName), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['admin', 'jobs'] }); + addToast('success', 'Job triggered successfully'); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to trigger job'); + }, + }); +} + +// ── Data Cleanup ── + +export function useCleanupData() { + const { addToast } = useToast(); + + return useMutation({ + mutationFn: (olderThanDays: number) => adminApi.cleanupData(olderThanDays), + onSuccess: (data) => { + addToast('success', (data as { message: string }).message || 'Cleanup completed'); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to cleanup data'); + }, + }); 
+} diff --git a/frontend/src/hooks/useAuth.ts b/frontend/src/hooks/useAuth.ts new file mode 100644 index 0000000..ee15a30 --- /dev/null +++ b/frontend/src/hooks/useAuth.ts @@ -0,0 +1,22 @@ +import { useMutation } from '@tanstack/react-query'; +import * as authApi from '../api/auth'; +import { useAuthStore } from '../stores/authStore'; + +export function useLogin() { + const storeLogin = useAuthStore((s) => s.login); + + return useMutation({ + mutationFn: ({ username, password }: { username: string; password: string }) => + authApi.login(username, password), + onSuccess: (data) => { + storeLogin(data.access_token); + }, + }); +} + +export function useRegister() { + return useMutation({ + mutationFn: ({ username, password }: { username: string; password: string }) => + authApi.register(username, password), + }); +} diff --git a/frontend/src/hooks/useScores.ts b/frontend/src/hooks/useScores.ts new file mode 100644 index 0000000..79fb999 --- /dev/null +++ b/frontend/src/hooks/useScores.ts @@ -0,0 +1,26 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import * as scoresApi from '../api/scores'; +import { useToast } from '../components/ui/Toast'; + +export function useRankings() { + return useQuery({ + queryKey: ['rankings'], + queryFn: () => scoresApi.getRankings(), + }); +} + +export function useUpdateWeights() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: (weights: Record) => scoresApi.updateWeights(weights), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['rankings'] }); + addToast('success', 'Weights updated successfully'); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to update weights'); + }, + }); +} diff --git a/frontend/src/hooks/useTickerDetail.ts b/frontend/src/hooks/useTickerDetail.ts new file mode 100644 index 0000000..a3e1ea0 --- /dev/null +++ b/frontend/src/hooks/useTickerDetail.ts @@ -0,0 +1,40 @@ +import { useQuery 
} from '@tanstack/react-query'; +import { getOHLCV } from '../api/ohlcv'; +import { getScores } from '../api/scores'; +import { getLevels } from '../api/sr-levels'; +import { getSentiment } from '../api/sentiment'; +import { getFundamentals } from '../api/fundamentals'; + +export function useTickerDetail(symbol: string) { + const ohlcv = useQuery({ + queryKey: ['ohlcv', symbol], + queryFn: () => getOHLCV(symbol), + enabled: !!symbol, + }); + + const scores = useQuery({ + queryKey: ['scores', symbol], + queryFn: () => getScores(symbol), + enabled: !!symbol, + }); + + const srLevels = useQuery({ + queryKey: ['sr-levels', symbol], + queryFn: () => getLevels(symbol), + enabled: !!symbol, + }); + + const sentiment = useQuery({ + queryKey: ['sentiment', symbol], + queryFn: () => getSentiment(symbol), + enabled: !!symbol, + }); + + const fundamentals = useQuery({ + queryKey: ['fundamentals', symbol], + queryFn: () => getFundamentals(symbol), + enabled: !!symbol, + }); + + return { ohlcv, scores, srLevels, sentiment, fundamentals }; +} diff --git a/frontend/src/hooks/useTickers.ts b/frontend/src/hooks/useTickers.ts new file mode 100644 index 0000000..32f3bfd --- /dev/null +++ b/frontend/src/hooks/useTickers.ts @@ -0,0 +1,40 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import * as tickersApi from '../api/tickers'; +import { useToast } from '../components/ui/Toast'; + +export function useTickers() { + return useQuery({ + queryKey: ['tickers'], + queryFn: () => tickersApi.list(), + }); +} + +export function useAddTicker() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: (symbol: string) => tickersApi.create(symbol), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['tickers'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to add ticker'); + }, + }); +} + +export function useDeleteTicker() { + const qc = useQueryClient(); + const { 
addToast } = useToast(); + + return useMutation({ + mutationFn: (symbol: string) => tickersApi.deleteTicker(symbol), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['tickers'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to delete ticker'); + }, + }); +} diff --git a/frontend/src/hooks/useTrades.ts b/frontend/src/hooks/useTrades.ts new file mode 100644 index 0000000..ac1105e --- /dev/null +++ b/frontend/src/hooks/useTrades.ts @@ -0,0 +1,9 @@ +import { useQuery } from '@tanstack/react-query'; +import * as tradesApi from '../api/trades'; + +export function useTrades() { + return useQuery({ + queryKey: ['trades'], + queryFn: () => tradesApi.list(), + }); +} diff --git a/frontend/src/hooks/useWatchlist.ts b/frontend/src/hooks/useWatchlist.ts new file mode 100644 index 0000000..20d3db1 --- /dev/null +++ b/frontend/src/hooks/useWatchlist.ts @@ -0,0 +1,40 @@ +import { useMutation, useQuery, useQueryClient } from '@tanstack/react-query'; +import * as watchlistApi from '../api/watchlist'; +import { useToast } from '../components/ui/Toast'; + +export function useWatchlist() { + return useQuery({ + queryKey: ['watchlist'], + queryFn: () => watchlistApi.list(), + }); +} + +export function useAddToWatchlist() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: (symbol: string) => watchlistApi.add(symbol), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['watchlist'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to add to watchlist'); + }, + }); +} + +export function useRemoveFromWatchlist() { + const qc = useQueryClient(); + const { addToast } = useToast(); + + return useMutation({ + mutationFn: (symbol: string) => watchlistApi.remove(symbol), + onSuccess: () => { + qc.invalidateQueries({ queryKey: ['watchlist'] }); + }, + onError: (error: Error) => { + addToast('error', error.message || 'Failed to remove from watchlist'); + }, + 
}); +} diff --git a/frontend/src/lib/format.ts b/frontend/src/lib/format.ts new file mode 100644 index 0000000..b754e5c --- /dev/null +++ b/frontend/src/lib/format.ts @@ -0,0 +1,74 @@ +/** + * Format a number as a price string with 2 decimal places and thousands separators. + * e.g. 1234.5 → "1,234.50" + */ +export function formatPrice(n: number): string { + return n.toLocaleString('en-US', { + minimumFractionDigits: 2, + maximumFractionDigits: 2, + }); +} + +/** + * Format a number as a percentage string with 2 decimal places. + * e.g. 12.345 → "12.35%" + */ +export function formatPercent(n: number): string { + return `${n.toLocaleString('en-US', { + minimumFractionDigits: 2, + maximumFractionDigits: 2, + })}%`; +} + +/** + * Format a large number with K/M/B suffix. + * Values >= 1_000_000_000 → "1.23B" + * Values >= 1_000_000 → "456.7M" + * Values >= 1_000 → "12.3K" + * Values < 1_000 → plain number, no suffix + */ +export function formatLargeNumber(n: number): string { + const abs = Math.abs(n); + const sign = n < 0 ? '-' : ''; + + if (abs >= 1_000_000_000) { + return `${sign}${(abs / 1_000_000_000).toFixed(2).replace(/\.?0+$/, '')}B`; + } + if (abs >= 1_000_000) { + return `${sign}${(abs / 1_000_000).toFixed(1).replace(/\.?0+$/, '')}M`; + } + if (abs >= 1_000) { + return `${sign}${(abs / 1_000).toFixed(1).replace(/\.?0+$/, '')}K`; + } + return n.toString(); +} + +/** + * Format an ISO date string as a short date. + * e.g. "2025-01-15T14:30:00Z" → "Jan 15, 2025" + */ +export function formatDate(d: string): string { + const date = new Date(d); + return date.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric', + }); +} + +/** + * Format an ISO date string as a date with time. + * e.g. 
"2025-01-15T14:30:00Z" → "Jan 15, 2025 2:30 PM" + */ +export function formatDateTime(d: string): string { + const date = new Date(d); + return `${date.toLocaleDateString('en-US', { + month: 'short', + day: 'numeric', + year: 'numeric', + })} ${date.toLocaleTimeString('en-US', { + hour: 'numeric', + minute: '2-digit', + hour12: true, + })}`; +} diff --git a/frontend/src/lib/types.ts b/frontend/src/lib/types.ts new file mode 100644 index 0000000..d6079ec --- /dev/null +++ b/frontend/src/lib/types.ts @@ -0,0 +1,172 @@ +// API envelope (before unwrapping) +export interface APIEnvelope { + status: 'success' | 'error'; + data: T | null; + error: string | null; +} + +// Auth +export interface TokenResponse { + access_token: string; + token_type: string; +} + +// Watchlist +export interface WatchlistEntry { + symbol: string; + entry_type: 'auto' | 'manual'; + composite_score: number | null; + dimensions: DimensionScore[]; + rr_ratio: number | null; + rr_direction: string | null; + sr_levels: SRLevelSummary[]; + added_at: string; +} + +export interface DimensionScore { + dimension: string; + score: number; +} + +export interface SRLevelSummary { + price_level: number; + type: 'support' | 'resistance'; + strength: number; +} + +// OHLCV +export interface OHLCVBar { + id: number; + ticker_id: number; + date: string; + open: number; + high: number; + low: number; + close: number; + volume: number; + created_at: string; +} + +// Scores +export interface ScoreResponse { + symbol: string; + composite_score: number | null; + composite_stale: boolean; + weights: Record; + dimensions: DimensionScoreDetail[]; + missing_dimensions: string[]; + computed_at: string | null; +} + +export interface DimensionScoreDetail { + dimension: string; + score: number; + is_stale: boolean; + computed_at: string | null; +} + +export interface RankingEntry { + symbol: string; + composite_score: number; + dimensions: DimensionScoreDetail[]; +} + +export interface RankingsResponse { + rankings: 
RankingEntry[]; + weights: Record; +} + +// Trade Setups +export interface TradeSetup { + id: number; + symbol: string; + direction: string; + entry_price: number; + stop_loss: number; + target: number; + rr_ratio: number; + composite_score: number; + detected_at: string; +} + +// S/R Levels +export interface SRLevel { + id: number; + price_level: number; + type: 'support' | 'resistance'; + strength: number; + detection_method: string; + created_at: string; +} + +export interface SRLevelResponse { + symbol: string; + levels: SRLevel[]; + count: number; +} + +// Sentiment +export interface SentimentScore { + id: number; + classification: 'bullish' | 'bearish' | 'neutral'; + confidence: number; + source: string; + timestamp: string; +} + +export interface SentimentResponse { + symbol: string; + scores: SentimentScore[]; + count: number; + dimension_score: number | null; + lookback_hours: number; +} + +// Fundamentals +export interface FundamentalResponse { + symbol: string; + pe_ratio: number | null; + revenue_growth: number | null; + earnings_surprise: number | null; + market_cap: number | null; + fetched_at: string | null; +} + +// Indicators +export interface IndicatorResult { + indicator_type: string; + values: Record; + score: number; + bars_used: number; +} + +export interface EMACrossResult { + short_ema: number; + long_ema: number; + short_period: number; + long_period: number; + signal: 'bullish' | 'bearish' | 'neutral'; +} + +// Tickers +export interface Ticker { + id: number; + symbol: string; + created_at: string; +} + +// Admin +export interface AdminUser { + id: number; + username: string; + role: string; + has_access: boolean; + created_at: string | null; + updated_at: string | null; +} + +export interface SystemSetting { + key: string; + value: string; + updated_at: string | null; +} diff --git a/frontend/src/main.tsx b/frontend/src/main.tsx new file mode 100644 index 0000000..abc65da --- /dev/null +++ b/frontend/src/main.tsx @@ -0,0 +1,21 @@ +import 
{ StrictMode } from 'react'; +import { createRoot } from 'react-dom/client'; +import { BrowserRouter } from 'react-router-dom'; +import { QueryClient, QueryClientProvider } from '@tanstack/react-query'; +import { ToastProvider } from './components/ui/Toast'; +import App from './App'; +import './styles/globals.css'; + +const queryClient = new QueryClient(); + +createRoot(document.getElementById('root')!).render( + + + + + + + + + , +); diff --git a/frontend/src/pages/AdminPage.tsx b/frontend/src/pages/AdminPage.tsx new file mode 100644 index 0000000..d736d4f --- /dev/null +++ b/frontend/src/pages/AdminPage.tsx @@ -0,0 +1,48 @@ +import { useState } from 'react'; +import { DataCleanup } from '../components/admin/DataCleanup'; +import { JobControls } from '../components/admin/JobControls'; +import { SettingsForm } from '../components/admin/SettingsForm'; +import { TickerManagement } from '../components/admin/TickerManagement'; +import { UserTable } from '../components/admin/UserTable'; + +const tabs = ['Users', 'Tickers', 'Settings', 'Jobs', 'Cleanup'] as const; +type Tab = (typeof tabs)[number]; + +export default function AdminPage() { + const [activeTab, setActiveTab] = useState('Users'); + + return ( +
+
+

Admin

+

System management

+
+ + {/* Tab bar */} +
+ {tabs.map((tab) => ( + + ))} +
+ + {/* Tab content */} +
+ {activeTab === 'Users' && } + {activeTab === 'Tickers' && } + {activeTab === 'Settings' && } + {activeTab === 'Jobs' && } + {activeTab === 'Cleanup' && } +
+
+ ); +} diff --git a/frontend/src/pages/LoginPage.tsx b/frontend/src/pages/LoginPage.tsx new file mode 100644 index 0000000..aa850cc --- /dev/null +++ b/frontend/src/pages/LoginPage.tsx @@ -0,0 +1,91 @@ +import { useState, type FormEvent } from 'react'; +import { Link, useNavigate } from 'react-router-dom'; +import { useLogin } from '../hooks/useAuth'; + +export default function LoginPage() { + const [username, setUsername] = useState(''); + const [password, setPassword] = useState(''); + const [error, setError] = useState(null); + const navigate = useNavigate(); + const login = useLogin(); + + const handleSubmit = (e: FormEvent) => { + e.preventDefault(); + setError(null); + login.mutate( + { username, password }, + { + onSuccess: () => navigate('/watchlist'), + onError: (err) => setError(err instanceof Error ? err.message : 'Login failed'), + }, + ); + }; + + return ( +
+ {/* Ambient glow orbs */} +
+
+ +
+
+

Signal Dashboard

+

Sign in to your account

+
+ +
+ {error && ( +
+ {error} +
+ )} + +
+ + setUsername(e.target.value)} + required + className="w-full input-glass px-3 py-2.5 text-sm" + placeholder="Enter username" + /> +
+ +
+ + setPassword(e.target.value)} + required + className="w-full input-glass px-3 py-2.5 text-sm" + placeholder="Enter password" + /> +
+ + +
+ +

+ Don't have an account?{' '} + + Register + +

+
+
+ ); +} diff --git a/frontend/src/pages/RankingsPage.tsx b/frontend/src/pages/RankingsPage.tsx new file mode 100644 index 0000000..941aaf6 --- /dev/null +++ b/frontend/src/pages/RankingsPage.tsx @@ -0,0 +1,41 @@ +import { useRankings } from '../hooks/useScores'; +import { RankingsTable } from '../components/rankings/RankingsTable'; +import { WeightsForm } from '../components/rankings/WeightsForm'; +import { SkeletonTable } from '../components/ui/Skeleton'; + +export default function RankingsPage() { + const { data, isLoading, isError, error } = useRankings(); + + if (isLoading) { + return ( +
+

Rankings

+ +
+ ); + } + + if (isError) { + return ( +
+

Rankings

+

+ Failed to load rankings: {(error as Error).message} +

+
+ ); + } + + if (!data) return null; + + return ( +
+
+

Rankings

+

Composite scoring leaderboard

+
+ + +
+ ); +} diff --git a/frontend/src/pages/RegisterPage.tsx b/frontend/src/pages/RegisterPage.tsx new file mode 100644 index 0000000..d87614d --- /dev/null +++ b/frontend/src/pages/RegisterPage.tsx @@ -0,0 +1,122 @@ +import { useState, type FormEvent } from 'react'; +import { Link } from 'react-router-dom'; +import { useRegister } from '../hooks/useAuth'; + +export default function RegisterPage() { + const [username, setUsername] = useState(''); + const [password, setPassword] = useState(''); + const [error, setError] = useState(null); + const [success, setSuccess] = useState(false); + const [validationErrors, setValidationErrors] = useState<{ username?: string; password?: string }>({}); + const register = useRegister(); + + const validate = (): boolean => { + const errors: { username?: string; password?: string } = {}; + if (username.length < 1) errors.username = 'Username is required'; + if (password.length < 6) errors.password = 'Password must be at least 6 characters'; + setValidationErrors(errors); + return Object.keys(errors).length === 0; + }; + + const handleSubmit = (e: FormEvent) => { + e.preventDefault(); + setError(null); + if (!validate()) return; + register.mutate( + { username, password }, + { + onSuccess: () => setSuccess(true), + onError: (err) => setError(err instanceof Error ? err.message : 'Registration failed'), + }, + ); + }; + + if (success) { + return ( +
+
+
+ Account created successfully! +
+ + Go to Login + +
+
+ ); + } + + return ( +
+
+
+ +
+
+

Signal Dashboard

+

Create a new account

+
+ +
+ {error && ( +
+ {error} +
+ )} + +
+ + setUsername(e.target.value)} + className="w-full input-glass px-3 py-2.5 text-sm" + placeholder="Enter username" + /> + {validationErrors.username && ( +

{validationErrors.username}

+ )} +
+ +
+ + setPassword(e.target.value)} + className="w-full input-glass px-3 py-2.5 text-sm" + placeholder="Min 6 characters" + /> + {validationErrors.password && ( +

{validationErrors.password}

+ )} +
+ + +
+ +

+ Already have an account?{' '} + + Sign in + +

+
+
+ ); +} diff --git a/frontend/src/pages/ScannerPage.tsx b/frontend/src/pages/ScannerPage.tsx new file mode 100644 index 0000000..4dc8f93 --- /dev/null +++ b/frontend/src/pages/ScannerPage.tsx @@ -0,0 +1,125 @@ +import { useMemo, useState } from 'react'; +import { useTrades } from '../hooks/useTrades'; +import { TradeTable, type SortColumn, type SortDirection } from '../components/scanner/TradeTable'; +import { SkeletonTable } from '../components/ui/Skeleton'; +import type { TradeSetup } from '../lib/types'; + +type DirectionFilter = 'both' | 'long' | 'short'; + +function filterTrades( + trades: TradeSetup[], + minRR: number, + direction: DirectionFilter, +): TradeSetup[] { + return trades.filter((t) => { + if (t.rr_ratio < minRR) return false; + if (direction !== 'both' && t.direction !== direction) return false; + return true; + }); +} + +function sortTrades( + trades: TradeSetup[], + column: SortColumn, + direction: SortDirection, +): TradeSetup[] { + const sorted = [...trades].sort((a, b) => { + let cmp = 0; + switch (column) { + case 'symbol': + cmp = a.symbol.localeCompare(b.symbol); + break; + case 'direction': + cmp = a.direction.localeCompare(b.direction); + break; + case 'detected_at': + cmp = new Date(a.detected_at).getTime() - new Date(b.detected_at).getTime(); + break; + default: + cmp = (a[column] as number) - (b[column] as number); + } + return direction === 'asc' ? cmp : -cmp; + }); + return sorted; +} + +export default function ScannerPage() { + const { data: trades, isLoading, isError, error } = useTrades(); + + const [minRR, setMinRR] = useState(0); + const [directionFilter, setDirectionFilter] = useState('both'); + const [sortColumn, setSortColumn] = useState('rr_ratio'); + const [sortDirection, setSortDirection] = useState('desc'); + + const handleSort = (column: SortColumn) => { + if (column === sortColumn) { + setSortDirection((prev) => (prev === 'asc' ? 
'desc' : 'asc')); + } else { + setSortColumn(column); + setSortDirection('asc'); + } + }; + + const processed = useMemo(() => { + if (!trades) return []; + const filtered = filterTrades(trades, minRR, directionFilter); + return sortTrades(filtered, sortColumn, sortDirection); + }, [trades, minRR, directionFilter, sortColumn, sortDirection]); + + return ( +
+

Trade Scanner

+ + {/* Filter controls */} +
+
+ + setMinRR(Number(e.target.value) || 0)} + className="w-24 rounded border border-gray-700 bg-gray-800 px-3 py-1.5 text-sm text-gray-200 focus:border-blue-500 focus:outline-none transition-colors duration-150" + /> +
+
+ + +
+
+ + {/* Content */} + {isLoading && } + + {isError && ( +
+ {error instanceof Error ? error.message : 'Failed to load trade setups'} +
+ )} + + {trades && ( + + )} +
+ ); +} diff --git a/frontend/src/pages/TickerDetailPage.tsx b/frontend/src/pages/TickerDetailPage.tsx new file mode 100644 index 0000000..2c3af05 --- /dev/null +++ b/frontend/src/pages/TickerDetailPage.tsx @@ -0,0 +1,260 @@ +import { useMemo } from 'react'; +import { useParams } from 'react-router-dom'; +import { useMutation, useQueryClient } from '@tanstack/react-query'; +import { useTickerDetail } from '../hooks/useTickerDetail'; +import { CandlestickChart } from '../components/charts/CandlestickChart'; +import { ScoreCard } from '../components/ui/ScoreCard'; +import { SkeletonCard } from '../components/ui/Skeleton'; +import { SentimentPanel } from '../components/ticker/SentimentPanel'; +import { FundamentalsPanel } from '../components/ticker/FundamentalsPanel'; +import { IndicatorSelector } from '../components/ticker/IndicatorSelector'; +import { useToast } from '../components/ui/Toast'; +import { fetchData } from '../api/ingestion'; +import { formatPrice } from '../lib/format'; + +function SectionError({ message, onRetry }: { message: string; onRetry?: () => void }) { + return ( +
+

{message}

+ {onRetry && ( + + )} +
+ ); +} + +function timeAgo(iso: string): string { + const diff = Date.now() - new Date(iso).getTime(); + const mins = Math.floor(diff / 60_000); + if (mins < 1) return 'just now'; + if (mins < 60) return `${mins}m ago`; + const hrs = Math.floor(mins / 60); + if (hrs < 24) return `${hrs}h ago`; + const days = Math.floor(hrs / 24); + return `${days}d ago`; +} + +interface DataStatusItem { + label: string; + available: boolean; + timestamp?: string | null; +} + +function DataFreshnessBar({ items }: { items: DataStatusItem[] }) { + return ( +
+ {items.map((item) => ( +
+ + {item.label} + {item.available && item.timestamp && ( + {timeAgo(item.timestamp)} + )} + {!item.available && ( + no data + )} +
+ ))} +
+ ); +} + +export default function TickerDetailPage() { + const { symbol = '' } = useParams<{ symbol: string }>(); + const { ohlcv, scores, srLevels, sentiment, fundamentals } = useTickerDetail(symbol); + const queryClient = useQueryClient(); + const { addToast } = useToast(); + + const ingestion = useMutation({ + mutationFn: () => fetchData(symbol), + onSuccess: (result: any) => { + // Show per-source status breakdown + const sources = result?.sources; + if (sources) { + const parts: string[] = []; + for (const [name, info] of Object.entries(sources) as [string, any][]) { + const label = name.charAt(0).toUpperCase() + name.slice(1); + if (info.status === 'ok') { + parts.push(`${label} ✓`); + } else if (info.status === 'skipped') { + parts.push(`${label}: skipped (${info.message})`); + } else { + parts.push(`${label} ✗: ${info.message}`); + } + } + const hasError = Object.values(sources).some((s: any) => s.status === 'error'); + const hasSkip = Object.values(sources).some((s: any) => s.status === 'skipped'); + const toastType = hasError ? 'error' : hasSkip ? 
'info' : 'success'; + addToast(toastType, parts.join(' · ')); + } else { + addToast('success', `Data fetched for ${symbol.toUpperCase()}`); + } + queryClient.invalidateQueries({ queryKey: ['ohlcv', symbol] }); + queryClient.invalidateQueries({ queryKey: ['sentiment', symbol] }); + queryClient.invalidateQueries({ queryKey: ['fundamentals', symbol] }); + queryClient.invalidateQueries({ queryKey: ['sr-levels', symbol] }); + queryClient.invalidateQueries({ queryKey: ['scores', symbol] }); + }, + onError: (err: Error) => { + addToast('error', err.message || 'Failed to fetch data'); + }, + }); + + const dataStatus: DataStatusItem[] = useMemo(() => [ + { + label: 'OHLCV', + available: !!ohlcv.data && ohlcv.data.length > 0, + timestamp: ohlcv.data?.[ohlcv.data.length - 1]?.created_at, + }, + { + label: 'Sentiment', + available: !!sentiment.data && sentiment.data.count > 0, + timestamp: sentiment.data?.scores?.[0]?.timestamp, + }, + { + label: 'Fundamentals', + available: !!fundamentals.data && fundamentals.data.fetched_at !== null, + timestamp: fundamentals.data?.fetched_at, + }, + { + label: 'S/R Levels', + available: !!srLevels.data && srLevels.data.count > 0, + timestamp: srLevels.data?.levels?.[0]?.created_at, + }, + { + label: 'Scores', + available: !!scores.data && scores.data.composite_score !== null, + timestamp: scores.data?.computed_at, + }, + ], [ohlcv.data, sentiment.data, fundamentals.data, srLevels.data, scores.data]); + + // Sort S/R levels by strength for the table + const sortedLevels = useMemo(() => { + if (!srLevels.data?.levels) return []; + return [...srLevels.data.levels].sort((a, b) => b.strength - a.strength); + }, [srLevels.data]); + + return ( +
+ {/* Header */} +
+
+

{symbol.toUpperCase()}

+

Ticker Detail

+
+ +
+ + {/* Data freshness bar */} + + + {/* Chart Section */} +
+

Price Chart

+ {ohlcv.isLoading && } + {ohlcv.isError && ( + ohlcv.refetch()} + /> + )} + {ohlcv.data && ( +
+ + {srLevels.isError && ( +

S/R levels unavailable — chart shown without overlays

+ )} +
+ )} +
+ + {/* Scores + Side Panels */} +
+
+

Scores

+ {scores.isLoading && } + {scores.isError && ( + scores.refetch()} /> + )} + {scores.data && ( + ({ dimension: d.dimension, score: d.score }))} /> + )} +
+ +
+

Sentiment

+ {sentiment.isLoading && } + {sentiment.isError && ( + sentiment.refetch()} /> + )} + {sentiment.data && } +
+ +
+

Fundamentals

+ {fundamentals.isLoading && } + {fundamentals.isError && ( + fundamentals.refetch()} /> + )} + {fundamentals.data && } +
+
+ + {/* Indicators */} +
+

Technical Indicators

+ +
+ + {/* S/R Levels Table — sorted by strength */} + {sortedLevels.length > 0 && ( +
+

+ Support & Resistance Levels + sorted by strength +

+
+ + + + + + + + + + + {sortedLevels.map((level) => ( + + + + + + + ))} + +
TypePrice LevelStrengthMethod
+ {level.type} + {formatPrice(level.price_level)}{level.strength}{level.detection_method}
+
+
+ )} +
+ ); +} diff --git a/frontend/src/pages/WatchlistPage.tsx b/frontend/src/pages/WatchlistPage.tsx new file mode 100644 index 0000000..ac4d5d7 --- /dev/null +++ b/frontend/src/pages/WatchlistPage.tsx @@ -0,0 +1,30 @@ +import { useWatchlist } from '../hooks/useWatchlist'; +import { WatchlistTable } from '../components/watchlist/WatchlistTable'; +import { AddTickerForm } from '../components/watchlist/AddTickerForm'; +import { SkeletonTable } from '../components/ui/Skeleton'; + +export default function WatchlistPage() { + const { data, isLoading, isError, error } = useWatchlist(); + + return ( +
+
+
+

Watchlist

+

Track your favorite tickers

+
+ +
+ + {isLoading && } + + {isError && ( +
+ {error?.message || 'Failed to load watchlist'} +
+ )} + + {data && } +
+ ); +} diff --git a/frontend/src/stores/authStore.ts b/frontend/src/stores/authStore.ts new file mode 100644 index 0000000..1b61196 --- /dev/null +++ b/frontend/src/stores/authStore.ts @@ -0,0 +1,49 @@ +import { create } from 'zustand'; + +export interface AuthState { + token: string | null; + username: string | null; + role: 'admin' | 'user' | null; + login: (token: string) => void; + logout: () => void; +} + +function decodeJwtPayload(token: string): { sub?: string; role?: string } { + try { + const base64 = token.split('.')[1]; + const json = atob(base64); + return JSON.parse(json); + } catch { + return {}; + } +} + +export const useAuthStore = create()((set) => ({ + token: localStorage.getItem('token'), + username: (() => { + const t = localStorage.getItem('token'); + if (!t) return null; + return decodeJwtPayload(t).sub ?? null; + })(), + role: (() => { + const t = localStorage.getItem('token'); + if (!t) return null; + const r = decodeJwtPayload(t).role; + return r === 'admin' ? 'admin' : r === 'user' ? 'user' : null; + })(), + + login: (token: string) => { + const payload = decodeJwtPayload(token); + localStorage.setItem('token', token); + set({ + token, + username: payload.sub ?? null, + role: payload.role === 'admin' ? 
'admin' : 'user', + }); + }, + + logout: () => { + localStorage.removeItem('token'); + set({ token: null, username: null, role: null }); + }, +})); diff --git a/frontend/src/styles/globals.css b/frontend/src/styles/globals.css new file mode 100644 index 0000000..d6a696c --- /dev/null +++ b/frontend/src/styles/globals.css @@ -0,0 +1,134 @@ +@tailwind base; +@tailwind components; +@tailwind utilities; + +@layer base { + body { + background: #0a0e1a; + min-height: 100vh; + } + + /* Mesh gradient background */ + #root { + position: relative; + min-height: 100vh; + } + + #root::before { + content: ''; + position: fixed; + inset: 0; + z-index: -1; + background: + radial-gradient(ellipse 80% 60% at 10% 20%, rgba(56, 189, 248, 0.08) 0%, transparent 60%), + radial-gradient(ellipse 60% 50% at 80% 10%, rgba(139, 92, 246, 0.07) 0%, transparent 50%), + radial-gradient(ellipse 50% 40% at 50% 80%, rgba(16, 185, 129, 0.05) 0%, transparent 50%); + pointer-events: none; + } +} + +@layer components { + /* Glass card — the core building block */ + .glass { + background: rgba(255, 255, 255, 0.04); + backdrop-filter: blur(16px); + -webkit-backdrop-filter: blur(16px); + border: 1px solid rgba(255, 255, 255, 0.07); + border-radius: 1rem; + } + + .glass-sm { + background: rgba(255, 255, 255, 0.03); + backdrop-filter: blur(12px); + -webkit-backdrop-filter: blur(12px); + border: 1px solid rgba(255, 255, 255, 0.06); + border-radius: 0.75rem; + } + + .glass-hover { + transition: all 0.2s ease; + } + .glass-hover:hover { + background: rgba(255, 255, 255, 0.07); + border-color: rgba(255, 255, 255, 0.12); + box-shadow: 0 8px 32px rgba(0, 0, 0, 0.2); + } + + /* Gradient text */ + .text-gradient { + background: linear-gradient(135deg, #38bdf8, #818cf8, #a78bfa); + -webkit-background-clip: text; + -webkit-text-fill-color: transparent; + background-clip: text; + } + + /* Gradient buttons */ + .btn-gradient { + background: linear-gradient(135deg, #3b82f6, #8b5cf6); + color: white; + border: none; + 
border-radius: 0.5rem; + font-weight: 500; + transition: all 0.2s ease; + position: relative; + overflow: hidden; + } + .btn-gradient::before { + content: ''; + position: absolute; + inset: 0; + background: linear-gradient(135deg, #60a5fa, #a78bfa); + opacity: 0; + transition: opacity 0.2s ease; + } + .btn-gradient:hover::before { + opacity: 1; + } + .btn-gradient > * { + position: relative; + z-index: 1; + } + + /* Glow accent for active states */ + .glow-blue { + box-shadow: 0 0 20px rgba(59, 130, 246, 0.3), 0 0 60px rgba(59, 130, 246, 0.1); + } + + .glow-green { + box-shadow: 0 0 20px rgba(16, 185, 129, 0.3), 0 0 60px rgba(16, 185, 129, 0.1); + } + + /* Glass input */ + .input-glass { + background: rgba(255, 255, 255, 0.04); + border: 1px solid rgba(255, 255, 255, 0.1); + border-radius: 0.5rem; + color: #e2e8f0; + transition: all 0.2s ease; + } + .input-glass::placeholder { + color: rgba(148, 163, 184, 0.5); + } + .input-glass:focus { + outline: none; + border-color: rgba(99, 102, 241, 0.5); + box-shadow: 0 0 0 3px rgba(99, 102, 241, 0.15), 0 0 20px rgba(99, 102, 241, 0.1); + background: rgba(255, 255, 255, 0.06); + } + + /* Scrollbar styling */ + ::-webkit-scrollbar { + width: 6px; + height: 6px; + } + ::-webkit-scrollbar-track { + background: transparent; + } + ::-webkit-scrollbar-thumb { + background: rgba(255, 255, 255, 0.1); + border-radius: 3px; + } + ::-webkit-scrollbar-thumb:hover { + background: rgba(255, 255, 255, 0.2); + } +} diff --git a/frontend/src/vite-env.d.ts b/frontend/src/vite-env.d.ts new file mode 100644 index 0000000..11f02fe --- /dev/null +++ b/frontend/src/vite-env.d.ts @@ -0,0 +1 @@ +/// diff --git a/frontend/tailwind.config.ts b/frontend/tailwind.config.ts new file mode 100644 index 0000000..7f59519 --- /dev/null +++ b/frontend/tailwind.config.ts @@ -0,0 +1,44 @@ +import type { Config } from 'tailwindcss'; + +export default { + content: ['./index.html', './src/**/*.{ts,tsx}'], + darkMode: 'class', + theme: { + extend: { + fontFamily: { 
+ sans: ['Inter', 'system-ui', 'sans-serif'], + }, + colors: { + surface: { + DEFAULT: 'rgba(255, 255, 255, 0.05)', + hover: 'rgba(255, 255, 255, 0.08)', + active: 'rgba(255, 255, 255, 0.12)', + border: 'rgba(255, 255, 255, 0.08)', + }, + }, + backdropBlur: { + glass: '16px', + }, + animation: { + 'glow-pulse': 'glow-pulse 3s ease-in-out infinite', + 'fade-in': 'fade-in 0.3s ease-out', + 'slide-up': 'slide-up 0.4s ease-out', + }, + keyframes: { + 'glow-pulse': { + '0%, 100%': { opacity: '0.4' }, + '50%': { opacity: '0.8' }, + }, + 'fade-in': { + from: { opacity: '0' }, + to: { opacity: '1' }, + }, + 'slide-up': { + from: { opacity: '0', transform: 'translateY(12px)' }, + to: { opacity: '1', transform: 'translateY(0)' }, + }, + }, + }, + }, + plugins: [], +} satisfies Config; diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..30f5dc1 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + "target": "ES2020", + "useDefineForClassFields": true, + "lib": ["ES2020", "DOM", "DOM.Iterable"], + "module": "ESNext", + "skipLibCheck": true, + "moduleResolution": "bundler", + "allowImportingTsExtensions": true, + "isolatedModules": true, + "moduleDetection": "force", + "noEmit": true, + "jsx": "react-jsx", + "strict": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "noFallthroughCasesInSwitch": true, + "forceConsistentCasingInFileNames": true, + "resolveJsonModule": true, + "esModuleInterop": true + }, + "include": ["src"] +} diff --git a/frontend/tsconfig.tsbuildinfo b/frontend/tsconfig.tsbuildinfo new file mode 100644 index 0000000..b4e095a --- /dev/null +++ b/frontend/tsconfig.tsbuildinfo @@ -0,0 +1 @@ 
+{"root":["./src/app.tsx","./src/main.tsx","./src/vite-env.d.ts","./src/api/admin.ts","./src/api/auth.ts","./src/api/client.ts","./src/api/fundamentals.ts","./src/api/health.ts","./src/api/indicators.ts","./src/api/ingestion.ts","./src/api/ohlcv.ts","./src/api/scores.ts","./src/api/sentiment.ts","./src/api/sr-levels.ts","./src/api/tickers.ts","./src/api/trades.ts","./src/api/watchlist.ts","./src/components/admin/datacleanup.tsx","./src/components/admin/jobcontrols.tsx","./src/components/admin/settingsform.tsx","./src/components/admin/tickermanagement.tsx","./src/components/admin/usertable.tsx","./src/components/auth/protectedroute.tsx","./src/components/charts/candlestickchart.tsx","./src/components/layout/appshell.tsx","./src/components/layout/mobilenav.tsx","./src/components/layout/sidebar.tsx","./src/components/rankings/rankingstable.tsx","./src/components/rankings/weightsform.tsx","./src/components/scanner/tradetable.tsx","./src/components/ticker/fundamentalspanel.tsx","./src/components/ticker/indicatorselector.tsx","./src/components/ticker/sroverlay.tsx","./src/components/ticker/sentimentpanel.tsx","./src/components/ui/badge.tsx","./src/components/ui/confirmdialog.tsx","./src/components/ui/scorecard.tsx","./src/components/ui/skeleton.tsx","./src/components/ui/toast.tsx","./src/components/watchlist/addtickerform.tsx","./src/components/watchlist/watchlisttable.tsx","./src/hooks/useadmin.ts","./src/hooks/useauth.ts","./src/hooks/usescores.ts","./src/hooks/usetickerdetail.ts","./src/hooks/usetickers.ts","./src/hooks/usetrades.ts","./src/hooks/usewatchlist.ts","./src/lib/format.ts","./src/lib/types.ts","./src/pages/adminpage.tsx","./src/pages/loginpage.tsx","./src/pages/rankingspage.tsx","./src/pages/registerpage.tsx","./src/pages/scannerpage.tsx","./src/pages/tickerdetailpage.tsx","./src/pages/watchlistpage.tsx","./src/stores/authstore.ts"],"version":"5.6.3"} \ No newline at end of file diff --git a/frontend/vite.config.ts b/frontend/vite.config.ts new file mode 
100644 index 0000000..a50439f --- /dev/null +++ b/frontend/vite.config.ts @@ -0,0 +1,43 @@ +/** + * Production Nginx configuration snippet for signal.thiessen.io: + * + * server { + * listen 443 ssl; + * server_name signal.thiessen.io; + * + * root /path/to/frontend/dist; + * index index.html; + * + * # SPA client-side routing — fallback to index.html + * location / { + * try_files $uri $uri/ /index.html; + * } + * + * # Proxy API requests to FastAPI backend + * location /api/v1/ { + * proxy_pass http://127.0.0.1:8000; + * proxy_set_header Host $host; + * proxy_set_header X-Real-IP $remote_addr; + * proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; + * proxy_set_header X-Forwarded-Proto $scheme; + * } + * } + */ + +import { defineConfig } from 'vite'; +import react from '@vitejs/plugin-react'; + +export default defineConfig({ + plugins: [react()], + build: { + outDir: 'dist', + }, + server: { + proxy: { + '/api/v1/': { + target: 'http://127.0.0.1:8000', + changeOrigin: true, + }, + }, + }, +}); diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ed93774 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,39 @@ +[build-system] +requires = ["setuptools>=68.0", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "stock-data-backend" +version = "0.1.0" +description = "Investing-signal platform for NASDAQ stocks" +requires-python = ">=3.12" +dependencies = [ + "fastapi>=0.115.0", + "uvicorn[standard]>=0.30.0", + "sqlalchemy[asyncio]>=2.0.0", + "asyncpg>=0.30.0", + "alembic>=1.14.0", + "pydantic-settings>=2.0.0", + "python-jose[cryptography]>=3.3.0", + "passlib[bcrypt]>=1.7.4", + "apscheduler>=3.10.0", + "httpx>=0.27.0", + "alpaca-py>=0.30.0", + "google-genai>=1.0.0", +] + +[project.optional-dependencies] +dev = [ + "pytest>=8.0.0", + "pytest-asyncio>=0.24.0", + "hypothesis>=6.100.0", + "httpx>=0.27.0", + "aiosqlite>=0.20.0", +] + +[tool.setuptools.packages.find] +include = ["app*"] + +[tool.pytest.ini_options] 
+asyncio_mode = "auto" +testpaths = ["tests"] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..a4307f0 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,260 @@ +"""Shared test fixtures and hypothesis strategies for the stock-data-backend test suite.""" + +from __future__ import annotations + +import string +from datetime import date, datetime, timedelta, timezone +from typing import Any + +import pytest +from httpx import ASGITransport, AsyncClient +from hypothesis import strategies as st +from sqlalchemy.ext.asyncio import ( + AsyncSession, + async_sessionmaker, + create_async_engine, +) + +from app.database import Base +from app.providers.protocol import OHLCVData + +# --------------------------------------------------------------------------- +# Test database (SQLite in-memory, async via aiosqlite) +# --------------------------------------------------------------------------- + +TEST_DATABASE_URL = "sqlite+aiosqlite://" + +_test_engine = create_async_engine(TEST_DATABASE_URL, echo=False) +_test_session_factory = async_sessionmaker( + _test_engine, + class_=AsyncSession, + expire_on_commit=False, +) + + +@pytest.fixture(autouse=True) +async def _setup_db(): + """Create all tables before each test and drop them after.""" + async with _test_engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + yield + async with _test_engine.begin() as conn: + await conn.run_sync(Base.metadata.drop_all) + + +@pytest.fixture +async def db_session() -> AsyncSession: + """Provide a transactional DB session that rolls back after the test.""" + async with _test_session_factory() as session: + async with session.begin(): + yield session + await session.rollback() + + +# --------------------------------------------------------------------------- +# FastAPI test client +# 
--------------------------------------------------------------------------- + + +@pytest.fixture +async def client(db_session: AsyncSession) -> AsyncClient: + """Async HTTP test client wired to the FastAPI app with the test DB session.""" + from app.dependencies import get_db + from app.main import app + + async def _override_get_db(): + yield db_session + + app.dependency_overrides[get_db] = _override_get_db + transport = ASGITransport(app=app) + async with AsyncClient(transport=transport, base_url="http://test") as ac: + yield ac + app.dependency_overrides.clear() + + +# --------------------------------------------------------------------------- +# Mock MarketDataProvider +# --------------------------------------------------------------------------- + + +class MockMarketDataProvider: + """Configurable mock that satisfies the MarketDataProvider protocol.""" + + def __init__( + self, + ohlcv_data: list[OHLCVData] | None = None, + error: Exception | None = None, + ) -> None: + self.ohlcv_data = ohlcv_data or [] + self.error = error + self.calls: list[dict[str, Any]] = [] + + async def fetch_ohlcv( + self, ticker: str, start_date: date, end_date: date + ) -> list[OHLCVData]: + self.calls.append( + {"ticker": ticker, "start_date": start_date, "end_date": end_date} + ) + if self.error is not None: + raise self.error + return [r for r in self.ohlcv_data if r.ticker == ticker] + + +@pytest.fixture +def mock_provider() -> MockMarketDataProvider: + """Return a fresh MockMarketDataProvider instance.""" + return MockMarketDataProvider() + + +# --------------------------------------------------------------------------- +# Hypothesis custom strategies +# --------------------------------------------------------------------------- + +_TICKER_ALPHABET = string.ascii_uppercase + string.digits + + +@st.composite +def valid_ticker_symbols(draw: st.DrawFn) -> str: + """Generate uppercase alphanumeric ticker symbols (1-10 chars).""" + return draw( + st.text(alphabet=_TICKER_ALPHABET, 
min_size=1, max_size=10) + ) + + +@st.composite +def whitespace_strings(draw: st.DrawFn) -> str: + """Generate strings composed entirely of whitespace (including empty).""" + return draw( + st.text(alphabet=" \t\n\r\x0b\x0c", min_size=0, max_size=20) + ) + + +@st.composite +def valid_ohlcv_records(draw: st.DrawFn) -> OHLCVData: + """Generate valid OHLCV records (high >= low, prices >= 0, volume >= 0, date <= today).""" + ticker = draw(valid_ticker_symbols()) + low = draw(st.floats(min_value=0.01, max_value=10000.0, allow_nan=False, allow_infinity=False)) + high = draw(st.floats(min_value=low, max_value=10000.0, allow_nan=False, allow_infinity=False)) + open_ = draw(st.floats(min_value=low, max_value=high, allow_nan=False, allow_infinity=False)) + close = draw(st.floats(min_value=low, max_value=high, allow_nan=False, allow_infinity=False)) + volume = draw(st.integers(min_value=0, max_value=10**12)) + record_date = draw( + st.dates(min_value=date(2000, 1, 1), max_value=date.today()) + ) + return OHLCVData( + ticker=ticker, + date=record_date, + open=open_, + high=high, + low=low, + close=close, + volume=volume, + ) + + +@st.composite +def invalid_ohlcv_records(draw: st.DrawFn) -> OHLCVData: + """Generate OHLCV records that violate at least one constraint.""" + ticker = draw(valid_ticker_symbols()) + violation = draw(st.sampled_from(["high_lt_low", "negative_price", "negative_volume", "future_date"])) + + if violation == "high_lt_low": + high = draw(st.floats(min_value=0.01, max_value=100.0, allow_nan=False, allow_infinity=False)) + low = draw(st.floats(min_value=high + 0.01, max_value=200.0, allow_nan=False, allow_infinity=False)) + return OHLCVData( + ticker=ticker, date=date.today(), + open=high, high=high, low=low, close=high, volume=100, + ) + elif violation == "negative_price": + neg = draw(st.floats(min_value=-10000.0, max_value=-0.01, allow_nan=False, allow_infinity=False)) + return OHLCVData( + ticker=ticker, date=date.today(), + open=neg, high=abs(neg), 
low=abs(neg), close=abs(neg), volume=100, + ) + elif violation == "negative_volume": + price = draw(st.floats(min_value=0.01, max_value=100.0, allow_nan=False, allow_infinity=False)) + neg_vol = draw(st.integers(min_value=-10**9, max_value=-1)) + return OHLCVData( + ticker=ticker, date=date.today(), + open=price, high=price, low=price, close=price, volume=neg_vol, + ) + else: # future_date + future = date.today() + timedelta(days=draw(st.integers(min_value=1, max_value=365))) + price = draw(st.floats(min_value=0.01, max_value=100.0, allow_nan=False, allow_infinity=False)) + return OHLCVData( + ticker=ticker, date=future, + open=price, high=price, low=price, close=price, volume=100, + ) + + +_DIMENSIONS = ["technical", "sr_quality", "sentiment", "fundamental", "momentum"] + + +@st.composite +def dimension_scores(draw: st.DrawFn) -> float: + """Generate float values in [0, 100] for dimension scores.""" + return draw(st.floats(min_value=0.0, max_value=100.0, allow_nan=False, allow_infinity=False)) + + +@st.composite +def weight_configs(draw: st.DrawFn) -> dict[str, float]: + """Generate dicts of dimension → positive float weight.""" + dims = draw(st.lists(st.sampled_from(_DIMENSIONS), min_size=1, max_size=5, unique=True)) + weights: dict[str, float] = {} + for dim in dims: + weights[dim] = draw(st.floats(min_value=0.01, max_value=10.0, allow_nan=False, allow_infinity=False)) + return weights + + +@st.composite +def sr_levels(draw: st.DrawFn) -> dict[str, Any]: + """Generate SR level data (price, type, strength, detection_method).""" + return { + "price_level": draw(st.floats(min_value=0.01, max_value=10000.0, allow_nan=False, allow_infinity=False)), + "type": draw(st.sampled_from(["support", "resistance"])), + "strength": draw(st.integers(min_value=0, max_value=100)), + "detection_method": draw(st.sampled_from(["volume_profile", "pivot_point", "merged"])), + } + + +@st.composite +def sentiment_scores(draw: st.DrawFn) -> dict[str, Any]: + """Generate sentiment data 
(classification, confidence, source, timestamp).""" + naive_dt = draw( + st.datetimes( + min_value=datetime(2020, 1, 1), + max_value=datetime.now(), + ) + ) + return { + "classification": draw(st.sampled_from(["bullish", "bearish", "neutral"])), + "confidence": draw(st.integers(min_value=0, max_value=100)), + "source": draw(st.text(alphabet=string.ascii_lowercase, min_size=3, max_size=20)), + "timestamp": naive_dt.replace(tzinfo=timezone.utc), + } + + +@st.composite +def trade_setups(draw: st.DrawFn) -> dict[str, Any]: + """Generate trade setup data (direction, entry, stop, target, rr_ratio, composite_score).""" + direction = draw(st.sampled_from(["long", "short"])) + entry = draw(st.floats(min_value=1.0, max_value=10000.0, allow_nan=False, allow_infinity=False)) + atr_dist = draw(st.floats(min_value=0.01, max_value=entry * 0.2, allow_nan=False, allow_infinity=False)) + + if direction == "long": + stop = entry - atr_dist + target = entry + atr_dist * draw(st.floats(min_value=3.0, max_value=10.0, allow_nan=False, allow_infinity=False)) + else: + stop = entry + atr_dist + target = entry - atr_dist * draw(st.floats(min_value=3.0, max_value=10.0, allow_nan=False, allow_infinity=False)) + + rr_ratio = abs(target - entry) / abs(entry - stop) if abs(entry - stop) > 0 else 0.0 + + return { + "direction": direction, + "entry_price": entry, + "stop_loss": stop, + "target": target, + "rr_ratio": rr_ratio, + "composite_score": draw(st.floats(min_value=0.0, max_value=100.0, allow_nan=False, allow_infinity=False)), + } diff --git a/tests/property/__init__.py b/tests/property/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/property/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1 @@ + diff --git a/tests/unit/test_cache.py b/tests/unit/test_cache.py new file mode 100644 index 0000000..c667bb3 --- /dev/null +++ 
# (diff header continuation: b/tests/unit/test_cache.py  @@ -0,0 +1,102 @@)
"""Unit tests for app.cache LRU cache wrapper."""

from datetime import date

from app.cache import LRUCache


def _key(ticker: str, indicator: str = "RSI") -> tuple:
    """Canonical cache key for *ticker* over a fixed Jan-Jun 2024 window."""
    return (ticker, date(2024, 1, 1), date(2024, 6, 1), indicator)


class TestLRUCacheBasics:
    """get/set round trips, overwrites, size and clear."""

    def test_get_miss_returns_none(self):
        assert LRUCache().get(_key("AAPL")) is None

    def test_set_and_get_round_trip(self):
        cache = LRUCache()
        cache.set(_key("AAPL"), {"score": 72})
        assert cache.get(_key("AAPL")) == {"score": 72}

    def test_set_overwrites_existing(self):
        cache = LRUCache()
        for value in (1, 2):
            cache.set(_key("AAPL"), value)
        assert cache.get(_key("AAPL")) == 2
        assert cache.size == 1

    def test_size_and_clear(self):
        cache = LRUCache(max_size=5)
        for n in range(3):
            cache.set((f"T{n}", None, None, "RSI"), n)
        assert cache.size == 3
        cache.clear()
        assert cache.size == 0


class TestLRUEviction:
    """Least-recently-used entries are evicted once the cache is full."""

    def _full_cache(self) -> LRUCache:
        # Three entries at capacity 3; insertion order A, B, C → A is LRU.
        cache = LRUCache(max_size=3)
        for name, value in (("A", 1), ("B", 2), ("C", 3)):
            cache.set(_key(name), value)
        return cache

    def test_evicts_lru_when_full(self):
        cache = self._full_cache()
        cache.set(_key("D"), 4)  # A is LRU — inserting D evicts A
        assert cache.get(_key("A")) is None
        assert cache.size == 3

    def test_access_promotes_entry(self):
        cache = self._full_cache()
        cache.get(_key("A"))  # promote A so B becomes LRU
        cache.set(_key("D"), 4)
        assert cache.get(_key("B")) is None
        assert cache.get(_key("A")) == 1

    def test_update_promotes_entry(self):
        cache = self._full_cache()
        cache.set(_key("A"), 10)  # updating promotes too; B becomes LRU
        cache.set(_key("D"), 4)
        assert cache.get(_key("B")) is None
        assert cache.get(_key("A")) == 10


class TestTickerInvalidation:
    """invalidate_ticker removes every entry for a ticker and reports the count."""

    def test_invalidate_removes_all_entries_for_ticker(self):
        cache = LRUCache()
        cache.set(_key("AAPL", "RSI"), 1)
        cache.set(_key("AAPL", "ADX"), 2)
        cache.set(_key("MSFT", "RSI"), 3)
        assert cache.invalidate_ticker("AAPL") == 2
        assert cache.get(_key("AAPL", "RSI")) is None
        assert cache.get(_key("AAPL", "ADX")) is None
        assert cache.get(_key("MSFT", "RSI")) == 3

    def test_invalidate_nonexistent_ticker_returns_zero(self):
        cache = LRUCache()
        cache.set(_key("AAPL"), 1)
        assert cache.invalidate_ticker("GOOG") == 0
        assert cache.size == 1

    def test_invalidate_on_empty_cache(self):
        assert LRUCache().invalidate_ticker("AAPL") == 0


class TestMaxSizeProperty:
    def test_default_max_size(self):
        assert LRUCache().max_size == 1000

    def test_custom_max_size(self):
        assert LRUCache(max_size=50).max_size == 50

# --- diff metadata (preserved from the patch) ---
# diff --git a/tests/unit/test_exceptions_and_middleware.py
#          b/tests/unit/test_exceptions_and_middleware.py   (new file)
"""Tests for the exception hierarchy and global exception handlers."""

import pytest
from fastapi import FastAPI
from fastapi.testclient import TestClient

from app.exceptions import (
    AppError,
    AuthenticationError,
    AuthorizationError,
    DuplicateError,
    NotFoundError,
    ProviderError,
    RateLimitError,
    ValidationError,
)
from app.middleware import register_exception_handlers
from app.schemas.common import APIEnvelope


# ── Exception hierarchy tests ──


def test_app_error_defaults():
    err = AppError()
    assert err.status_code == 500
    assert err.message == "Internal server error"
    assert str(err) == "Internal server error"


def test_app_error_custom_message():
    err = AppError("something broke")
    assert err.message == "something broke"
    assert str(err) == "something broke"


@pytest.mark.parametrize(
    "cls,code,default_msg",
    [
        (ValidationError, 400, "Validation error"),
        (NotFoundError, 404, "Resource not found"),
        (DuplicateError, 409, "Resource already exists"),
        (AuthenticationError, 401, "Authentication required"),
        (AuthorizationError, 403, "Insufficient permissions"),
        (ProviderError, 502, "Market data provider unavailable"),
        (RateLimitError, 429, "Rate limited"),
    ],
)
def test_subclass_defaults(cls, code, default_msg):
    err = cls()
    assert (err.status_code, err.message) == (code, default_msg)


def test_subclass_custom_message():
    err = NotFoundError("Ticker not found: AAPL")
    assert err.status_code == 404
    assert err.message == "Ticker not found: AAPL"


def test_all_subclasses_are_app_errors():
    subclasses = (
        ValidationError,
        NotFoundError,
        DuplicateError,
        AuthenticationError,
        AuthorizationError,
        ProviderError,
        RateLimitError,
    )
    assert all(issubclass(cls, AppError) for cls in subclasses)


# ── APIEnvelope schema tests ──


def test_envelope_success():
    env = APIEnvelope(status="success", data={"id": 1})
    assert (env.status, env.data, env.error) == ("success", {"id": 1}, None)


def test_envelope_error():
    env = APIEnvelope(status="error", error="bad request")
    assert (env.status, env.data, env.error) == ("error", None, "bad request")


# ── Middleware integration tests ──


def _make_app() -> FastAPI:
    """Create a minimal FastAPI app with exception handlers and test routes."""
    app = FastAPI()
    register_exception_handlers(app)

    # One GET route per exception; each raises a freshly built instance.
    routes = {
        "/raise-not-found": lambda: NotFoundError("Ticker not found: XYZ"),
        "/raise-validation": lambda: ValidationError("high < low"),
        "/raise-duplicate": lambda: DuplicateError("Ticker already exists: AAPL"),
        "/raise-auth": AuthenticationError,
        "/raise-authz": AuthorizationError,
        "/raise-provider": ProviderError,
        "/raise-rate-limit": lambda: RateLimitError("Rate limited. Ingested 42 records. Resume available."),
        "/raise-unhandled": lambda: RuntimeError("unexpected"),
    }
    for path, make_exc in routes.items():
        async def _endpoint(make_exc=make_exc):  # default arg pins the factory
            raise make_exc()
        app.get(path)(_endpoint)

    return app


@pytest.fixture
def client():
    return TestClient(_make_app())


def _error_body(resp) -> dict:
    """Decode a response and assert the envelope reports an error."""
    body = resp.json()
    assert body["status"] == "error"
    return body


def test_middleware_not_found(client):
    resp = client.get("/raise-not-found")
    assert resp.status_code == 404
    body = _error_body(resp)
    assert body["data"] is None
    assert body["error"] == "Ticker not found: XYZ"


def test_middleware_validation(client):
    resp = client.get("/raise-validation")
    assert resp.status_code == 400
    assert _error_body(resp)["error"] == "high < low"


def test_middleware_duplicate(client):
    resp = client.get("/raise-duplicate")
    assert resp.status_code == 409
    assert "already exists" in _error_body(resp)["error"]


def test_middleware_authentication(client):
    resp = client.get("/raise-auth")
    assert resp.status_code == 401
    _error_body(resp)


def test_middleware_authorization(client):
    resp = client.get("/raise-authz")
    assert resp.status_code == 403
    _error_body(resp)


def test_middleware_provider_error(client):
    resp = client.get("/raise-provider")
    assert resp.status_code == 502
    _error_body(resp)


def test_middleware_rate_limit(client):
    resp = client.get("/raise-rate-limit")
    assert resp.status_code == 429
    assert "42 records" in _error_body(resp)["error"]


def test_middleware_unhandled_exception():
    # raise_server_exceptions=False lets the 500 handler respond instead of re-raising.
    with TestClient(_make_app(), raise_server_exceptions=False) as c:
        resp = c.get("/raise-unhandled")
    assert resp.status_code == 500
    body = resp.json()
    assert body["status"] == "error"
    assert body["data"] is None
    assert body["error"] == "Internal server error"

# --- diff metadata (preserved from the patch) ---
# diff --git a/tests/unit/test_indicator_service.py
#          b/tests/unit/test_indicator_service.py   (new file)
"""Unit tests for app.services.indicator_service pure computation functions."""

import pytest

from app.exceptions import ValidationError
from app.services.indicator_service import (
    compute_adx,
    compute_atr,
    compute_ema,
    compute_ema_cross,
    compute_pivot_points,
    compute_rsi,
    compute_volume_profile,
)


# --- helpers: synthetic OHLCV data -----------------------------------------

def _rising_closes(n: int, start: float = 100.0, step: float = 1.0) -> list[float]:
    """Monotonically increasing close series of length *n*."""
    return [start + i * step for i in range(n)]


def _flat_closes(n: int, price: float = 100.0) -> list[float]:
    """Constant close series of length *n*."""
    return [price for _ in range(n)]


def _ohlcv_from_closes(closes: list[float], spread: float = 2.0):
    """Derive highs/lows/volumes from a close series with a fixed spread."""
    highs = [c + spread for c in closes]
    lows = [c - spread for c in closes]
    return highs, lows, closes, [1000] * len(closes)


# --- EMA --------------------------------------------------------------------

class TestComputeEMA:
    def test_basic_ema(self):
        result = compute_ema(_rising_closes(25), period=20)
        assert "ema" in result
        assert "score" in result
        assert 0 <= result["score"] <= 100
test_insufficient_data_raises(self): + closes = _rising_closes(5) + with pytest.raises(ValidationError, match="EMA.*requires at least"): + compute_ema(closes, period=20) + + def test_price_above_ema_high_score(self): + # Rising prices → latest close above EMA → score > 50 + closes = _rising_closes(30, start=100, step=2) + result = compute_ema(closes, period=20) + assert result["score"] > 50 + + def test_price_below_ema_low_score(self): + # Falling prices → latest close below EMA → score < 50 + closes = list(reversed(_rising_closes(30, start=100, step=2))) + result = compute_ema(closes, period=20) + assert result["score"] < 50 + + +# --------------------------------------------------------------------------- +# RSI +# --------------------------------------------------------------------------- + +class TestComputeRSI: + def test_basic_rsi(self): + closes = _rising_closes(20) + result = compute_rsi(closes) + assert "rsi" in result + assert 0 <= result["score"] <= 100 + + def test_all_gains_rsi_100(self): + closes = _rising_closes(20, step=1) + result = compute_rsi(closes) + assert result["rsi"] == 100.0 + + def test_all_losses_rsi_0(self): + closes = list(reversed(_rising_closes(20, step=1))) + result = compute_rsi(closes) + assert result["rsi"] == pytest.approx(0.0, abs=0.5) + + def test_insufficient_data_raises(self): + with pytest.raises(ValidationError, match="RSI requires"): + compute_rsi([100.0] * 5) + + +# --------------------------------------------------------------------------- +# ATR +# --------------------------------------------------------------------------- + +class TestComputeATR: + def test_basic_atr(self): + closes = _rising_closes(20) + highs, lows, _, _ = _ohlcv_from_closes(closes) + result = compute_atr(highs, lows, closes) + assert "atr" in result + assert result["atr"] > 0 + assert 0 <= result["score"] <= 100 + + def test_insufficient_data_raises(self): + closes = [100.0] * 5 + highs, lows, _, _ = _ohlcv_from_closes(closes) + with 
pytest.raises(ValidationError, match="ATR requires"): + compute_atr(highs, lows, closes) + + +# --------------------------------------------------------------------------- +# ADX +# --------------------------------------------------------------------------- + +class TestComputeADX: + def test_basic_adx(self): + closes = _rising_closes(30) + highs, lows, _, _ = _ohlcv_from_closes(closes) + result = compute_adx(highs, lows, closes) + assert "adx" in result + assert "plus_di" in result + assert "minus_di" in result + assert 0 <= result["score"] <= 100 + + def test_insufficient_data_raises(self): + closes = _rising_closes(10) + highs, lows, _, _ = _ohlcv_from_closes(closes) + with pytest.raises(ValidationError, match="ADX requires"): + compute_adx(highs, lows, closes) + + +# --------------------------------------------------------------------------- +# Volume Profile +# --------------------------------------------------------------------------- + +class TestComputeVolumeProfile: + def test_basic_volume_profile(self): + closes = _rising_closes(25) + highs, lows, _, volumes = _ohlcv_from_closes(closes) + result = compute_volume_profile(highs, lows, closes, volumes) + assert "poc" in result + assert "value_area_low" in result + assert "value_area_high" in result + assert "hvn" in result + assert "lvn" in result + assert 0 <= result["score"] <= 100 + + def test_insufficient_data_raises(self): + closes = [100.0] * 10 + highs, lows, _, volumes = _ohlcv_from_closes(closes) + with pytest.raises(ValidationError, match="Volume Profile requires"): + compute_volume_profile(highs, lows, closes, volumes) + + +# --------------------------------------------------------------------------- +# Pivot Points +# --------------------------------------------------------------------------- + +class TestComputePivotPoints: + def test_basic_pivot_points(self): + # Create data with clear swing highs/lows + closes = [10, 15, 20, 15, 10, 15, 20, 15, 10, 15] + highs = [c + 1 for c in closes] + lows 
= [c - 1 for c in closes] + result = compute_pivot_points(highs, lows, closes) + assert "swing_highs" in result + assert "swing_lows" in result + assert 0 <= result["score"] <= 100 + + def test_insufficient_data_raises(self): + with pytest.raises(ValidationError, match="Pivot Points requires"): + compute_pivot_points([1, 2], [0, 1], [0.5, 1.5]) + + +# --------------------------------------------------------------------------- +# EMA Cross +# --------------------------------------------------------------------------- + +class TestComputeEMACross: + def test_bullish_signal(self): + # Rising prices → short EMA > long EMA → bullish + closes = _rising_closes(60, step=2) + result = compute_ema_cross(closes, short_period=20, long_period=50) + assert result["signal"] == "bullish" + assert result["short_ema"] > result["long_ema"] + + def test_bearish_signal(self): + # Falling prices → short EMA < long EMA → bearish + closes = list(reversed(_rising_closes(60, step=2))) + result = compute_ema_cross(closes, short_period=20, long_period=50) + assert result["signal"] == "bearish" + assert result["short_ema"] < result["long_ema"] + + def test_neutral_signal(self): + # Flat prices → EMAs converge → neutral + closes = _flat_closes(60) + result = compute_ema_cross(closes, short_period=20, long_period=50) + assert result["signal"] == "neutral" + + def test_insufficient_data_raises(self): + closes = _rising_closes(30) + with pytest.raises(ValidationError, match="EMA Cross requires"): + compute_ema_cross(closes, short_period=20, long_period=50) diff --git a/tests/unit/test_scheduler.py b/tests/unit/test_scheduler.py new file mode 100644 index 0000000..6c1e8ad --- /dev/null +++ b/tests/unit/test_scheduler.py @@ -0,0 +1,95 @@ +"""Unit tests for app.scheduler module.""" + +import pytest + +from app.scheduler import ( + _is_job_enabled, + _parse_frequency, + _resume_tickers, + _last_successful, + configure_scheduler, + scheduler, +) + + +class TestParseFrequency: + def test_hourly(self): + 
class TestParseFrequency:
    """_parse_frequency maps a frequency label to interval kwargs."""

    def test_hourly(self):
        assert _parse_frequency("hourly") == {"hours": 1}

    def test_daily(self):
        assert _parse_frequency("daily") == {"hours": 24}

    def test_case_insensitive(self):
        assert _parse_frequency("Hourly") == {"hours": 1}
        assert _parse_frequency("DAILY") == {"hours": 24}

    def test_unknown_defaults_to_daily(self):
        for label in ("weekly", ""):
            assert _parse_frequency(label) == {"hours": 24}


class TestResumeTickers:
    """_resume_tickers rotates the symbol list to resume after the last success."""

    def test_no_previous_returns_full_list(self):
        _last_successful["test_job"] = None
        assert _resume_tickers(["AAPL", "GOOG", "MSFT"], "test_job") == ["AAPL", "GOOG", "MSFT"]

    def test_resume_after_first(self):
        _last_successful["test_job"] = "AAPL"
        # Starts after AAPL, then wraps around.
        assert _resume_tickers(["AAPL", "GOOG", "MSFT"], "test_job") == ["GOOG", "MSFT", "AAPL"]

    def test_resume_after_middle(self):
        _last_successful["test_job"] = "GOOG"
        assert _resume_tickers(["AAPL", "GOOG", "MSFT", "TSLA"], "test_job") == [
            "MSFT",
            "TSLA",
            "AAPL",
            "GOOG",
        ]

    def test_resume_after_last(self):
        _last_successful["test_job"] = "MSFT"
        # Everything was processed; wraps back to the full list.
        assert _resume_tickers(["AAPL", "GOOG", "MSFT"], "test_job") == ["AAPL", "GOOG", "MSFT"]

    def test_unknown_last_returns_full_list(self):
        _last_successful["test_job"] = "NVDA"
        assert _resume_tickers(["AAPL", "GOOG", "MSFT"], "test_job") == ["AAPL", "GOOG", "MSFT"]

    def test_empty_list(self):
        _last_successful["test_job"] = "AAPL"
        assert _resume_tickers([], "test_job") == []


# The four job ids configure_scheduler is expected to register.
_EXPECTED_JOB_IDS = {
    "data_collector",
    "sentiment_collector",
    "fundamental_collector",
    "rr_scanner",
}


class TestConfigureScheduler:
    def test_configure_adds_four_jobs(self):
        scheduler.remove_all_jobs()  # start from a clean slate
        configure_scheduler()
        assert {job.id for job in scheduler.get_jobs()} == _EXPECTED_JOB_IDS

    def test_configure_is_idempotent(self):
        scheduler.remove_all_jobs()
        configure_scheduler()
        configure_scheduler()  # must replace jobs, not duplicate them
        job_ids = [job.id for job in scheduler.get_jobs()]
        # Each ID appears exactly once.
        assert sorted(job_ids) == sorted(_EXPECTED_JOB_IDS)