first commit
Some checks failed
Deploy / lint (push) Failing after 7s
Deploy / test (push) Has been skipped
Deploy / deploy (push) Has been skipped

This commit is contained in:
Dennis Thiessen
2026-02-20 17:31:01 +01:00
commit 61ab24490d
160 changed files with 17034 additions and 0 deletions

67
alembic/env.py Normal file
View File

@@ -0,0 +1,67 @@
import asyncio
from logging.config import fileConfig
from alembic import context
from sqlalchemy import pool
from sqlalchemy.ext.asyncio import async_engine_from_config
from app.config import settings
from app.database import Base
# Import all models so they register with Base.metadata
import app.models # noqa: F401
# Alembic Config object; provides access to values in alembic.ini.
config = context.config

# Configure Python logging from the ini file, if one is present.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# Override sqlalchemy.url with the app's database URL
config.set_main_option("sqlalchemy.url", settings.database_url)

# Metadata target for 'autogenerate' diff support (models imported above).
target_metadata = Base.metadata
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the context with just a URL (no Engine/DBAPI needed) so
    that ``context.run_migrations()`` emits SQL to the script output
    instead of executing it against a live database.
    """
    context.configure(
        url=config.get_main_option("sqlalchemy.url"),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection):
    """Bind the Alembic context to *connection* and run the migrations.

    Invoked synchronously via ``AsyncConnection.run_sync`` from
    ``run_async_migrations``.
    """
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )

    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Run migrations in 'online' mode using an async engine.

    Builds the engine from the ``[alembic]`` ini section (with the
    ``sqlalchemy.url`` override applied at import time) and hands a live
    connection to :func:`do_run_migrations`.
    """
    ini_section = config.get_section(config.config_ini_section, {})
    engine = async_engine_from_config(
        ini_section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    async with engine.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await engine.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode.

    Bridges Alembic's synchronous entry point into the async workflow by
    driving run_async_migrations() on a fresh event loop.
    """
    asyncio.run(run_async_migrations())
# Entry point: Alembic sets offline mode for `alembic upgrade --sql`;
# otherwise run against the live database.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

26
alembic/script.py.mako Normal file
View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}
def upgrade() -> None:
${upgrades if upgrades else "pass"}
def downgrade() -> None:
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,180 @@
"""initial_schema
Revision ID: 001
Revises:
Create Date: 2025-01-01 00:00:00.000000
"""
from typing import Sequence, Union
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision: str = "001"
down_revision: Union[str, None] = None
branch_labels: Union[str, Sequence[str], None] = None
depends_on: Union[str, Sequence[str], None] = None
def upgrade() -> None:
    """Create the full initial schema.

    Tables are created parents-first (tickers and users before anything
    that references them) so every ForeignKeyConstraint resolves at
    creation time. All ticker-child tables cascade-delete with their
    ticker row.
    """
    # Independent tables (no foreign keys)
    op.create_table(
        "system_settings",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("key", sa.String(length=100), nullable=False),
        sa.Column("value", sa.Text(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("key"),
    )
    op.create_table(
        "tickers",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("symbol", sa.String(length=10), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("symbol"),
    )
    op.create_table(
        "users",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("username", sa.String(length=100), nullable=False),
        sa.Column("password_hash", sa.String(length=255), nullable=False),
        sa.Column("role", sa.String(length=20), nullable=False),
        sa.Column("has_access", sa.Boolean(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("username"),
    )
    # Tables with FK to tickers
    op.create_table(
        "composite_scores",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("score", sa.Float(), nullable=False),
        sa.Column("is_stale", sa.Boolean(), nullable=False),
        # serialized weighting used at compute time (stored as raw text)
        sa.Column("weights_json", sa.Text(), nullable=False),
        sa.Column("computed_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "dimension_scores",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("dimension", sa.String(length=50), nullable=False),
        sa.Column("score", sa.Float(), nullable=False),
        sa.Column("is_stale", sa.Boolean(), nullable=False),
        sa.Column("computed_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "fundamental_data",
        sa.Column("id", sa.Integer(), nullable=False),
        # fundamentals are nullable: not every ticker reports every metric
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("pe_ratio", sa.Float(), nullable=True),
        sa.Column("revenue_growth", sa.Float(), nullable=True),
        sa.Column("earnings_surprise", sa.Float(), nullable=True),
        sa.Column("market_cap", sa.Float(), nullable=True),
        sa.Column("fetched_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "ingestion_progress",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("last_ingested_date", sa.Date(), nullable=False),
        sa.Column("updated_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # one progress row per ticker
        sa.UniqueConstraint("ticker_id", name="uq_ingestion_progress_ticker"),
    )
    op.create_table(
        "ohlcv_records",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("date", sa.Date(), nullable=False),
        sa.Column("open", sa.Float(), nullable=False),
        sa.Column("high", sa.Float(), nullable=False),
        sa.Column("low", sa.Float(), nullable=False),
        sa.Column("close", sa.Float(), nullable=False),
        sa.Column("volume", sa.BigInteger(), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # one candle per ticker per day
        sa.UniqueConstraint("ticker_id", "date", name="uq_ohlcv_ticker_date"),
    )
    # NOTE(review): uq_ohlcv_ticker_date already backs (ticker_id, date) with a
    # unique index on most backends, so this extra non-unique index looks
    # redundant — confirm it is intentionally kept.
    op.create_index("ix_ohlcv_ticker_date", "ohlcv_records", ["ticker_id", "date"], unique=False)
    op.create_table(
        "sentiment_scores",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("classification", sa.String(length=20), nullable=False),
        sa.Column("confidence", sa.Integer(), nullable=False),
        sa.Column("source", sa.String(length=100), nullable=False),
        sa.Column("timestamp", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "sr_levels",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("price_level", sa.Float(), nullable=False),
        sa.Column("type", sa.String(length=20), nullable=False),
        sa.Column("strength", sa.Integer(), nullable=False),
        sa.Column("detection_method", sa.String(length=50), nullable=False),
        sa.Column("created_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "trade_setups",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("direction", sa.String(length=10), nullable=False),
        sa.Column("entry_price", sa.Float(), nullable=False),
        sa.Column("stop_loss", sa.Float(), nullable=False),
        sa.Column("target", sa.Float(), nullable=False),
        sa.Column("rr_ratio", sa.Float(), nullable=False),
        sa.Column("composite_score", sa.Float(), nullable=False),
        sa.Column("detected_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
    )
    # Table with FKs to both users and tickers
    op.create_table(
        "watchlist_entries",
        sa.Column("id", sa.Integer(), nullable=False),
        sa.Column("user_id", sa.Integer(), nullable=False),
        sa.Column("ticker_id", sa.Integer(), nullable=False),
        sa.Column("entry_type", sa.String(length=10), nullable=False),
        sa.Column("added_at", sa.DateTime(timezone=True), nullable=False),
        sa.ForeignKeyConstraint(["ticker_id"], ["tickers.id"], ondelete="CASCADE"),
        sa.ForeignKeyConstraint(["user_id"], ["users.id"], ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("id"),
        # a user may watch a ticker at most once
        sa.UniqueConstraint("user_id", "ticker_id", name="uq_watchlist_user_ticker"),
    )
def downgrade() -> None:
    """Drop the entire initial schema, reversing upgrade().

    Tables are removed children-first so no foreign-key reference is left
    dangling; the explicit ohlcv index is dropped before its table.
    """
    for child_table in ("watchlist_entries", "trade_setups", "sr_levels", "sentiment_scores"):
        op.drop_table(child_table)

    op.drop_index("ix_ohlcv_ticker_date", table_name="ohlcv_records")

    for table in (
        "ohlcv_records",
        "ingestion_progress",
        "fundamental_data",
        "dimension_scores",
        "composite_scores",
        "users",
        "tickers",
        "system_settings",
    ):
        op.drop_table(table)