"""S/R Detector service.

Detects support/resistance levels from Volume Profile (HVN/LVN) and
Pivot Points (swing highs/lows), assigns strength scores, merges nearby
levels, tags as support/resistance, and persists to DB.
"""

from __future__ import annotations

from datetime import datetime

from sqlalchemy import delete, select
from sqlalchemy.ext.asyncio import AsyncSession

from app.exceptions import NotFoundError, ValidationError
from app.models.sr_level import SRLevel
from app.models.ticker import Ticker
from app.services.indicator_service import (
    _extract_ohlcv,
    compute_pivot_points,
    compute_volume_profile,
)
from app.services.price_service import query_ohlcv

DEFAULT_TOLERANCE = 0.005  # 0.5%

async def _get_ticker(db: AsyncSession, symbol: str) -> Ticker:
    """Resolve *symbol* (case-insensitively) to its Ticker row.

    Raises:
        NotFoundError: if no ticker matches the normalised symbol.
    """
    normalised = symbol.strip().upper()
    lookup = await db.execute(select(Ticker).where(Ticker.symbol == normalised))
    found = lookup.scalar_one_or_none()
    if found is None:
        raise NotFoundError(f"Ticker not found: {normalised}")
    return found

def _count_price_touches(
    price_level: float,
    highs: list[float],
    lows: list[float],
    closes: list[float],
    tolerance: float = DEFAULT_TOLERANCE,
) -> int:
    """Count bars whose high/low range contains *price_level* within tolerance.

    The tolerance band is relative to the level itself (absolute when the
    level is exactly zero, to avoid a zero-width band).
    """
    tol = tolerance if price_level == 0 else price_level * tolerance
    # A bar "touches" the level when the level falls inside the bar's
    # [low - tol, high + tol] range.
    return sum(
        1
        for idx in range(len(closes))
        if lows[idx] - tol <= price_level <= highs[idx] + tol
    )

def _strength_from_touches(touches: int, total_bars: int) -> int:
|
|
"""Convert touch count to a 0-100 strength score.
|
|
|
|
More touches relative to total bars = higher strength.
|
|
Cap at 100.
|
|
"""
|
|
if total_bars == 0:
|
|
return 0
|
|
# Scale: each touch contributes proportionally, with a multiplier
|
|
# so that a level touched ~20% of bars gets score ~100
|
|
raw = (touches / total_bars) * 500.0
|
|
return max(0, min(100, int(round(raw))))
|
|
|
|
|
|
def _extract_candidate_levels(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    volumes: list[int],
) -> list[tuple[float, str]]:
    """Collect candidate S/R prices from volume-profile nodes and pivots.

    Each candidate is a ``(price_level, detection_method)`` pair. A
    ValidationError from either indicator (insufficient data) simply
    contributes no candidates from that source.
    """
    found: list[tuple[float, str]] = []

    # Volume Profile: high- and low-volume nodes become candidates.
    try:
        profile = compute_volume_profile(highs, lows, closes, volumes)
    except ValidationError:
        profile = {}  # not enough data for a volume profile
    for key in ("hvn", "lvn"):
        found.extend((price, "volume_profile") for price in profile.get(key, []))

    # Pivot Points: swing highs/lows become candidates.
    try:
        pivots = compute_pivot_points(highs, lows, closes)
    except ValidationError:
        pivots = {}  # not enough data for pivot points
    for key in ("swing_highs", "swing_lows"):
        found.extend((price, "pivot_point") for price in pivots.get(key, []))

    return found

def _merge_levels(
    levels: list[dict],
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[dict]:
    """Consolidate levels whose prices fall within *tolerance* of each other.

    Levels are walked in ascending price order; each one either folds into
    the previous consolidated level (averaged price rounded to 4 dp, summed
    strength capped at 100, method set to "merged" when the sources differ)
    or starts a new consolidated level. Input dicts are never mutated.
    """
    if not levels:
        return []

    consolidated: list[dict] = []
    for candidate in sorted(levels, key=lambda lvl: lvl["price_level"]):
        if consolidated:
            anchor = consolidated[-1]
            anchor_price = anchor["price_level"]
            # Band is relative to the anchor price (absolute at zero).
            tol = anchor_price * tolerance if anchor_price != 0 else tolerance
            if abs(candidate["price_level"] - anchor_price) <= tol:
                # Fold the candidate into the anchor level.
                anchor["price_level"] = round(
                    (anchor_price + candidate["price_level"]) / 2.0, 4
                )
                anchor["strength"] = min(
                    100, anchor["strength"] + candidate["strength"]
                )
                if anchor["detection_method"] != candidate["detection_method"]:
                    anchor["detection_method"] = "merged"
                continue
        consolidated.append(dict(candidate))

    return consolidated

def _tag_levels(
|
|
levels: list[dict],
|
|
current_price: float,
|
|
) -> list[dict]:
|
|
"""Tag each level as 'support' or 'resistance' relative to current price."""
|
|
for level in levels:
|
|
if level["price_level"] < current_price:
|
|
level["type"] = "support"
|
|
else:
|
|
level["type"] = "resistance"
|
|
return levels
|
|
|
|
|
|
def detect_sr_levels(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    volumes: list[int],
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[dict]:
    """Run the full S/R pipeline: extract, score, merge, tag, rank.

    Returns dicts with keys ``price_level``, ``type``, ``strength`` and
    ``detection_method``, ordered by descending strength. Empty OHLCV input
    or an empty candidate set yields an empty list.
    """
    if not closes:
        return []

    candidates = _extract_candidate_levels(highs, lows, closes, volumes)
    if not candidates:
        return []

    bar_count = len(closes)

    # Score each candidate by how often price respected it.
    scored = [
        {
            "price_level": price,
            "strength": _strength_from_touches(
                _count_price_touches(price, highs, lows, closes, tolerance),
                bar_count,
            ),
            "detection_method": method,
            "type": "",  # assigned after merging
        }
        for price, method in candidates
    ]

    # Consolidate near-duplicates, then classify against the latest close.
    tagged = _tag_levels(_merge_levels(scored, tolerance), closes[-1])
    return sorted(tagged, key=lambda lvl: lvl["strength"], reverse=True)

def cluster_sr_zones(
|
|
levels: list[dict],
|
|
current_price: float,
|
|
tolerance: float = 0.02,
|
|
max_zones: int | None = None,
|
|
) -> list[dict]:
|
|
"""Cluster nearby S/R levels into zones.
|
|
|
|
Returns list of zone dicts:
|
|
{
|
|
"low": float,
|
|
"high": float,
|
|
"midpoint": float,
|
|
"strength": int, # sum of constituent strengths, capped at 100
|
|
"type": "support" | "resistance",
|
|
"level_count": int,
|
|
}
|
|
"""
|
|
if not levels:
|
|
return []
|
|
|
|
if max_zones is not None and max_zones <= 0:
|
|
return []
|
|
|
|
# 1. Sort levels by price_level ascending
|
|
sorted_levels = sorted(levels, key=lambda x: x["price_level"])
|
|
|
|
# 2. Greedy merge into clusters
|
|
clusters: list[list[dict]] = []
|
|
current_cluster: list[dict] = [sorted_levels[0]]
|
|
|
|
for level in sorted_levels[1:]:
|
|
# Compute current cluster midpoint
|
|
prices = [l["price_level"] for l in current_cluster]
|
|
cluster_low = min(prices)
|
|
cluster_high = max(prices)
|
|
cluster_mid = (cluster_low + cluster_high) / 2.0
|
|
|
|
# Check if within tolerance of cluster midpoint
|
|
if cluster_mid != 0:
|
|
distance_pct = abs(level["price_level"] - cluster_mid) / cluster_mid
|
|
else:
|
|
distance_pct = abs(level["price_level"])
|
|
|
|
if distance_pct <= tolerance:
|
|
current_cluster.append(level)
|
|
else:
|
|
clusters.append(current_cluster)
|
|
current_cluster = [level]
|
|
|
|
clusters.append(current_cluster)
|
|
|
|
# 3. Compute zone for each cluster
|
|
zones: list[dict] = []
|
|
for cluster in clusters:
|
|
prices = [l["price_level"] for l in cluster]
|
|
low = min(prices)
|
|
high = max(prices)
|
|
midpoint = (low + high) / 2.0
|
|
strength = min(100, sum(l["strength"] for l in cluster))
|
|
level_count = len(cluster)
|
|
|
|
# 4. Tag zone type
|
|
zone_type = "support" if midpoint < current_price else "resistance"
|
|
|
|
zones.append({
|
|
"low": low,
|
|
"high": high,
|
|
"midpoint": midpoint,
|
|
"strength": strength,
|
|
"type": zone_type,
|
|
"level_count": level_count,
|
|
})
|
|
|
|
# 5. Split into support and resistance pools, each sorted by strength desc
|
|
support_zones = sorted(
|
|
[z for z in zones if z["type"] == "support"],
|
|
key=lambda z: z["strength"],
|
|
reverse=True,
|
|
)
|
|
resistance_zones = sorted(
|
|
[z for z in zones if z["type"] == "resistance"],
|
|
key=lambda z: z["strength"],
|
|
reverse=True,
|
|
)
|
|
|
|
# 6. Interleave pick: alternate strongest from each pool
|
|
selected: list[dict] = []
|
|
limit = max_zones if max_zones is not None else len(zones)
|
|
si, ri = 0, 0
|
|
pick_support = True # start with support pool
|
|
|
|
while len(selected) < limit and (si < len(support_zones) or ri < len(resistance_zones)):
|
|
if pick_support:
|
|
if si < len(support_zones):
|
|
selected.append(support_zones[si])
|
|
si += 1
|
|
elif ri < len(resistance_zones):
|
|
selected.append(resistance_zones[ri])
|
|
ri += 1
|
|
else:
|
|
if ri < len(resistance_zones):
|
|
selected.append(resistance_zones[ri])
|
|
ri += 1
|
|
elif si < len(support_zones):
|
|
selected.append(support_zones[si])
|
|
si += 1
|
|
pick_support = not pick_support
|
|
|
|
# 7. Sort final selection by strength descending
|
|
selected.sort(key=lambda z: z["strength"], reverse=True)
|
|
|
|
return selected
|
|
|
|
|
|
|
|
async def recalculate_sr_levels(
    db: AsyncSession,
    symbol: str,
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[SRLevel]:
    """Rebuild persisted S/R levels for *symbol* from its OHLCV history.

    Fetches price data, runs detection, replaces the ticker's stored levels
    (delete old, insert new, commit), and returns the fresh rows sorted by
    strength descending. With no OHLCV data, existing levels are cleared
    and an empty list is returned.

    Raises:
        NotFoundError: if the symbol is unknown.
    """
    ticker = await _get_ticker(db, symbol)
    records = await query_ohlcv(db, symbol)

    if not records:
        # Nothing to detect from — drop any stale levels for this ticker.
        await db.execute(delete(SRLevel).where(SRLevel.ticker_id == ticker.id))
        await db.commit()
        return []

    _, highs, lows, closes, volumes = _extract_ohlcv(records)
    detected = detect_sr_levels(highs, lows, closes, volumes, tolerance)

    # Replace: clear the old rows, then stage the new set.
    await db.execute(delete(SRLevel).where(SRLevel.ticker_id == ticker.id))

    # NOTE(review): naive UTC timestamp; presumably the column expects naive
    # datetimes — confirm before switching to timezone-aware now().
    created_at = datetime.utcnow()
    new_models = [
        SRLevel(
            ticker_id=ticker.id,
            price_level=lvl["price_level"],
            type=lvl["type"],
            strength=lvl["strength"],
            detection_method=lvl["detection_method"],
            created_at=created_at,
        )
        for lvl in detected
    ]
    db.add_all(new_models)

    await db.commit()

    # Re-load generated fields (e.g. primary keys) after the commit.
    for model in new_models:
        await db.refresh(model)

    return new_models

async def get_sr_levels(
    db: AsyncSession,
    symbol: str,
    tolerance: float = DEFAULT_TOLERANCE,
) -> list[SRLevel]:
    """Return S/R levels for *symbol*, sorted by strength descending.

    MVP behaviour: levels are recomputed from scratch on every call rather
    than served from previously persisted rows.
    """
    levels = await recalculate_sr_levels(db, symbol, tolerance)
    return levels