continuazione refactoring

This commit is contained in:
fredmaloggia
2025-11-25 21:41:59 +01:00
parent 8716d80ecd
commit 07111c67c0
6 changed files with 192 additions and 191 deletions

View File

@@ -54,32 +54,32 @@ from shared_utils import (
)
# =========================
# CONFIG
# =========================
# Load the application configuration once at import time.
CONFIG = load_config()
DB_CONFIG = require_section(CONFIG, "db")
PATTERN_CONFIG = require_section(CONFIG, "pattern")
TAGGING_CONFIG = require_section(CONFIG, "tagging")
RANKING_CONFIG = require_section(CONFIG, "ranking")
SIGNALS_CONFIG = require_section(CONFIG, "signals")
# NOTE(review): the .get() lookups below immediately overwrite the strict
# require_section() results above — confirm which variant is intended.
DB_CONFIG = CONFIG.get("db", {})
PATTERN_CONFIG = CONFIG.get("pattern", {})
TAGGING_CONFIG = CONFIG.get("tagging", {})
RANKING_CONFIG = CONFIG.get("ranking", {})
SIGNALS_CONFIG = CONFIG.get("signals", {})
# Filesystem layout (relative to the current working directory).
BASE_DIR = Path(".")
OUTPUT_DIR = BASE_DIR / "output"
# Universe now expected inside Input folder
UNIVERSO_XLSX = BASE_DIR / "Input" / "Universo per Trading System.xlsx"
CONNECTION_TXT = BASE_DIR / "connection.txt"
AUDIT_LOG_CSV = OUTPUT_DIR / "trades_audit_log.csv"
OPEN_TRADES_DIR = BASE_DIR / "open_trades"
# Hard-coded export target for the generated signal files.
DROPBOX_EXPORT_DIR = Path(r"C:\Users\Admin\Dropbox\Condivisa Lavoro\Segnali di trading su ETF")
def _dated_signals_filename() -> Path:
    """Return today's signals workbook path: ``OUTPUT_DIR/YYYYMMDD_signals.xlsx``."""
    stamp = pd.Timestamp.today().strftime("%Y%m%d")
    return OUTPUT_DIR / f"{stamp}_signals.xlsx"
# CONFIG
# =========================
# Load the application configuration once at import time; required sections
# fail fast via require_section(), optional ones fall back to empty dicts.
CONFIG = load_config()
DB_CONFIG = require_section(CONFIG, "db")
PATTERN_CONFIG = require_section(CONFIG, "pattern")
TAGGING_CONFIG = require_section(CONFIG, "tagging")
RANKING_CONFIG = require_section(CONFIG, "ranking")
SIGNALS_CONFIG = require_section(CONFIG, "signals")
PATHS_CONFIG = require_section(CONFIG, "paths")
HURST_CONFIG = CONFIG.get("hurst", {})
PRICES_CONFIG = CONFIG.get("prices", {})
RUN_CONFIG = CONFIG.get("run", {})
# Filesystem layout, configurable through the "paths" config section.
BASE_DIR = Path(PATHS_CONFIG.get("base_dir", ".")).resolve()
OUTPUT_DIR = BASE_DIR / PATHS_CONFIG.get("output_dir", "output")
PLOT_DIR = BASE_DIR / PATHS_CONFIG.get("plot_dir", "plot")
# Universe now expected inside Input folder
UNIVERSO_XLSX = BASE_DIR / PATHS_CONFIG.get("input_universe", "Input/Universo per Trading System.xlsx")
CONNECTION_TXT = BASE_DIR / PATHS_CONFIG.get("connection_txt", "connection.txt")
# NOTE: when the default below is used, OUTPUT_DIR is already absolute, so
# "BASE_DIR /" is a no-op (pathlib keeps an absolute right-hand operand).
AUDIT_LOG_CSV = BASE_DIR / PATHS_CONFIG.get("audit_log_csv", OUTPUT_DIR / "trades_audit_log.csv")
OPEN_TRADES_DIR = BASE_DIR / PATHS_CONFIG.get("open_trades_dir", "open_trades")
# Hard-coded export target — not yet configurable via the "paths" section.
DROPBOX_EXPORT_DIR = Path(r"C:\Users\Admin\Dropbox\Condivisa Lavoro\Segnali di trading su ETF")
def _dated_signals_filename() -> Path:
    """Build the dated output path for today's signals workbook."""
    today_tag = pd.Timestamp.today().strftime("%Y%m%d")
    filename = f"{today_tag}_signals.xlsx"
    return OUTPUT_DIR / filename
# Stored procedure / DB parameters
SP_NAME_DEFAULT = str(require_value(DB_CONFIG, "stored_proc", "db"))
@@ -115,45 +115,17 @@ else:
RP_MAX_WEIGHT = float(RP_MAX_WEIGHT)
# Sizing
BASE_CAPITAL_PER_STRATEGY = float(require_value(SIGNALS_CONFIG, "base_capital_per_strategy", "signals"))
MIN_TRADE_NOTIONAL = float(require_value(SIGNALS_CONFIG, "min_trade_notional", "signals"))
RISK_PARITY_LOOKBACK = int(require_value(SIGNALS_CONFIG, "risk_parity_lookback", "signals"))
# DB defaults used when the "db" config section omits the keys.
SP_NAME_DEFAULT = DB_CONFIG.get("stored_proc", "opt_RendimentoGiornaliero1_ALL")
SP_N_DEFAULT = DB_CONFIG.get("n_bars", 1305)
PTF_CURR_DEFAULT = DB_CONFIG.get("ptf_curr", "EUR")
# Pattern recognition (as in the backtest)
WP = PATTERN_CONFIG.get("wp", 60)
HA = PATTERN_CONFIG.get("ha", 10)
KNN_K = PATTERN_CONFIG.get("knn_k", 25)
THETA = PATTERN_CONFIG.get("theta", 0.005) # 0.005% in decimals (identical to the backtest)
Z_REV = TAGGING_CONFIG.get("z_rev", 2.0)
Z_VOL = TAGGING_CONFIG.get("z_vol", 2.0)
STD_COMP_PCT = TAGGING_CONFIG.get("std_comp_pct", 0.15)
# Exit rules (identical to the backtest)
SL_BPS = SIGNALS_CONFIG.get("sl_bps", 300.0)
TP_BPS = SIGNALS_CONFIG.get("tp_bps", 800.0)
TRAIL_BPS = SIGNALS_CONFIG.get("trail_bps", 300.0)
TIME_STOP_BARS = SIGNALS_CONFIG.get("time_stop_bars", 20)
THETA_EXIT = SIGNALS_CONFIG.get("theta_exit", 0.0) # weakness threshold
WEAK_DAYS_EXIT = SIGNALS_CONFIG.get("weak_days_exit") # IMMEDIATE exit on weakness (as in the backtest)
# Ranking and Top-N selection for OPENINGS
MAX_OPEN = SIGNALS_CONFIG.get("max_open", 15) # cap on instruments opened today (as in the backtest)
# Aligned with backtest v3.1.5 for the Risk Parity cap
TOP_N_MAX = RANKING_CONFIG.get("top_n_max", MAX_OPEN)
RP_MAX_WEIGHT = RANKING_CONFIG.get("rp_max_weight", 2 / max(TOP_N_MAX, 1)) # ≈ 0.1333 = 13.33% per single asset
# Sizing
BASE_CAPITAL_PER_STRATEGY = SIGNALS_CONFIG.get("base_capital_per_strategy", 100.0)
MIN_TRADE_NOTIONAL = SIGNALS_CONFIG.get("min_trade_notional", 0.01)
RISK_PARITY_LOOKBACK = SIGNALS_CONFIG.get("risk_parity_lookback", 60)
# Calendar
BUSINESS_DAYS_ONLY = True
SEED = 42
# NOTE(review): the three require_value() lookups below re-assign names set
# via .get() a few lines up (and again near the top of this span) — confirm
# which definition is intended to win; as written, the strict lookups win.
BASE_CAPITAL_PER_STRATEGY = float(require_value(SIGNALS_CONFIG, "base_capital_per_strategy", "signals"))
MIN_TRADE_NOTIONAL = float(require_value(SIGNALS_CONFIG, "min_trade_notional", "signals"))
RISK_PARITY_LOOKBACK = int(require_value(SIGNALS_CONFIG, "risk_parity_lookback", "signals"))
# Hurst estimation: lookback may be None (a caller substitutes SP_N_DEFAULT).
HURST_LOOKBACK = HURST_CONFIG.get("lookback", None)
HURST_MIN_LENGTH = int(HURST_CONFIG.get("min_length", 200))
# Open-price HTTP endpoint and its retry policy.
OPEN_PRICE_BASE_URL = str(PRICES_CONFIG.get("base_url", "https://fin.scorer.app/finance/euronext/price"))
OPEN_MAX_RETRY = int(PRICES_CONFIG.get("max_retry", 3))
OPEN_SLEEP_SEC = float(PRICES_CONFIG.get("sleep_sec", 0.1))
OPEN_TIMEOUT = float(PRICES_CONFIG.get("timeout", 10))
BUSINESS_DAYS_ONLY = bool(RUN_CONFIG.get("business_days_only", True))
SEED = int(RUN_CONFIG.get("seed", 42))
# Global runtime setup: silence warnings and seed numpy for reproducibility.
warnings.filterwarnings("ignore")
np.random.seed(SEED)
@@ -241,34 +213,30 @@ def _db_fetch_returns(conn_str: str,
out_all = out_all.dropna(subset=["Date"]).sort_values(["ISIN", "Date"]).reset_index(drop=True)
return out_all[["Date", "ISIN", "Ret"]]
# =========================
# UNIVERSE + OPEN PRICE API (schema checker)
# =========================
# Retry policy for the open-price HTTP fetch — presumably consumed by
# get_open_price() further down; TODO confirm against the caller.
OPEN_MAX_RETRY = 3
OPEN_SLEEP_SEC = 0.1
OPEN_TIMEOUT = 10
def load_universe(path: Path) -> pd.DataFrame:
df = pd.read_excel(path)
if "ISIN" not in df.columns:
raise KeyError("Nel file Universo manca la colonna 'ISIN'")
df["ISIN"] = df["ISIN"].astype(str).str.strip()
# =========================
# UNIVERSO + OPEN PRICE API (schema checker)
# =========================
def load_universe(path: Path) -> pd.DataFrame:
    """Load the trading-universe workbook and normalize its key columns.

    Requires an ``ISIN`` column; ``Asset Class``, ``Mercato`` and
    ``TickerOpen`` are created empty when absent. All four columns are
    coerced to stripped strings.

    Raises:
        KeyError: if the ``ISIN`` column is missing.
    """
    universe = pd.read_excel(path)
    if "ISIN" not in universe.columns:
        raise KeyError("Nel file Universo manca la colonna 'ISIN'")
    universe["ISIN"] = universe["ISIN"].astype(str).str.strip()
    optional_columns = ("Asset Class", "Mercato", "TickerOpen")
    for name in optional_columns:
        if name not in universe.columns:
            universe[name] = ""
        universe[name] = universe[name].astype(str).str.strip()
    return universe
def _build_symbol_euronext(row: pd.Series) -> Tuple[str, str]:
isin = str(row.get("ISIN", "")).strip()
venue = str(row.get("Mercato", "")).strip()
tok = str(row.get("TickerOpen", "") or "").strip()
base = "https://fin.scorer.app/finance/euronext/price"
if tok and "-" in tok and tok.split("-")[0].upper() == isin.upper():
return base, tok
if isin and venue:
return base, f"{isin}-{venue}"
return base, isin
def _build_symbol_euronext(row: pd.Series) -> Tuple[str, str]:
    """Return ``(base_url, symbol)`` for the configured open-price endpoint.

    Preference order: an explicit ``TickerOpen`` token that already starts
    with the row's ISIN, then ``ISIN-Mercato``, then the bare ISIN.
    """
    endpoint = OPEN_PRICE_BASE_URL
    isin_code = str(row.get("ISIN", "")).strip()
    market = str(row.get("Mercato", "")).strip()
    ticker = str(row.get("TickerOpen", "") or "").strip()
    if ticker and "-" in ticker and ticker.split("-")[0].upper() == isin_code.upper():
        return endpoint, ticker
    if isin_code and market:
        return endpoint, f"{isin_code}-{market}"
    return endpoint, isin_code
def get_open_price(isin: str, universe: pd.DataFrame) -> Optional[float]:
"""
@@ -732,11 +700,15 @@ def main_run(run_date: Optional[dt.date] = None):
n_bars=SP_N_DEFAULT,
ptf_curr=PTF_CURR_DEFAULT,
)
if returns_long.empty:
raise RuntimeError("Nessun rendimento disponibile dal DB (SP vuota?).")
# 2b) Hurst map per ISIN (stessa logica concettuale del backtest)
hurst_map = build_hurst_map(returns_long, lookback=252)
if returns_long.empty:
raise RuntimeError("Nessun rendimento disponibile dal DB (SP vuota?).")
# 2b) Hurst map per ISIN (stessa logica concettuale del backtest)
hurst_map = build_hurst_map(
returns_long,
lookback=HURST_LOOKBACK or SP_N_DEFAULT,
min_length=HURST_MIN_LENGTH,
)
# 3) Segnali EOD su D con THETA = Hurst/100 per ISIN
sig_df = generate_signals_today(universe, returns_long, today, hurst_map=hurst_map)