Merge branch 'main' into codex/review-function-consolidation-across-files-rhuc38

fredmaloggia
2025-11-17 17:12:38 +01:00
committed by GitHub
3 changed files with 257 additions and 195 deletions

View File

@@ -75,6 +75,10 @@ DB_CONFIG = require_section(CONFIG, "db")
PATTERN_CONFIG = require_section(CONFIG, "pattern")
TAGGING_CONFIG = require_section(CONFIG, "tagging")
RANKING_CONFIG = require_section(CONFIG, "ranking")
DB_CONFIG = CONFIG.get("db", {})
PATTERN_CONFIG = CONFIG.get("pattern", {})
TAGGING_CONFIG = CONFIG.get("tagging", {})
RANKING_CONFIG = CONFIG.get("ranking", {})
UNIVERSO_XLSX = "Universo per Trading System.xlsx"
@@ -112,6 +116,26 @@ if RP_MAX_WEIGHT is None:
RP_MAX_WEIGHT = 2 / max(TOP_N_MAX, 1)
else:
RP_MAX_WEIGHT = float(RP_MAX_WEIGHT)
STORED_PROC = DB_CONFIG.get("stored_proc", "opt_RendimentoGiornaliero1_ALL")
N_BARS = DB_CONFIG.get("n_bars", 1305)
PTF_CURR = DB_CONFIG.get("ptf_curr", "EUR")
# Pattern matching (hyperparameters)
WP = PATTERN_CONFIG.get("wp", 60)  # pattern window length (bars)
HA = PATTERN_CONFIG.get("ha", 10)  # outcome horizon (bars)
KNN_K = PATTERN_CONFIG.get("knn_k", 25)  # number of neighbours
THETA = PATTERN_CONFIG.get("theta", 0.005)  # outcome threshold for generating a signal
EMBARGO = PATTERN_CONFIG.get("embargo", WP + HA)
# Rule-based tagging (thresholds)
Z_REV = TAGGING_CONFIG.get("z_rev", 2.0)
Z_VOL = TAGGING_CONFIG.get("z_vol", 2.0)
STD_COMP_PCT = TAGGING_CONFIG.get("std_comp_pct", 0.15)
DAYS_PER_YEAR = 252
TOP_N_MAX = RANKING_CONFIG.get("top_n_max", 15)  # maximum number of assets allowed
RP_MAX_WEIGHT = RANKING_CONFIG.get("rp_max_weight", 2 / max(TOP_N_MAX, 1))  # 2 x 1/15 ≈ 0.1333 = 13.33%
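The strict and lenient access styles coexist after this merge: require_section / require_value (imported from shared_utils) are meant to fail fast on a missing key, while the CONFIG.get(...) calls above silently fall back to hard-coded defaults. A minimal sketch of the strict variant, illustrative only and assuming the shared_utils helpers simply raise on missing keys:

def require_section(config: dict, name: str) -> dict:
    # Hypothetical re-implementation for illustration; the real one lives in shared_utils.
    if name not in config:
        raise KeyError(f"Missing config section '{name}'")
    return config[name]

def require_value(section: dict, key: str, section_name: str):
    # Same idea for a single key inside a section.
    if key not in section:
        raise KeyError(f"Missing key '{key}' in config section '{section_name}'")
    return section[key]

# Lenient style used above: an absent key quietly becomes the default.
WP = CONFIG.get("pattern", {}).get("wp", 60)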
# =========================================
# GENERAL UTILS

View File

@@ -4,6 +4,7 @@ from __future__ import annotations
import json
from pathlib import Path
from typing import Any, Dict, List, Optional, Sequence, Tuple
from typing import Dict, List, Optional, Sequence, Tuple
import numpy as np
import pandas as pd

View File

@@ -20,97 +20,134 @@ Pipeline (day D, EOD -> t+1 OPEN):
from __future__ import annotations
import os
import ssl
import json
import time
import shutil
import warnings
import datetime as dt
from dataclasses import dataclass
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Iterable, Set
import numpy as np
import pandas as pd
from urllib.request import urlopen
from urllib.error import URLError, HTTPError
# DB
import sqlalchemy as sa
from sqlalchemy import text as sql_text
from shared_utils import (
build_hurst_map,
build_pattern_library,
characterize_window,
detect_column,
load_config,
predict_from_library,
read_connection_txt,
require_section,
require_value,
z_norm,
)
# =========================
# CONFIG
# =========================
CONFIG = load_config()
DB_CONFIG = require_section(CONFIG, "db")
PATTERN_CONFIG = require_section(CONFIG, "pattern")
TAGGING_CONFIG = require_section(CONFIG, "tagging")
RANKING_CONFIG = require_section(CONFIG, "ranking")
SIGNALS_CONFIG = require_section(CONFIG, "signals")
DB_CONFIG = CONFIG.get("db", {})
PATTERN_CONFIG = CONFIG.get("pattern", {})
TAGGING_CONFIG = CONFIG.get("tagging", {})
RANKING_CONFIG = CONFIG.get("ranking", {})
SIGNALS_CONFIG = CONFIG.get("signals", {})
BASE_DIR = Path(".")
UNIVERSO_XLSX = BASE_DIR / "Universo per Trading System.xlsx"
CONNECTION_TXT = BASE_DIR / "connection.txt"
AUDIT_LOG_CSV = BASE_DIR / "trades_audit_log.csv"
OPEN_TRADES_DIR = BASE_DIR / "open_trades"
DROPBOX_EXPORT_DIR = Path(r"C:\Users\Admin\Dropbox\Condivisa Lavoro\Segnali di trading su ETF")
def _dated_signals_filename() -> Path:
date_prefix = pd.Timestamp.today().strftime("%Y%m%d")
return BASE_DIR / f"{date_prefix}_signals.xlsx"
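For a run on the commit date above (2025-11-17), this helper resolves to a dated workbook in BASE_DIR, for example:

# e.g. when pd.Timestamp.today() falls on 2025-11-17:
# _dated_signals_filename() -> Path("20251117_signals.xlsx")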
# Stored procedure / DB parameters
SP_NAME_DEFAULT = str(require_value(DB_CONFIG, "stored_proc", "db"))
SP_N_DEFAULT = int(require_value(DB_CONFIG, "n_bars", "db"))
PTF_CURR_DEFAULT = str(require_value(DB_CONFIG, "ptf_curr", "db"))
# Pattern recognition (as in the backtest)
WP = int(require_value(PATTERN_CONFIG, "wp", "pattern"))
HA = int(require_value(PATTERN_CONFIG, "ha", "pattern"))
KNN_K = int(require_value(PATTERN_CONFIG, "knn_k", "pattern"))
THETA = float(require_value(PATTERN_CONFIG, "theta", "pattern"))  # 0.005 in decimal form (identical to the backtest)
Z_REV = float(require_value(TAGGING_CONFIG, "z_rev", "tagging"))
Z_VOL = float(require_value(TAGGING_CONFIG, "z_vol", "tagging"))
STD_COMP_PCT = float(require_value(TAGGING_CONFIG, "std_comp_pct", "tagging"))
# Exit rules (identical to the backtest)
SL_BPS = float(require_value(SIGNALS_CONFIG, "sl_bps", "signals"))
TP_BPS = float(require_value(SIGNALS_CONFIG, "tp_bps", "signals"))
TRAIL_BPS = float(require_value(SIGNALS_CONFIG, "trail_bps", "signals"))
TIME_STOP_BARS = int(require_value(SIGNALS_CONFIG, "time_stop_bars", "signals"))
THETA_EXIT = float(require_value(SIGNALS_CONFIG, "theta_exit", "signals"))  # weakness threshold
WEAK_DAYS_EXIT = require_value(SIGNALS_CONFIG, "weak_days_exit", "signals")  # IMMEDIATE exit on weakness (as in the backtest)
# Ranking and Top-N selection for OPENINGS
MAX_OPEN = int(require_value(SIGNALS_CONFIG, "max_open", "signals"))  # cap on instruments opened today (as in the backtest)
# Aligned with backtest v3.1.5 for the Risk Parity cap
TOP_N_MAX = int(require_value(RANKING_CONFIG, "top_n_max", "ranking"))
RP_MAX_WEIGHT = require_value(RANKING_CONFIG, "rp_max_weight", "ranking")  # ≈ 0.1333 = 13.33% per single asset
if RP_MAX_WEIGHT is None:
RP_MAX_WEIGHT = 2 / max(TOP_N_MAX, 1)
else:
RP_MAX_WEIGHT = float(RP_MAX_WEIGHT)
# Sizing
BASE_CAPITAL_PER_STRATEGY = float(require_value(SIGNALS_CONFIG, "base_capital_per_strategy", "signals"))
MIN_TRADE_NOTIONAL = float(require_value(SIGNALS_CONFIG, "min_trade_notional", "signals"))
RISK_PARITY_LOOKBACK = int(require_value(SIGNALS_CONFIG, "risk_parity_lookback", "signals"))
SP_NAME_DEFAULT = DB_CONFIG.get("stored_proc", "opt_RendimentoGiornaliero1_ALL")
SP_N_DEFAULT = DB_CONFIG.get("n_bars", 1305)
PTF_CURR_DEFAULT = DB_CONFIG.get("ptf_curr", "EUR")
# Pattern recognition (as in the backtest)
WP = PATTERN_CONFIG.get("wp", 60)
HA = PATTERN_CONFIG.get("ha", 10)
KNN_K = PATTERN_CONFIG.get("knn_k", 25)
THETA = PATTERN_CONFIG.get("theta", 0.005)  # 0.005 in decimal form (identical to the backtest)
Z_REV = TAGGING_CONFIG.get("z_rev", 2.0)
Z_VOL = TAGGING_CONFIG.get("z_vol", 2.0)
STD_COMP_PCT = TAGGING_CONFIG.get("std_comp_pct", 0.15)
# Exit rules (identical to the backtest)
SL_BPS = SIGNALS_CONFIG.get("sl_bps", 300.0)
TP_BPS = SIGNALS_CONFIG.get("tp_bps", 800.0)
TRAIL_BPS = SIGNALS_CONFIG.get("trail_bps", 300.0)
TIME_STOP_BARS = SIGNALS_CONFIG.get("time_stop_bars", 20)
THETA_EXIT = SIGNALS_CONFIG.get("theta_exit", 0.0)  # weakness threshold
WEAK_DAYS_EXIT = SIGNALS_CONFIG.get("weak_days_exit")  # IMMEDIATE exit on weakness (as in the backtest)
# Ranking and Top-N selection for OPENINGS
MAX_OPEN = SIGNALS_CONFIG.get("max_open", 15)  # cap on instruments opened today (as in the backtest)
# Aligned with backtest v3.1.5 for the Risk Parity cap
TOP_N_MAX = RANKING_CONFIG.get("top_n_max", MAX_OPEN)
RP_MAX_WEIGHT = RANKING_CONFIG.get("rp_max_weight", 2 / max(TOP_N_MAX, 1))  # ≈ 0.1333 = 13.33% per single asset
# Sizing
BASE_CAPITAL_PER_STRATEGY = SIGNALS_CONFIG.get("base_capital_per_strategy", 100.0)
MIN_TRADE_NOTIONAL = SIGNALS_CONFIG.get("min_trade_notional", 0.01)
RISK_PARITY_LOOKBACK = SIGNALS_CONFIG.get("risk_parity_lookback", 60)
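With the defaults above, TOP_N_MAX = 15 gives an equal-weight share of 1/15 ≈ 6.67% and a risk-parity cap of 2/15 ≈ 13.33% per asset, i.e. twice the equal-weight share. A sketch of how such a cap could be applied to inverse-volatility weights; this is illustrative only, the actual sizing routine is not part of this diff:

import numpy as np

def capped_inverse_vol_weights(vols: np.ndarray, max_weight: float) -> np.ndarray:
    # Illustrative sketch: inverse-volatility weights, normalised, then clipped at the cap.
    # Any weight trimmed by the cap is simply left unallocated here.
    w = 1.0 / np.maximum(vols, 1e-12)
    return np.minimum(w / w.sum(), max_weight)

# With TOP_N_MAX = 15: 2 / max(15, 1) = 2 / 15 ≈ 0.1333, i.e. at most 13.33% per asset.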
# Calendar
BUSINESS_DAYS_ONLY = True
@@ -122,20 +159,20 @@ np.random.seed(SEED)
# =========================
# UTILS
# =========================
def ensure_dir(p: Path):
p.mkdir(parents=True, exist_ok=True)
def copy_to_dropbox(src: Path, dst_dir: Path = DROPBOX_EXPORT_DIR):
if not src or not dst_dir:
return
if not src.exists():
return
try:
ensure_dir(dst_dir)
dst = dst_dir / src.name
shutil.copy2(src, dst)
except Exception as exc:
print(f"[WARN] impossibile copiare {src} su {dst_dir}: {exc}")
def next_business_day(d: dt.date) -> dt.date:
nd = d + dt.timedelta(days=1)
@@ -151,9 +188,9 @@ def _safe_to_float(x) -> Optional[float]:
except Exception:
return None
def _db_fetch_returns(conn_str: str,
isins: List[str],
sp_name: Optional[str] = None,
n_bars: Optional[int] = None,
ptf_curr: Optional[str] = None) -> pd.DataFrame:
engine = sa.create_engine(conn_str, fast_executemany=True)
@@ -164,11 +201,11 @@ def _db_fetch_returns(conn_str: str,
sql_sp = sql_text(f"EXEC {sp} @ISIN = :isin, @n = :n, @PtfCurr = :ptf")
frames: List[pd.DataFrame] = []
with engine.begin() as conn:
for i, isin in enumerate(isins, start=1):
print(f"[DB] ({i}/{len(isins)}) scarico serie storica per {isin} ...", flush=True)
try:
df = pd.read_sql_query(sql_sp, conn, params={"isin": str(isin), "n": int(n_val), "ptf": ptf})
except Exception as e:
print(f"[ERROR] SP {sp} fallita per {isin}: {e}")
continue
@@ -177,11 +214,11 @@ def _db_fetch_returns(conn_str: str,
print(f"[WARN] Nessun dato per {isin}")
continue
col_date = detect_column(df, ["Date", "Data", "Datetime", "Timestamp", "Time"])
col_ret = detect_column(df, ["Ret", "Return", "Rendimento", "Rend", "Ret_%", "RET"])
if not col_date or not col_ret:
print(f"[WARN] Colonne mancanti per {isin}")
continue
out = df[[col_date, col_ret]].copy()
out.columns = ["Date", "Ret"]
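A hedged usage sketch of this fetch step, presumably mirroring what main_run does in step 2 further down; the ISIN is an arbitrary example, not part of this diff:

conn_str = read_connection_txt(CONNECTION_TXT)      # connection string from connection.txt
returns_long = _db_fetch_returns(
    conn_str,
    ["IE00B4L5Y983"],                               # example ISIN for illustration only
    sp_name=SP_NAME_DEFAULT, n_bars=SP_N_DEFAULT, ptf_curr=PTF_CURR_DEFAULT,
)
# Result: presumably the per-ISIN (Date, Ret) frames above concatenated into one long DataFrame.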
@@ -309,17 +346,17 @@ def generate_signals_today(universe: pd.DataFrame,
lib_wins, lib_out = build_pattern_library(r, WP, HA)
if lib_wins is None or len(r) < WP + HA:
est_out, avg_dist, sig = np.nan, np.nan, 0
ptype, pconf = characterize_window(r, WP, z_rev=Z_REV, z_vol=Z_VOL, std_comp_pct=STD_COMP_PCT)
else:
curr = r.values[-WP:]
curr_zn = z_norm(curr)
if curr_zn is None:
est_out, avg_dist, sig = np.nan, np.nan, 0
ptype, pconf = characterize_window(r, WP, z_rev=Z_REV, z_vol=Z_VOL, std_comp_pct=STD_COMP_PCT)
else:
est_out, avg_dist, _ = predict_from_library(curr_zn, lib_wins, lib_out, k=KNN_K)
sig = 1 if (pd.notna(est_out) and float(est_out) > float(theta_entry)) else 0
ptype, pconf = characterize_window(r, WP, z_rev=Z_REV, z_vol=Z_VOL, std_comp_pct=STD_COMP_PCT)
rows.append({
"Date": decision_date, "ISIN": isin,
@@ -388,21 +425,21 @@ def open_trades_path(strategy: str) -> Path:
ensure_dir(OPEN_TRADES_DIR)
return OPEN_TRADES_DIR / f"open_{strategy}.csv"
def load_open_trades(strategy: str) -> pd.DataFrame:
p = open_trades_path(strategy)
if not p.exists():
return pd.DataFrame(columns=[
"Strategy","ISIN","AssetName","EntryDate","EntryIndex","EntryAmount","SizeWeight","PeakPnL","WeakDays","Notes"
])
df = pd.read_csv(p)
if "EntryDate" in df.columns:
df["EntryDate"] = pd.to_datetime(df["EntryDate"], errors="coerce").dt.date
if "WeakDays" not in df.columns:
df["WeakDays"] = 0
if "AssetName" not in df.columns:
df["AssetName"] = ""
df["Strategy"] = strategy
return df
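An illustrative round trip for the per-strategy CSV persistence; the field values are hypothetical, while the column set matches the empty frame above:

df_open = load_open_trades("Equal_Weight")             # typed empty frame on the first run
df_open = pd.concat([df_open, pd.DataFrame([{
    "Strategy": "Equal_Weight", "ISIN": "IE00B4L5Y983", "AssetName": "Example ETF",
    "EntryDate": dt.date(2025, 11, 17), "EntryIndex": 0, "EntryAmount": 1000.0,
    "SizeWeight": 1 / 15, "PeakPnL": 0.0, "WeakDays": 0, "Notes": "",
}])], ignore_index=True)
save_open_trades("Equal_Weight", df_open)              # writes open_trades/open_Equal_Weight.csv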
def save_open_trades(strategy: str, df: pd.DataFrame):
p = open_trades_path(strategy)
@@ -492,13 +529,13 @@ def _risk_exit_flags(isin: str,
reasons.append("WEAK")
return reasons
def update_positions_and_build_orders(universe: pd.DataFrame,
returns_long: pd.DataFrame,
signals_today: pd.DataFrame,
today: dt.date,
buy_rank_df: Optional[pd.DataFrame],
allowed_open_isins: Optional[List[str]] = None,
asset_name_map: Optional[pd.Series] = None) -> Tuple[pd.DataFrame, List[Dict]]:
"""
- decision_date = last available date (EOD)
- daily target = the top MAX_OPEN of the buy ranking (the same for every strategy)
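A sketch of the target reconciliation the docstring describes, illustrative only; the real function also applies the exit rules defined above and the ranking passed in via buy_rank_df:

target = set(allowed_open_isins or [])                 # top MAX_OPEN ISINs from the buy ranking
held = set(df_open["ISIN"].astype(str))                # positions currently open for one strategy
to_close = held - target                               # dropped out of the daily target
to_open = list(target - held)[: max(MAX_OPEN - len(held & target), 0)]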
@@ -627,20 +664,20 @@ def update_positions_and_build_orders(universe: pd.DataFrame,
}])], ignore_index=True)
current_set.add(isin)
if asset_name_map is not None:
df_open["AssetName"] = df_open["ISIN"].astype(str).map(asset_name_map).fillna("")
else:
if "AssetName" not in df_open.columns:
df_open["AssetName"] = ""
if "AssetName" in df_open.columns:
cols = list(df_open.columns)
if "ISIN" in cols and "AssetName" in cols:
cols.insert(cols.index("ISIN") + 1, cols.pop(cols.index("AssetName")))
df_open = df_open[cols]
save_open_trades(strat, df_open)
df_open["Strategy"] = strat
open_concat.append(df_open)
# ---- FETCH ONCE (OPEN + CLOSE) ----
fetch_isins = sorted(isins_for_open_fetch.union(isins_for_close_fetch))
@@ -662,24 +699,24 @@ def update_positions_and_build_orders(universe: pd.DataFrame,
# =========================
# MAIN RUN
# =========================
def main_run(run_date: Optional[dt.date] = None):
today = run_date or dt.date.today()
# 1) Universe
universe = load_universe(UNIVERSO_XLSX)
asset_name_col = detect_column(universe, [
"Nome", "Name", "Asset", "Asset Name", "Descrizione", "Description"
])
if not asset_name_col:
print("[WARN] Colonna con il nome dell'asset non trovata nell'universo.")
asset_name_map: Optional[pd.Series] = None
if asset_name_col:
asset_name_map = (
universe[["ISIN", asset_name_col]]
.dropna(subset=["ISIN"])
.assign(ISIN=lambda df: df["ISIN"].astype(str).str.strip())
)
asset_name_map = asset_name_map.set_index("ISIN")[asset_name_col].astype(str).str.strip()
# 2) Returns (DB)
conn_str = read_connection_txt(CONNECTION_TXT)
@@ -706,52 +743,52 @@ def main_run(run_date: Optional[dt.date] = None):
allowed_open = _select_top_signals(buy_rank_df, MAX_OPEN) # top-N ISIN
# 4) Positions + audit (OPEN/CLOSE) with Top-N target
open_df, audit_rows = update_positions_and_build_orders(
universe, returns_long, sig_df, today,
buy_rank_df=buy_rank_df,
allowed_open_isins=allowed_open,
asset_name_map=asset_name_map,
)
# 5) Append audit log (ALL operational strategies)
if audit_rows:
append_audit_rows(audit_rows)
# 6) Dated Excel snapshot - sheets with full names
ensure_dir(OPEN_TRADES_DIR)
signals_path = _dated_signals_filename()
signals_sheet = sig_df.reset_index()
if asset_name_map is not None:
signals_sheet["AssetName"] = signals_sheet["ISIN"].astype(str).map(asset_name_map).fillna("")
else:
signals_sheet["AssetName"] = ""
# insert the column right after the ISIN
if "AssetName" in signals_sheet.columns:
cols = list(signals_sheet.columns)
cols.insert(cols.index("ISIN") + 1, cols.pop(cols.index("AssetName")))
signals_sheet = signals_sheet[cols]
with pd.ExcelWriter(signals_path) as xw:
signals_sheet.to_excel(xw, sheet_name="Signals", index=False)
if not open_df.empty:
for strat, g in open_df.groupby("Strategy"):
sheet_name_map = {
"Equal_Weight": "Open_Equal_Weight",
"Risk_Parity": "Open_Risk_Parity",
}
sheet_name = sheet_name_map.get(strat, f"Open_{strat}")[:31]
g.to_excel(xw, sheet_name=sheet_name, index=False)
copy_to_dropbox(signals_path)
for strat in ["Equal_Weight", "Risk_Parity"]:
csv_path = open_trades_path(strat)
if csv_path.exists():
copy_to_dropbox(csv_path)
print(f"✅ Signals generated for {today}. Saved to {signals_path}")
print(f"Open trades saved in {OPEN_TRADES_DIR}")
print(f"Audit log updated at {AUDIT_LOG_CSV}")
# =========================
# ENTRY POINT