"""Compute and plot a slow-crash risk index for BTC/USDC 4h OHLCV data."""

import json

import matplotlib.pyplot as plt
import numpy as np
import pandas as pd

# Freqtrade-exported OHLCV candles (feather format, 4h timeframe).
DATA_FILE = "/home/jerome/Perso/freqtradeDocker/user_data/data/binance/BTC_USDC-4h.feather"
def compute_crash_risk_index(df):
    """Compute a smoothed crash-risk index in [0, 1] from OHLCV candles.

    Parameters
    ----------
    df : pandas.DataFrame
        Must contain 'high', 'low' and 'close' columns. The input is NOT
        modified: the function works on a copy.

    Returns
    -------
    pandas.DataFrame
        Copy of *df* with all intermediate score columns plus the final
        'crash_risk_index' column (higher = higher crash risk).
    """
    # Work on a copy so the caller's DataFrame is not mutated as a side effect.
    df = df.copy()

    # VOLATILITY: high-low range as a simple ATR proxy, normalised by price.
    df['H-L'] = df['high'] - df['low']
    df['ATR'] = df['H-L'].rolling(14).mean()
    df['atr_norm'] = df['ATR'] / df['close']

    # DRAWDOWN (critical): distance below the 48-candle rolling high.
    # A 10% drawdown saturates the score at 1.
    df['rolling_max'] = df['close'].rolling(48).max()
    df['drawdown'] = (df['close'] - df['rolling_max']) / df['rolling_max']
    df['dd_score'] = np.clip(-df['drawdown'] / 0.10, 0, 1)

    # TREND (slope): fast MA below slow MA pushes the score toward 1.
    df['MA7'] = df['close'].rolling(7).mean()
    df['MA14'] = df['close'].rolling(14).mean()
    df['slope'] = df['MA7'] - df['MA14']
    # NOTE(review): slope/close is typically far below 1, so this score stays
    # close to 1 most of the time — confirm whether a scaling factor is wanted.
    df['slope_score'] = np.clip(1 - (df['slope'] / df['close']), 0, 1)

    # NEGATIVE STREAK: sum of only the negative returns over the last
    # 24 candles; a cumulated -5% saturates the score at 1.
    df['neg_streak'] = df['close'].pct_change().apply(lambda x: min(x, 0)).rolling(24).sum()
    df['neg_score'] = np.clip(-df['neg_streak'] / 0.05, 0, 1)

    # SHORT-TERM COMPONENTS: smoothed 3-candle return as a fast crash detector.
    df['pct_change_3'] = df['close'].pct_change(3)
    df['pct_change_3_smooth'] = df['pct_change_3'].rolling(6).mean()
    df['crash_score'] = np.clip(1 + (df['pct_change_3_smooth'] / 0.05), 0, 1)

    # Price speed / acceleration, normalised by the 20-candle std-dev
    # (+1e-9 guards against division by zero on flat price segments).
    df['speed'] = df['close'].diff().rolling(6).mean()
    df['accel'] = df['speed'].diff().rolling(6).mean()
    df['STD20'] = df['close'].rolling(20).std()
    df['accel_score'] = np.clip(1 + (df['accel'] / (df['STD20'] + 1e-9)), 0, 1)

    # FINAL INDEX: weighted blend — weights sum to 1.0, so the raw index
    # stays in [0, 1] since every component score is clipped to [0, 1].
    df['crash_raw'] = (
        0.35 * df['dd_score'] +      # most important for a slow crash
        0.25 * df['neg_score'] +
        0.20 * df['slope_score'] +
        0.10 * df['crash_score'] +
        0.10 * df['accel_score']
    )

    # SIMPLE SMOOTHING: ~24-candle EWM to avoid whipsaw in the index.
    df['crash_risk_index'] = df['crash_raw'].ewm(span=24).mean()

    return df
#
# Legacy 1h-timeframe variant kept for reference (Bollinger/ATR scores and
# heavier double smoothing):
#
# def compute_crash_risk_index(df):
#     # -- volatility
#     df['H-L'] = df['high'] - df['low']
#     df['ATR'] = df['H-L'].rolling(14).mean()
#     df['atr_norm'] = df['ATR'] / df['close']
#
#     # -- smoothed variations to avoid spikes
#     df['pct_change_3'] = df['close'].pct_change(3)
#     df['pct_change_3_smooth'] = df['pct_change_3'].rolling(6).mean()
#
#     # -- speed/accel: smoothed, otherwise it is uncontrollable
#     df['speed'] = df['close'].diff().rolling(6).mean()
#     df['accel'] = df['speed'].diff().rolling(6).mean()
#
#     # -- Bollinger
#     df['MA20'] = df['close'].rolling(20).mean()
#     df['STD20'] = df['close'].rolling(20).std()
#     df['BB_lower'] = df['MA20'] - 2 * df['STD20']
#
#     # -------- Normalised & STABLE scores --------
#     df['crash_score'] = np.clip(1 + (df['pct_change_3_smooth'] / 0.05), 0, 1)
#
#     df['accel_score'] = np.clip(
#         1 + (df['accel'] / (df['STD20'] + 1e-9)),
#         0, 1
#     )
#
#     df['boll_score'] = np.clip(
#         (df['close'] - df['BB_lower']) / (3 * df['STD20']),  # 3 sigma instead of 2 sigma
#         0, 1
#     )
#
#     df['atr_score'] = np.clip(
#         1 - (df['atr_norm'] / 0.06),  # wider tolerance
#         0, 1
#     )
#
#     # -------- COMBINATION + DOUBLE SMOOTHING --------
#     df['crash_raw'] = (
#         0.30 * df['crash_score'] +
#         0.25 * df['accel_score'] +
#         0.20 * df['boll_score'] +
#         0.15 * df['atr_score']
#     )
#
#     # Heavy smoothing to avoid jumps such as 0.3 -> 0.9
#     df['crash_risk_index'] = (
#         df['crash_raw'].ewm(span=48).mean()  # 2 days in 1h candles
#     ).rolling(24).mean()  # final filter
#
#     return df

# Earlier, unsmoothed variant:
#
# def compute_crash_risk_index(df):
#     df['H-L'] = df['high'] - df['low']
#     df['ATR'] = df['H-L'].rolling(14).mean()
#     df['atr_norm'] = df['ATR'] / df['close']
#
#     df['pct_change_3'] = df['close'].pct_change(3)
#
#     df['speed'] = df['close'].diff()
#     df['accel'] = df['speed'].diff()
#
#     df['MA20'] = df['close'].rolling(20).mean()
#     df['STD20'] = df['close'].rolling(20).std()
#     df['BB_lower'] = df['MA20'] - 2 * df['STD20']
#
#     df['crash_score'] = np.clip(1 + (df['pct_change_3'] / 0.05), 0, 1)
#     df['accel_score'] = np.clip(1 + (df['accel'] / df['STD20']), 0, 1)
#     df['boll_score'] = np.clip((df['close'] - df['BB_lower']) / (2 * df['STD20']), 0, 1)
#     df['atr_score'] = np.clip(1 - (df['atr_norm'] / 0.04), 0, 1)
#
#     df['crash_raw'] = (
#         0.30 * df['crash_score'] +
#         0.25 * df['accel_score'] +
#         0.20 * df['boll_score'] +
#         0.15 * df['atr_score']
#     )
#
#     # Heavy smoothing to avoid jumps such as 0.3 -> 0.9
#     df['crash_risk_index'] = (
#         df['crash_raw'].ewm(span=4).mean()  # 2 days in 1h candles
#     ).rolling(2).mean()  # final filter
#
#     return df
# Load the Freqtrade OHLCV feather export.
df = pd.read_feather(DATA_FILE)
print(df.head())

# Index the candles by timestamp.
df['date'] = pd.to_datetime(df['date'])
df = df.set_index('date')

# Compute the crash risk index (returns a new DataFrame with extra columns).
df = compute_crash_risk_index(df)

# Restrict to the window of interest: 2025-10-01 .. 2025-12-10 (inclusive).
df = df["2025-10-01":"2025-12-10"]

plt.figure(figsize=(12, 6))
plt.plot(df.index, df['crash_risk_index'])
plt.title("Crash Risk Index – Novembre 2025")
plt.grid(True)
plt.show()