diff --git a/Zeus_8_3_2_B_4_2.py b/Zeus_8_3_2_B_4_2.py index f5c63df..f8c5083 100644 --- a/Zeus_8_3_2_B_4_2.py +++ b/Zeus_8_3_2_B_4_2.py @@ -41,7 +41,7 @@ def normalize(df): class Zeus_8_3_2_B_4_2(IStrategy): levels = [1, 2, 2, 4, 6, 8, 10, 12, 14, 16, 18, 20] - startup_candle_count = 24 + startup_candle_count = 12 * 24 * 2 # ROI table: minimal_roi = { @@ -225,12 +225,14 @@ class Zeus_8_3_2_B_4_2(IStrategy): # Données sous forme de dictionnaire # Bornes des quantiles pour - mid_smooth_deriv1_144_bins = [-13.5716, -0.2332, -0.1108, -0.0566, -0.0246, -0.0014, 0.0096, 0.0340, 0.0675, 0.1214, 0.2468, 8.5702] + mid_smooth_deriv1_24_bins = [-13.5716, -0.2332, -0.1108, -0.0566, -0.0246, -0.0014, 0.0096, 0.0340, 0.0675, 0.1214, 0.2468, 8.5702] sma144_diff_bins = [-0.2592, -0.0166, -0.0091, -0.0051, -0.0025, -0.0005, 0.0012, 0.0034, 0.0062, 0.0105, 0.0183, 0.2436] # Bornes des quantiles pour - mid_smooth_deriv2_144_bins = [-10.2968, -0.2061, -0.0996, -0.0559, -0.0292, -0.0093, 0.0083, 0.0281, 0.0550, 0.0999, 0.2072, 10.2252] + mid_smooth_deriv2_24_bins = [-10.2968, -0.2061, -0.0996, -0.0559, -0.0292, -0.0093, 0.0083, 0.0281, 0.0550, 0.0999, 0.2072, 10.2252] + # ========================================================================= + # variables pour probabilité 144 bougies mid_smooth_1h_bins = [-2.0622, -0.1618, -0.0717, -0.0353, -0.0135, 0.0, 0.0085, 0.0276, 0.0521, 0.0923, 0.1742, 2.3286] sma24_diff_1h_bins = [-0.84253877, -0.13177195, -0.07485074, -0.04293497, -0.02033502, -0.00215711, 0.01411933, 0.03308264, 0.05661652, 0.09362708, 0.14898214, 0.50579505] @@ -251,6 +253,8 @@ class Zeus_8_3_2_B_4_2(IStrategy): # Extraction de la matrice numérique smooth_smadiff_numeric_matrice = smooth_smadiff_matrice_df.reindex(index=ordered_labels, columns=ordered_labels).values + # ========================================================================= + # variables pour probabilité smooth_pct_max_hour_matrice = { 'B5': [43.5, 52.7, 62.3, 65.5, 86.9, 63.1, 81.5, 86.7, 90.2, 90.1, 
93.0], 'B4': [34.9, 46.3, 53.6, 60.4, 75.8, 83.3, 81.5, 83.0, 86.4, 86.9, 91.1], @@ -269,8 +273,10 @@ class Zeus_8_3_2_B_4_2(IStrategy): # Extraction de la matrice numérique smooth_pct_max_hour_numeric_matrice = smooth_pct_max_hour_matrice_df.reindex(index=ordered_labels, columns=ordered_labels).values + # ========================================================================= + # variables pour probabilité 144 bougies # Données sous forme de dictionnaire - smooth_sma_144_diff_matrice = { + smooth_sma_24_diff_matrice = { "B5":[40.3, 52.1, 60.2, 68.6, 86.3, 76.5, 75.1, 83.5, 88.7, 96.3, 91.6], "B4":[26.6, 39.4, 48.1, 57.0, 76.7, 82.4, 79.6, 82.4, 91.8, 86.6, 87.8], "B3":[21.5, 27.7, 42.7, 53.2, 70.9, 76.6, 80.8, 79.4, 88.3, 88.0, 87.8], @@ -284,15 +290,42 @@ class Zeus_8_3_2_B_4_2(IStrategy): "H5":[17.9, 25.7, 20.8, 17.8, 8.7, 18.5, 32.3, 37.7, 49.3, 59.8, 61.7] } - smooth_sma_144_diff_matrice_df = pd.DataFrame(smooth_smadiff_matrice, index=index_labels) + smooth_sma_24_diff_matrice_df = pd.DataFrame(smooth_sma_24_diff_matrice, index=index_labels) # Extraction de la matrice numérique - smooth_sma_144_diff_numeric_matrice = smooth_sma_144_diff_matrice_df.reindex(index=ordered_labels, columns=ordered_labels).values + smooth_sma_24_diff_numeric_matrice = smooth_sma_24_diff_matrice_df.reindex(index=ordered_labels, columns=ordered_labels).values # Bornes des quantiles pour mid_smooth_deriv1_1h = [-11.5091, -0.4887, -0.1902, -0.0823, -0.0281, -0.0008, 0.0110, 0.0439, 0.1066, 0.2349, 0.5440, 14.7943] # Bornes des quantiles pour mid_smooth_deriv2_1h = [-6.2109, -0.2093, -0.0900, -0.0416, -0.0171, -0.0035, 0.0033, 0.0168, 0.0413, 0.0904, 0.2099, 6.2109] + # ========================================================================= + # variables pour probabilité jour + # Bornes des quantiles pour + mid_smooth_deriv1_1h_1d_bins = [-11.5091, -0.4887, -0.1902, -0.0823, -0.0281, -0.0008, 0.0110, 0.0439, 0.1066, 0.2349, 0.5440, 14.7943] + # Bornes des quantiles pour + 
sma24_diff_1h_1d_bins = [-2.1101, -0.1413, -0.0768, -0.0433, -0.0196, -0.0028, 0.0120, 0.0304, 0.0560, 0.0933, 0.1568, 0.7793] + + smooth_1d_sma_2_diff_1d_matrice = { + 'B5': [42.5, 47.8, 52.7, 48.5, 54.2, 64.6, 70.8, 69.2, 72.3, 71.2, 79.9], + 'B4': [34.1, 43.5, 45.7, 53.7, 52.6, 67.3, 63.9, 70.8, 73.5, 67.9, 82.9], + 'B3': [33.7, 42.7, 45.8, 49.6, 49.0, 57.8, 64.7, 68.7, 70.7, 72.6, 87.1], + 'B2': [30.0, 36.6, 40.5, 42.3, 51.2, 62.0, 64.4, 65.2, 69.8, 74.3, 84.9], + 'B1': [21.4, 29.8, 33.6, 39.9, 49.4, 56.1, 59.9, 63.9, 71.0, 72.8, 79.6], + 'N0': [19.8, 30.4, 34.5, 41.5, 42.2, 48.1, 61.7, 64.5, 73.7, 69.3, 79.4], + 'H1': [22.7, 27.0, 36.9, 34.8, 46.3, 50.2, 58.9, 63.1, 65.8, 66.5, 80.0], + 'H2': [23.1, 34.3, 32.2, 31.0, 38.8, 54.3, 53.6, 55.1, 60.3, 63.3, 77.4], + 'H3': [17.0, 32.6, 37.4, 31.0, 35.1, 36.7, 45.2, 53.0, 55.4, 58.6, 71.8], + 'H4': [22.7, 31.9, 28.0, 35.8, 36.3, 46.9, 53.9, 53.8, 58.8, 58.0, 67.6], + 'H5': [18.8, 27.0, 32.1, 36.0, 41.9, 48.1, 49.8, 53.6, 57.2, 62.2, 65.2], + } + smooth_1d_sma_2_diff_1d_matrice_df = pd.DataFrame(smooth_1d_sma_2_diff_1d_matrice, index=index_labels) + # Extraction de la matrice numérique + smooth_1d_sma_2_diff_1d_numeric_matrice = smooth_1d_sma_2_diff_1d_matrice_df.reindex(index=ordered_labels, columns=ordered_labels).values + + # ========================================================================= + # Parameters hyperopt + buy_val = IntParameter(1, 10, default=50, space='buy') buy_val_adjust = IntParameter(1, 10, default=50, space='buy') @@ -526,7 +559,7 @@ class Zeus_8_3_2_B_4_2(IStrategy): self.printLog( f"| {date:<16} | {action:<10} | {pair[0:3]:<3} | {trade_type or '-':<18} |{rate or '-':>9}| {dispo or '-':>6} " f"| {profit or '-':>8} | {pct_max or '-':>6} | {round(self.pairs[pair]['max_touch'], 2) or '-':>11} | {last_lost or '-':>12} " - f"| {round(self.pairs[pair]['last_max'], 0) or '-':>7} |{buys or '-':>4}|{stake or '-':>7}" + f"| {int(self.pairs[pair]['last_max']) or '-':>7} |{buys or '-':>4}|{stake or '-':>7}" 
f"|{last_candle['tendency'] or '-':>3}|{last_candle['tendency_1h'] or '-':>3}|{last_candle['tendency_1d'] or '-':>3}" f"|{round(last_candle['mid_smooth_deriv1'],3) or '-':>6}|{round(last_candle['mid_smooth_deriv1_1h'],3) or '-':>6}|{round(last_candle['mid_smooth_deriv1_1d'],3) or '-' :>6}|" f"{round(last_candle['mid_smooth_deriv2'],3) or '-' :>6}|{round(last_candle['mid_smooth_deriv2_1h'],3) or '-':>6}|{round(last_candle['mid_smooth_deriv2_1d'],3) or '-':>6}|" @@ -598,7 +631,7 @@ class Zeus_8_3_2_B_4_2(IStrategy): dataframe["percent12"] = (dataframe["close"] - dataframe["open"].shift(12)) / dataframe["open"].shift(12) dataframe = self.calculateTendency(dataframe, window=12) - dataframe = self.calculateTendency(dataframe, window=48, suffixe="_144", factor_1=1000, factor_2=10) + dataframe = self.calculateTendency(dataframe, window=24, suffixe="_24", factor_1=1000, factor_2=10) # print(metadata['pair']) dataframe['rsi'] = talib.RSI(dataframe['close'], timeperiod=14) @@ -629,7 +662,8 @@ class Zeus_8_3_2_B_4_2(IStrategy): # Compter les baisses consécutives self.calculateDownAndUp(dataframe, limit=0.0001) - dataframe = self.calculateRegression(dataframe, column='mid_smooth_144', window=144, degree=3, future_offset=12) + dataframe = self.calculateRegression(dataframe, column='mid_smooth', window=24, degree=4, future_offset=12) + dataframe = self.calculateRegression(dataframe, column='mid_smooth_24', window=144, degree=4, future_offset=12) ################### INFORMATIVE 1h informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe="1h") @@ -639,13 +673,11 @@ class Zeus_8_3_2_B_4_2(IStrategy): informative['hapercent'] = (informative['haclose'] - informative['haopen']) / informative['haclose'] informative = self.calculateTendency(informative, window=12) - # informative = self.apply_regression_derivatives(informative, column='mid', window=5, degree=3) + # informative = self.apply_regression_derivatives(informative, column='mid', window=5, degree=4) # 
informative['volatility'] = talib.STDDEV(informative['close'], timeperiod=14) / informative['close'] # informative['atr'] = (talib.ATR(informative['high'], informative['low'], informative['close'], timeperiod=14)) / informative['close'] informative['rsi'] = talib.RSI(informative['close']) #, timeperiod=7) informative['rsi_diff'] = informative['rsi'].diff() - informative['rsi_sum'] = (informative['rsi'].rolling(7).sum() - 350) / 7 - informative['rsi_sum_diff'] = informative['rsi_sum'].diff() informative['rsi_diff_2'] = informative['rsi_diff'].diff() informative['max12'] = talib.MAX(informative['close'], timeperiod=12) informative['min12'] = talib.MIN(informative['close'], timeperiod=12) @@ -661,11 +693,11 @@ class Zeus_8_3_2_B_4_2(IStrategy): ################### INFORMATIVE 1d informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe="1d") - informative = self.calculateTendency(informative, window=7) + informative = self.calculateTendency(informative, window=5, factor_1=10000, factor_2=1000) # informative['volatility'] = talib.STDDEV(informative['close'], timeperiod=14) / informative['close'] # informative['atr'] = (talib.ATR(informative['high'], informative['low'], informative['close'], timeperiod=14)) / informative['close'] - # informative = self.apply_regression_derivatives(informative, column='mid', window=5, degree=3) + # informative = self.apply_regression_derivatives(informative, column='mid', window=5, degree=4) informative['max12'] = talib.MAX(informative['close'], timeperiod=12) informative['min12'] = talib.MIN(informative['close'], timeperiod=12) informative['max3'] = talib.MAX(informative['close'], timeperiod=3) @@ -673,15 +705,11 @@ class Zeus_8_3_2_B_4_2(IStrategy): informative['rsi'] = talib.RSI(informative['close']) #, timeperiod=7) informative['rsi_diff'] = informative['rsi'].diff() - informative['rsi_sum'] = (informative['rsi'].rolling(7).sum() - 350) / 7 informative['rsi_diff_2'] = informative['rsi_diff'].diff() informative['sma5'] 
= talib.SMA(informative, timeperiod=5) informative['sma5_diff'] = 100 * (informative['sma5'].rolling(5).mean().diff()) / informative['sma5'] - informative['futur_percent_1d'] = 100 * (informative['close'].shift(-1) - informative['close']) / informative['close'] - informative['futur_percent_3d'] = 100 * (informative['close'].shift(-3) - informative['close']) / informative['close'] - dataframe = merge_informative_pair(dataframe, informative, self.timeframe, "1d", ffill=True) dataframe['last_price'] = dataframe['close'] @@ -718,27 +746,47 @@ class Zeus_8_3_2_B_4_2(IStrategy): # dataframe['amount'] = amount print(f"amount= {amount}") - # dataframe['mid_smooth_tag'] = qtpylib.crossed_below(dataframe['mid_smooth_deriv1_144'], dataframe['mid_smooth_deriv2_144']) - dataframe['mid_smooth_1h'] = dataframe['mid_smooth_1h'].rolling(window=12, center=True).mean() - dataframe["mid_smooth_deriv1_1h"] = dataframe["mid_smooth_1h"].rolling(12).mean().diff() / 12 - dataframe["mid_smooth_deriv2_1h"] = 12 * dataframe["mid_smooth_deriv1_1h"].rolling(12).mean().diff() + # dataframe['mid_smooth_tag'] = qtpylib.crossed_below(dataframe['mid_smooth_deriv1_24'], dataframe['mid_smooth_deriv2_24']) - dataframe['sma5_1h'] = dataframe['sma5_1h'].rolling(window=12, center=True).mean() - dataframe['sma5_diff_1h'] = dataframe['sma5_diff_1h'].rolling(window=12, center=True).mean() - dataframe['sma24_1h'] = dataframe['sma24_1h'].rolling(window=12, center=True).mean() - dataframe['sma24_diff_1h'] = dataframe['sma24_diff_1h'].rolling(window=12, center=True).mean() + # =============================== + # lissage des valeurs horaires + horizon_h = 12 * 5 + horizon_d = 24 * 5 + dataframe['mid_smooth_1h'] = dataframe['mid_smooth_1h'].rolling(window=horizon_h).mean() + dataframe["mid_smooth_deriv1_1h"] = dataframe["mid_smooth_1h"].rolling(horizon_h).mean().diff() / horizon_h + dataframe["mid_smooth_deriv2_1h"] = horizon_h * dataframe["mid_smooth_deriv1_1h"].rolling(horizon_h).mean().diff() - # 
dataframe['sma5_1d'] = dataframe['sma5_1d'].interpolate(method='linear') + dataframe['sma5_1h'] = dataframe['sma5_1h'].rolling(window=horizon_h).mean() + dataframe['sma5_diff_1h'] = dataframe['sma5_diff_1h'].rolling(window=horizon_h).mean() + dataframe['sma24_1h'] = dataframe['sma24_1h'].rolling(window=horizon_h).mean() + dataframe['sma24_diff_1h'] = dataframe['sma24_diff_1h'].rolling(window=horizon_h).mean() - # dataframe['sma5_1d'] = dataframe['sma5_1d'].interpolate(method='linear') + dataframe = self.calculateRegression(dataframe, column='mid_smooth_1h', window=horizon_h * 12, degree=4, future_offset=24) + + # =============================== + # Lissage des valeurs horaires + dataframe['mid_smooth_1d'] = dataframe['mid_smooth_1h'].rolling(window=horizon_d * 5).mean() + dataframe["mid_smooth_deriv1_1d"] = dataframe["mid_smooth_1d"].rolling(horizon_d).mean().diff() / horizon_d + dataframe["mid_smooth_deriv2_1d"] = horizon_d * dataframe["mid_smooth_deriv1_1d"].rolling(horizon_d).mean().diff() + + dataframe['sma5_1d'] = dataframe['sma5_1d'].rolling(window=horizon_d * 5).mean() + dataframe['sma5_diff_1d'] = dataframe['sma5_diff_1d'].rolling(window=horizon_d).mean() + # dataframe['sma24_1d'] = dataframe['sma24_1d'].rolling(window=horizon_d).mean() + # dataframe['sma24_diff_1d'] = dataframe['sma24_diff_1d'].rolling(window=horizon_d).mean() + # dataframe = self.calculateRegression(dataframe, column='mid_smooth_1d', window=24, degree=4, future_offset=12) dataframe['percent_with_previous_day'] = 100 * (dataframe['close'] - dataframe['close_1d']) / dataframe['close'] dataframe['percent_with_max_hour'] = 100 * (dataframe['close'] - dataframe['max12_1h']) / dataframe['close'] - dataframe['futur_percent_1h'] = 100 * (dataframe['close'].shift(-12) - dataframe['close']) / dataframe['close'] - dataframe['futur_percent_3h'] = 100 * (dataframe['close'].shift(-36) - dataframe['close']) / dataframe['close'] - dataframe['futur_percent_5h'] = 100 * (dataframe['close'].shift(-60) - 
dataframe['close']) / dataframe['close'] - dataframe['futur_percent_12h'] = 100 * (dataframe['close'].shift(-144) - dataframe['close']) / dataframe['close'] + dataframe['futur_percent_1h'] = 100 * ((dataframe['mid_smooth_1h'].shift(-12) - dataframe['mid_smooth_1h']) / dataframe['mid_smooth_1h']).rolling(horizon_h).mean() + dataframe['futur_percent_3h'] = 100 * ((dataframe['mid_smooth_1h'].shift(-36) - dataframe['mid_smooth_1h']) / dataframe['mid_smooth_1h']).rolling(horizon_h).mean() + dataframe['futur_percent_5h'] = 100 * ((dataframe['mid_smooth_1h'].shift(-60) - dataframe['mid_smooth_1h']) / dataframe['mid_smooth_1h']).rolling(horizon_h).mean() + dataframe['futur_percent_12h'] = 100 * ((dataframe['mid_smooth_1h'].shift(-144) - dataframe['mid_smooth_1h']) / dataframe['mid_smooth_1h']).rolling(horizon_h).mean() + + # dataframe['futur_percent_1d'] = 100 * (dataframe['close'].shift(-1) - dataframe['close']) / dataframe['close'] + # dataframe['futur_percent_3d'] = 100 * (dataframe['close'].shift(-3) - dataframe['close']) / dataframe['close'] + + self.calculateProbabilite2Index(dataframe, ['futur_percent_12h'], 'mid_smooth_deriv1_1d', 'sma24_diff_1h') return dataframe @@ -755,8 +803,8 @@ class Zeus_8_3_2_B_4_2(IStrategy): def calculateTendency(self, dataframe, window=12, suffixe='', factor_1=100, factor_2=10): dataframe['mid'] = dataframe['open'] + (dataframe['close'] - dataframe['open']) / 2 - # 2. Calcul du lissage par moyenne mobile médiane - dataframe[f"mid_smooth{suffixe}"] = dataframe['close'].rolling(window=window, center=True, min_periods=1).median().rolling( + # 1. Calcul du lissage par moyenne mobile médiane + dataframe[f"mid_smooth{suffixe}"] = dataframe['close'].rolling(window=window).median().rolling( int(window / 4)).mean() # 2. 
Dérivée première = différence entre deux bougies successives dataframe[f"mid_smooth_deriv1{suffixe}"] = round(factor_1 * dataframe[f"mid_smooth{suffixe}"].diff() / dataframe[f"mid_smooth{suffixe}"], 4) @@ -815,29 +863,11 @@ class Zeus_8_3_2_B_4_2(IStrategy): dataframe['test'] = np.where(dataframe['enter_long'] == 1, dataframe['close'] * 1.01, np.nan) if self.dp.runmode.value in ('backtest'): - dataframe.to_feather(f"user_data/data/binance/{metadata['pair'].replace('/', '_')}_df.feather") + today = datetime.now().strftime("%Y-%m-%d-%H:%M:%S") + dataframe.to_feather(f"user_data/data/binance/{today}-{metadata['pair'].replace('/', '_')}_df.feather") df = dataframe - # # Définition des tranches pour les dérivées - # bins_deriv = [-np.inf, -0.05, -0.01, 0.01, 0.05, np.inf] - # labels = ['forte baisse', 'légère baisse', 'neutre', 'légère hausse', 'forte hausse'] - # - # # Ajout des colonnes bin (catégorisation) - # df[f"{indic_1}_bin"] = pd.cut(df['mid_smooth_deriv1_1h'], bins=bins_deriv, labels=labels) - # df[f"{indic_2}_bin"] = pd.cut(df['mid_smooth_deriv1_1d'], bins=bins_deriv, labels=labels) - # - # # Colonnes de prix futur à analyser - # futur_cols = ['futur_percent_1h', 'futur_percent_2h', 'futur_percent_3h', 'futur_percent_4h', 'futur_percent_5h'] - # - # # Calcul des moyennes et des effectifs - # grouped = df.groupby([f"{indic_2}_bin", f"{indic_1}_bin"])[futur_cols].agg(['mean', 'count']) - # - # pd.set_option('display.width', 200) # largeur max affichage - # pd.set_option('display.max_columns', None) - pd.set_option('display.max_columns', None) - pd.set_option('display.width', 300) # largeur max affichage - # Colonnes à traiter # futur_cols = ['futur_percent_1h', 'futur_percent_3h', 'futur_percent_5h', 'futur_percent_12h'] futur_cols = ['futur_percent_1h'] @@ -855,9 +885,28 @@ class Zeus_8_3_2_B_4_2(IStrategy): return dataframe def calculateProbabilite2Index(self, df, futur_cols, indic_1, indic_2): + # # Définition des tranches pour les dérivées + # bins_deriv = 
[-np.inf, -0.05, -0.01, 0.01, 0.05, np.inf] + # labels = ['forte baisse', 'légère baisse', 'neutre', 'légère hausse', 'forte hausse'] + # + # # Ajout des colonnes bin (catégorisation) + # df[f"{indic_1}_bin"] = pd.cut(df['mid_smooth_deriv1_1h'], bins=bins_deriv, labels=labels) + # df[f"{indic_2}_bin"] = pd.cut(df['mid_smooth_deriv1_1d'], bins=bins_deriv, labels=labels) + # + # # Colonnes de prix futur à analyser + # futur_cols = ['futur_percent_1h', 'futur_percent_2h', 'futur_percent_3h', 'futur_percent_4h', 'futur_percent_5h'] + # + # # Calcul des moyennes et des effectifs + # grouped = df.groupby([f"{indic_2}_bin", f"{indic_1}_bin"])[futur_cols].agg(['mean', 'count']) + # + # pd.set_option('display.width', 200) # largeur max affichage + # pd.set_option('display.max_columns', None) + pd.set_option('display.max_columns', None) + pd.set_option('display.width', 300) # largeur max affichage + # nettoyage - series = df[f"{indic_2}"].dropna() - unique_vals = df[f"{indic_2}"].nunique() + # series = df[f"{indic_2}"].dropna() + # unique_vals = df[f"{indic_2}"].nunique() # print(unique_vals) # print(df[f"{indic_2}"]) n = len(self.labels) @@ -1035,10 +1084,10 @@ class Zeus_8_3_2_B_4_2(IStrategy): return None def getProbaHausse144(self, last_candle): - value_1 = self.getValuesFromTable(self.mid_smooth_deriv1_144_bins, last_candle['mid_smooth_deriv1_144']) + value_1 = self.getValuesFromTable(self.mid_smooth_deriv1_24_bins, last_candle['mid_smooth_deriv1_24']) value_2 = self.getValuesFromTable(self.sma144_diff_bins, last_candle['sma144_diff']) - val = self.approx_val_from_bins(matrice=self.smooth_sma_144_diff_matrice_df, numeric_matrice=self.smooth_sma_144_diff_numeric_matrice, row_label=value_2, + val = self.approx_val_from_bins(matrice=self.smooth_sma_24_diff_matrice_df, numeric_matrice=self.smooth_sma_24_diff_numeric_matrice, row_label=value_2, col_label=value_1) return val @@ -1121,54 +1170,65 @@ class Zeus_8_3_2_B_4_2(IStrategy): # Détecter les zones de retournement: quand 
dérivée1 ≈ 0 et que dérivée2 change de signe. def calculateRegression(self, dataframe: DataFrame, - column: str = 'close', - window: int = 50, - degree: int = 3, + column= 'close', + window= 50, + degree=3, future_offset: int = 10 # projection à n bougies après ) -> DataFrame: df = dataframe.copy() regression_fit = [] - # deriv1 = [] - # deriv2 = [] regression_future_fit = [] - # regression_future_deriv1 = [] - # regression_future_deriv2 = [] + + regression_fit = [] + regression_future_fit = [] for i in range(len(df)): - if i < window or i + future_offset >= len(df): + if i < window: regression_fit.append(np.nan) - # deriv1.append(np.nan) - # deriv2.append(np.nan) regression_future_fit.append(np.nan) - # regression_future_deriv1.append(np.nan) - # regression_future_deriv2.append(np.nan) continue - y = df[column].iloc[i - window:i].values - x = np.arange(window) + # Fin de la fenêtre d’apprentissage + end_index = i + start_index = i - window + y = df[column].iloc[start_index:end_index].values + # Si les données sont insuffisantes (juste par précaution) + if len(y) < window: + regression_fit.append(np.nan) + regression_future_fit.append(np.nan) + continue + + # x centré pour meilleure stabilité numérique + x = np.linspace(-1, 1, window) coeffs = np.polyfit(x, y, degree) poly = np.poly1d(coeffs) - x_now = window - 1 - x_future = x_now + future_offset - + # Calcul point présent (dernier de la fenêtre) + x_now = x[-1] regression_fit.append(poly(x_now)) - # deriv1.append(np.polyder(poly, 1)(x_now)) - # deriv2.append(np.polyder(poly, 2)(x_now)) + # Calcul point futur, en ajustant si on dépasse la fin + remaining = len(df) - i - 1 + effective_offset = min(future_offset, remaining) + x_future = x_now + (effective_offset / window) * 2 # respect du même pas regression_future_fit.append(poly(x_future)) - # regression_future_deriv1.append(np.polyder(poly, 1)(x_future)) - # regression_future_deriv2.append(np.polyder(poly, 2)(x_future)) - # df['regression_fit'] = regression_fit - 
# df['regression_deriv1'] = deriv1 - # df['regression_deriv2'] = deriv2 + df[f"{column}_regression"] = regression_fit + # 2. Dérivée première = différence entre deux bougies successives + df[f"{column}_regression_deriv1"] = round(100 * df[f"{column}_regression"].diff() / df[f"{column}_regression"], 4) - df['regression_future_fit'] = regression_future_fit - # df['regression_future_deriv1'] = regression_future_deriv1 - # df['regression_future_deriv2'] = regression_future_deriv2 + # 3. Dérivée seconde = différence de la dérivée première + df[f"{column}_regression_deriv2"] = round(10 * df[f"{column}_regression_deriv1"].rolling(int(window / 4)).mean().diff(), 4) + + df[f"{column}_future_{future_offset}"] = regression_future_fit + + # # 2. Dérivée première = différence entre deux bougies successives + # df[f"{column}_future_{future_offset}_deriv1"] = round(100 * df[f"{column}_future_{future_offset}"].diff() / df[f"{column}_future_{future_offset}"], 4) + # + # # 3. Dérivée seconde = différence de la dérivée première + # df[f"{column}_future_{future_offset}_deriv2"] = round(10 * df[f"{column}_future_{future_offset}_deriv1"].rolling(int(window / 4)).mean().diff(), 4) return df diff --git a/tools/statistique/deriv1_1d_sma_diff.py b/tools/statistique/deriv1_1d_sma_diff.py new file mode 100644 index 0000000..9165e76 --- /dev/null +++ b/tools/statistique/deriv1_1d_sma_diff.py @@ -0,0 +1,49 @@ +import matplotlib.pyplot as plt +import numpy as np +from mpl_toolkits.mplot3d import Axes3D + +# Labels +x_labels = ['B5', 'B4', 'B3', 'B2', 'B1', 'N0', 'H1', 'H2', 'H3', 'H4', 'H5'] +y_labels = ['B5', 'B4', 'B3', 'B2', 'B1', 'N0', 'H1', 'H2', 'H3', 'H4', 'H5'] + +# Data matrix +data = np.array([ + [42.5, 47.8, 52.7, 48.5, 54.2, 64.6, 70.8, 69.2, 72.3, 71.2, 79.9], + [34.1, 43.5, 45.7, 53.7, 52.6, 67.3, 63.9, 70.8, 73.5, 67.9, 82.9], + [33.7, 42.7, 45.8, 49.6, 49.0, 57.8, 64.7, 68.7, 70.7, 72.6, 87.1], + [30.0, 36.6, 40.5, 42.3, 51.2, 62.0, 64.4, 65.2, 69.8, 74.3, 84.9], + [21.4, 29.8, 
33.6, 39.9, 49.4, 56.1, 59.9, 63.9, 71.0, 72.8, 79.6], + [19.8, 30.4, 34.5, 41.5, 42.2, 48.1, 61.7, 64.5, 73.7, 69.3, 79.4], + [22.7, 27.0, 36.9, 34.8, 46.3, 50.2, 58.9, 63.1, 65.8, 66.5, 80.0], + [23.1, 34.3, 32.2, 31.0, 38.8, 54.3, 53.6, 55.1, 60.3, 63.3, 77.4], + [17.0, 32.6, 37.4, 31.0, 35.1, 36.7, 45.2, 53.0, 55.4, 58.6, 71.8], + [22.7, 31.9, 28.0, 35.8, 36.3, 46.9, 53.9, 53.8, 58.8, 58.0, 67.6], + [18.8, 27.0, 32.1, 36.0, 41.9, 48.1, 49.8, 53.6, 57.2, 62.2, 65.2] +]) + +# Meshgrid for 3D plotting +x = np.arange(len(x_labels)) +y = np.arange(len(y_labels)) +x, y = np.meshgrid(x, y) +z = data + +# Plotting +fig = plt.figure(figsize=(12, 8)) +ax = fig.add_subplot(111, projection='3d') +surf = ax.plot_surface(x, y, z, cmap='viridis', edgecolor='k') + +# Axes settings +ax.set_xticks(np.arange(len(x_labels))) +ax.set_yticks(np.arange(len(y_labels))) +ax.set_xticklabels(x_labels) +ax.set_yticklabels(y_labels) +ax.set_xlabel('mid_smooth_deriv1_1d_bin') +ax.set_ylabel('sma24_diff_1h_bin') +ax.set_zlabel('Probabilité de hausse (%)') +ax.set_title('Probabilité de hausse pour futur_percent_12h (%)') + +# Colorbar +fig.colorbar(surf, ax=ax, shrink=0.5, aspect=10) + +plt.tight_layout() +plt.show()