EMPTY UP AND DOWN

This commit is contained in:
Jérôme Delacotte
2026-03-08 19:54:25 +01:00
parent 12be7e0441
commit 4b51cce9c7

View File

@@ -564,7 +564,7 @@ class Empty5m(IStrategy):
self.pairs[pair]['force_sell'] = True self.pairs[pair]['force_sell'] = True
return 'sma' return 'sma'
if profit > 0 and \ if current_profit > 0.00 and \
(baisse > self.baisse.value and last_candle[f"close"] <= last_candle[self.b30_indicateur.value]) \ (baisse > self.baisse.value and last_candle[f"close"] <= last_candle[self.b30_indicateur.value]) \
and last_candle['hapercent'] <0 : and last_candle['hapercent'] <0 :
self.pairs[pair]['force_sell'] = True self.pairs[pair]['force_sell'] = True
@@ -657,7 +657,7 @@ class Empty5m(IStrategy):
# get access to all pairs available in whitelist. # get access to all pairs available in whitelist.
pairs = self.dp.current_whitelist() pairs = self.dp.current_whitelist()
informative_pairs = [(pair, '1h') for pair in pairs] informative_pairs = [(pair, '1h') for pair in pairs]
# informative_pairs += [(pair, '1h') for pair in pairs] informative_pairs += [(pair, '1d') for pair in pairs]
return informative_pairs return informative_pairs
@@ -705,10 +705,10 @@ class Empty5m(IStrategy):
################### INFORMATIVE 1h ################### INFORMATIVE 1h
informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe="1h") informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe="1h")
# informative = self.populateDataframe(informative, timeframe='1d') # informative = self.populateDataframe(informative, timeframe='1d')
# heikinashi = qtpylib.heikinashi(informative) heikinashi = qtpylib.heikinashi(informative)
# informative['haopen'] = heikinashi['open'] informative['haopen'] = heikinashi['open']
# informative['haclose'] = heikinashi['close'] informative['haclose'] = heikinashi['close']
# informative['hapercent'] = (informative['haclose'] - informative['haopen']) / informative['haclose'] informative['hapercent'] = (informative['haclose'] - informative['haopen']) / informative['haclose']
informative['mid'] = informative['open'] + (informative['close'] - informative['open']) / 2 informative['mid'] = informative['open'] + (informative['close'] - informative['open']) / 2
for timeperiod in timeperiods: for timeperiod in timeperiods:
informative[f'max{timeperiod}'] = talib.MAX(informative['close'], timeperiod=timeperiod) informative[f'max{timeperiod}'] = talib.MAX(informative['close'], timeperiod=timeperiod)
@@ -718,6 +718,7 @@ class Empty5m(IStrategy):
informative[f"sma{timeperiod}"] = informative['mid'].ewm(span=timeperiod, adjust=False).mean() informative[f"sma{timeperiod}"] = informative['mid'].ewm(span=timeperiod, adjust=False).mean()
self.calculeDerivees(informative, f"sma{timeperiod}", timeframe=self.timeframe, ema_period=timeperiod) self.calculeDerivees(informative, f"sma{timeperiod}", timeframe=self.timeframe, ema_period=timeperiod)
informative = self.calculateRegression(informative, column='mid', window=10, degree=1, future_offset=12)
informative = self.calculateRegression(informative, column='sma3', window=10, degree=1, future_offset=12) informative = self.calculateRegression(informative, column='sma3', window=10, degree=1, future_offset=12)
informative = self.calculateRegression(informative, column='low', window=10, degree=1, future_offset=12) informative = self.calculateRegression(informative, column='low', window=10, degree=1, future_offset=12)
@@ -743,10 +744,55 @@ class Empty5m(IStrategy):
else: else:
latched[i] = latched[i - 1] latched[i] = latched[i - 1]
informative['stop_buying'] = latched informative['stop_buying'] = latched
informative = self.calculateDownAndUp(informative, limit=0.0001)
dataframe = merge_informative_pair(dataframe, informative, self.timeframe, "1h", ffill=True) dataframe = merge_informative_pair(dataframe, informative, self.timeframe, "1h", ffill=True)
# ###################################################################################################### # ######################################################################################################
# ######################################################################################################
################### INFORMATIVE 1d
informative = self.dp.get_pair_dataframe(pair=metadata['pair'], timeframe="1d")
# informative = self.populateDataframe(informative, timeframe='1d')
# heikinashi = qtpylib.heikinashi(informative)
# informative['haopen'] = heikinashi['open']
# informative['haclose'] = heikinashi['close']
# informative['hapercent'] = (informative['haclose'] - informative['haopen']) / informative['haclose']
informative['mid'] = informative['open'] + (informative['close'] - informative['open']) / 2
for timeperiod in timeperiods:
informative[f'max{timeperiod}'] = talib.MAX(informative['close'], timeperiod=timeperiod)
informative[f'min{timeperiod}'] = talib.MIN(informative['close'], timeperiod=timeperiod)
# informative[f"range{timeperiod}"] = ((informative["close"] - informative[f'min{timeperiod}']) / (informative[f'max{timeperiod}'] - informative[f'min{timeperiod}']))
# informative[f"percent{timeperiod}"] = informative['close'].pct_change(timeperiod)
informative[f"sma{timeperiod}"] = informative['mid'].ewm(span=timeperiod, adjust=False).mean()
self.calculeDerivees(informative, f"sma{timeperiod}", timeframe=self.timeframe, ema_period=timeperiod)
for timeperiod in long_timeperiods:
informative[f"sma{timeperiod}"] = informative['mid'].ewm(span=timeperiod, adjust=False).mean()
self.calculeDerivees(informative, f"sma{timeperiod}", timeframe=self.timeframe, ema_period=timeperiod)
informative['rsi'] = talib.RSI(informative['close'], timeperiod=14)
self.calculeDerivees(informative, f"rsi", timeframe=self.timeframe, ema_period=14)
informative['max_rsi_12'] = talib.MAX(informative['rsi'], timeperiod=12)
informative['max_rsi_24'] = talib.MAX(informative['rsi'], timeperiod=24)
informative[f'stop_buying_deb'] = qtpylib.crossed_below(informative[f"sma12"], informative['sma36']) & (informative['close'] < informative['sma100'])
informative[f'stop_buying_end'] = qtpylib.crossed_above(informative[f"sma12"], informative['sma36']) & (informative['close'] > informative['sma100'])
latched = np.zeros(len(informative), dtype=bool)
for i in range(1, len(informative)):
if informative['stop_buying_deb'].iloc[i]:
latched[i] = True
elif informative['stop_buying_end'].iloc[i]:
latched[i] = False
else:
latched[i] = latched[i - 1]
informative['stop_buying'] = latched
dataframe = merge_informative_pair(dataframe, informative, self.timeframe, "1d", ffill=True)
# ######################################################################################################
range_min = dataframe[f"min12_1h"] range_min = dataframe[f"min12_1h"]
range_max = dataframe[f"max48"] range_max = dataframe[f"max48"]
@@ -768,6 +814,9 @@ class Empty5m(IStrategy):
/ dataframe["sma200_1h"] / dataframe["sma200_1h"]
) )
# Compter les baisses / hausses consécutives
dataframe = self.calculateDownAndUp(dataframe, limit=0.0001)
# récupérer le dernier trade fermé # récupérer le dernier trade fermé
trades = Trade.get_trades_proxy(pair=pair,is_open=False) trades = Trade.get_trades_proxy(pair=pair,is_open=False)
if trades: if trades:
@@ -935,7 +984,8 @@ class Empty5m(IStrategy):
conditions.append(dataframe['close'] <= dataframe['min12_1h']) conditions.append(dataframe['close'] <= dataframe['min12_1h'])
conditions.append(dataframe['min60'] == dataframe['min60'].shift(5)) conditions.append(dataframe['min60'] == dataframe['min60'].shift(5))
conditions.append(dataframe['has_cross_min_6'] == 1) conditions.append(dataframe['has_cross_min_6'] == 1)
# conditions.append(dataframe['min5_1h'] / dataframe['min12_1h'] < 0.03) conditions.append(dataframe['down_count'] <= 5)
conditions.append(dataframe['down_count'] <= 5)
# conditions.append(dataframe['sma12_deriv1'] >= 0) # conditions.append(dataframe['sma12_deriv1'] >= 0)
dataframe.loc[ dataframe.loc[
reduce(lambda x, y: x & y, conditions), reduce(lambda x, y: x & y, conditions),
@@ -1342,3 +1392,25 @@ class Empty5m(IStrategy):
# df[f"{column}_future_{future_offset}_deriv2"] = round(10 * df[f"{column}_future_{future_offset}_deriv1"].rolling(int(window / 4)).mean().diff(), 4) # df[f"{column}_future_{future_offset}_deriv2"] = round(10 * df[f"{column}_future_{future_offset}_deriv1"].rolling(int(window / 4)).mean().diff(), 4)
return df return df
def calculateDownAndUp(self, dataframe, limit=0.0001):
    """Annotate consecutive down/up runs of the 'mid_regression' column.

    Adds to *dataframe*:
      - 'down' / 'up': boolean flags, True while the regression is
        non-increasing / non-decreasing versus the previous row;
      - 'down_count' / 'up_count': signed consecutive-run counters
        (negative for down runs, positive for up runs, 0 otherwise);
      - 'down_pct' / 'up_pct': percent move of 'close' over each run,
        delegated to calculateUpDownPct.

    NOTE(review): *limit* is currently unused — presumably intended to
    ignore moves smaller than this threshold; confirm before relying on it.

    :param dataframe: DataFrame containing a 'mid_regression' column.
    :param limit: unused threshold (kept for interface compatibility).
    :return: the same DataFrame, with the columns above added.
    """
    regression = dataframe['mid_regression']
    previous = regression.shift(1)
    dataframe['down'] = regression <= previous
    dataframe['up'] = regression >= previous

    def _run_length(flags):
        # Length of the current consecutive run: a new run id starts
        # every time the flag flips, then we count within each run.
        run_id = (flags != flags.shift()).cumsum()
        return flags.groupby(run_id).cumcount() + 1

    # Signed counters: down runs count negatively, up runs positively;
    # rows where the flag is False are zeroed by the astype(int) factor.
    dataframe['down_count'] = -dataframe['down'].astype(int) * _run_length(dataframe['down'])
    dataframe['up_count'] = dataframe['up'].astype(int) * _run_length(dataframe['up'])
    dataframe['down_pct'] = self.calculateUpDownPct(dataframe, 'down_count')
    dataframe['up_pct'] = self.calculateUpDownPct(dataframe, 'up_count')
    return dataframe
def calculateUpDownPct(self, dataframe, key):
    """Percent change of 'close' over the run length stored in *key*.

    For each row i, reads the signed run counter in ``dataframe[key]``,
    uses its absolute value as a lookback offset, and computes the
    percentage move of 'close' between row ``i - offset`` and row ``i``.

    :param dataframe: DataFrame with a 'close' column and the *key* column.
    :param key: name of the run-length column ('down_count' / 'up_count').
    :return: numpy float array, NaN wherever no value is computable.
    """
    pct_values = np.full(len(dataframe), np.nan)
    # Hoist the column data out of the loop: per-iteration .iloc lookups
    # are far slower than plain array indexing.
    close = dataframe['close'].to_numpy()
    counts = dataframe[key].to_numpy()
    for i in range(len(close)):
        raw = counts[i]
        # Guard: int(NaN) raises ValueError — leave NaN in the output instead.
        if not np.isfinite(raw):
            continue
        ref = i - abs(int(raw))
        # Fix: the original guard was `i - shift_value > 1`, which silently
        # skipped the valid reference indices 0 and 1 even though its stated
        # intent was only to keep the lookback inside the frame; any
        # non-negative index is usable. Also guard against a zero reference
        # price, which would otherwise raise ZeroDivisionError.
        if ref >= 0 and close[ref] != 0:
            pct_values[i] = 100 * (close[i] - close[ref]) / close[ref]
    return pct_values