import json
import os
import pickle
import zipfile
from io import TextIOWrapper

import joblib
import pandas as pd
from flask import Flask, abort, jsonify, render_template, request, send_from_directory
from ydata_profiling import ProfileReport
app = Flask(__name__)

# Mount point of the freqtrade user-data volume; backtest archives live under
# <dir>/backtest_results, market data under <dir>/data/binance, and generated
# reports under <dir>/reports.
FREQTRADE_USERDATA_DIR = '/mnt/external'
@app.route('/')
def home():
    """Render the index page listing the available data files.

    Shows ``*.zip`` backtest archives from ``backtest_results`` and
    ``*.feather`` market-data files from ``data/binance``.
    """
    def _list_files(subdir, suffix):
        # Keep only plain files (not directories) whose name ends with the
        # requested extension, case-insensitively.
        directory = os.path.join(FREQTRADE_USERDATA_DIR, subdir)
        return [
            entry for entry in os.listdir(directory)
            if os.path.isfile(os.path.join(directory, entry))
            and entry.lower().endswith(suffix)
        ]

    files = _list_files('backtest_results', '.zip')
    files2 = _list_files('data/binance', '.feather')

    return render_template('index.html', files=files, files2=files2)
@app.route('/process', methods=['POST'])
def process():
    """Placeholder endpoint for handling user-submitted data."""
    # TODO: implement actual processing of the posted payload.
    return "Données traitées !"
@app.route('/read_json/<path:filename>')
def read_json(filename):
    """Serve a file from the backtest_results directory as JSON.

    Supported inputs:
      * ``.json``    — returned verbatim.
      * ``.pkl``     — loaded via joblib; DataFrame/dict/list are converted to
                       split-oriented JSON.
      * ``.zip``     — each json/feather/pkl member is decoded and returned in
                       a single ``{member_name: payload}`` object.
    Anything else yields a 400 error payload.

    NOTE(review): ``filename`` comes straight from the URL and is joined to
    the results directory without normalization, so path traversal looks
    possible — confirm this service is only reachable by trusted users.
    NOTE(review): joblib/pickle deserialization executes arbitrary code if a
    pickle file is attacker-controlled; these files are presumed to be
    produced locally by freqtrade.
    """
    results_dir = FREQTRADE_USERDATA_DIR + "/backtest_results"
    full_path = os.path.join(results_dir, filename)

    if filename.endswith('.json'):
        with open(full_path) as f:
            return f.read(), 200, {'Content-Type': 'application/json'}

    if filename.endswith('.pkl'):
        try:
            data = joblib.load(full_path)
            if isinstance(data, pd.DataFrame):
                return data.to_json(orient='split'), 200, {'Content-Type': 'application/json'}
            if isinstance(data, dict):
                df = pd.DataFrame.from_dict(data)
                return df.to_json(orient='split'), 200, {'Content-Type': 'application/json'}
            if isinstance(data, list):
                df = pd.DataFrame(data)
                return df.to_json(orient='split'), 200, {'Content-Type': 'application/json'}
            return json.dumps({"error": f"Type {type(data)} non géré."}), 200
        except Exception as e:
            return json.dumps({"error": str(e)}), 500

    if filename.endswith('.zip'):
        try:
            with zipfile.ZipFile(full_path) as z:
                zip_contents = {}
                for name in z.namelist():
                    if name.endswith('.json'):
                        with z.open(name) as f:
                            print(f"load_json {name}")
                            zip_contents[name] = json.load(f)
                    elif name.endswith('.feather'):
                        with z.open(name) as f:
                            dataframe = pd.read_feather(f)
                            zip_contents[name] = dataframe.to_json(orient="records")
                    elif name.endswith('.pkl'):
                        with z.open(name) as f:
                            try:
                                data = joblib.load(f)
                                if isinstance(data, pd.DataFrame):
                                    print(f"dataframe {name}")
                                    zip_contents[name] = data.to_json(orient='records')
                                elif isinstance(data, dict):
                                    # Assumes a single model key at the first
                                    # level of the pickled dict.
                                    outer_key = next(iter(data))
                                    inner_dict = data[outer_key]
                                    if isinstance(inner_dict, dict):
                                        print(f"dict {name}")
                                        # Assumes a single pair key
                                        # (e.g. 'BTC/USDT') at the second level.
                                        inner_key = next(iter(inner_dict))
                                        df = inner_dict[inner_key]
                                        if isinstance(df, pd.DataFrame):
                                            # BUG FIX: this local was named
                                            # ``type``, shadowing the builtin and
                                            # making the ``type(data)`` call in
                                            # the else-branch below crash on any
                                            # later iteration.
                                            entry_key = name
                                            zip_contents[entry_key] = df.to_json(orient='records')
                                elif isinstance(data, list):
                                    print(f"list {name}")
                                    df = pd.DataFrame(data)
                                    zip_contents[name] = df.to_json(orient='records')
                                else:
                                    # BUG FIX: previously a (body, status) tuple
                                    # was stored here, which json.dumps turned
                                    # into a confusing 2-element array.
                                    zip_contents[name] = json.dumps({"error": f"Type {type(data)} non géré."})
                            except Exception as e:
                                # BUG FIX: same tuple issue as above — store
                                # only the error payload for this member.
                                zip_contents[name] = json.dumps({"error": str(e)})
                return json.dumps(zip_contents)
        except Exception as e:
            return json.dumps({"error": str(e)}), 500

    return json.dumps({"error": "Fichier non pris en charge"}), 400
@app.route('/read_feather/<path:filename>')
def read_feather(filename):
    """Return the rows of a binance feather file as a JSON records array.

    On any failure (missing file, unreadable feather) the error is printed
    and returned as a JSON payload with HTTP 500.
    """
    feather_path = os.path.join(FREQTRADE_USERDATA_DIR + "/data/binance/", filename)
    try:
        frame = pd.read_feather(feather_path)
        return frame.to_json(orient="records")
    except Exception as exc:
        print(exc)
        return jsonify({"error": str(exc)}), 500
@app.route('/generate_report')
def generate_report():
    """Generate a ydata-profiling time-series report for selected columns.

    Query parameters:
        filename    — feather file under <userdata>/data/binance.
        indicators  — comma-separated column names to profile.

    The HTML report is written to <userdata>/reports/report_timeseries.html
    and the full dataframe is returned as JSON records. Errors are printed
    and reported as a JSON 500 payload.
    """
    filename = request.args.get('filename', '')
    feather_path = os.path.join(FREQTRADE_USERDATA_DIR + "/data/binance/", filename)
    print(feather_path)
    indicators = request.args.get('indicators', '').split(',')
    print(indicators)

    try:
        frame = pd.read_feather(feather_path)
        print(frame.columns)
        selected = frame[indicators]
        # NOTE(review): ``.loc[1:100]`` slices by index label, not position —
        # confirm positional ``iloc`` was not intended here.
        profile = ProfileReport(selected.loc[1:100], tsmode=True, sortby="date", title="Time-Series EDA")
        profile.to_file(FREQTRADE_USERDATA_DIR + "/reports/report_timeseries.html")
        return frame.to_json(orient="records")
    except Exception as exc:
        print(exc)
        return jsonify({"error": str(exc)}), 500
@app.route('/get_chart_data')
def get_chart_data():
    """Return the full contents of a binance feather file as JSON records.

    Query parameters:
        filename    — feather file under <userdata>/data/binance.
        indicators  — accepted for API compatibility but currently unused
                      (server-side indicator computation is not implemented;
                      the commented-out sma/rsi/bollinger logic was removed).
    """
    filename = request.args.get('filename', '')
    path = os.path.join(FREQTRADE_USERDATA_DIR + "/data/binance/", filename)
    print(path)

    try:
        df = pd.read_feather(path)
    except Exception as e:
        # Consistent with read_feather: report failures as a JSON 500 payload
        # instead of letting Flask emit an HTML error page.
        print(e)
        return jsonify({"error": str(e)}), 500

    return df.to_json(orient="records")
if __name__ == '__main__':
    # Development server only: debug mode enabled and bound to all
    # interfaces — do not expose this configuration in production.
    app.run(debug=True, host='0.0.0.0', port=5000)