# Stats API server (Flask + waitress) for the trading profits database.
import sqlite3
|
|
import sys
|
|
import datetime
|
|
import time
|
|
import ccxt
|
|
import credentials
|
|
import calendar
|
|
import requests
|
|
import logging
|
|
from flask import Flask, jsonify, request
|
|
from waitress import serve
|
|
|
|
|
|
'''
In case the certificate's permissions suddenly change (in auto renewal, for example), reset them this way:

$ sudo su
# chmod -R 755 /etc/letsencrypt/live/
# chmod -R 755 /etc/letsencrypt/archive/
# ll /etc/letsencrypt/ (to verify permissions)
'''
|
|
|
|
# Hash-based response caching is opt-in via the --cache_requests CLI flag.
cache_requests = len(sys.argv) > 1 and sys.argv[1] == "--cache_requests"
|
|
|
|
|
|
# Path to the SQLite database where completed deals/profits are stored.
profits_database = "../profits/profits_database.db"

# Per-endpoint cache of the last response hash; 0 means "nothing cached yet".
hashes_db = dict.fromkeys(
    (
        "fetch_last_n_deals",
        "fetch_last_n_deals_without_history",
        "fetch_full_log",
        "fetch_log",
        "daily_totals",
        "daily_totals_by_pair",
        "monthly_totals",
        "monthly_totals_by_pair",
        "get_averages",
        "total_profit",
        "total_profit_by_pair",
    ),
    0,
)
|
|
|
|
|
|
def get_market_caps(limit):
    '''
    Fetch the top `limit` coins by market cap from CoinMarketCap.

    Returns the "data" list of the listings/latest endpoint (one dict per
    coin; each entry contains at least "symbol", used by the rank filter).
    Raises requests.HTTPError on a non-2xx response.
    '''
    api_key = credentials.get_credentials("CMC")["key"]
    # Send the key in the X-CMC_PRO_API_KEY header (CMC's documented scheme)
    # instead of the query string, so it does not leak into logs/proxies.
    # The timeout prevents a stalled CMC response from hanging the caller.
    url = f"https://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest?convert=USD&limit={limit}"
    response = requests.get(url, headers={"X-CMC_PRO_API_KEY": api_key}, timeout=30)
    response.raise_for_status()
    return response.json()["data"]
|
|
|
|
|
|
def load_keys_from_db(file_name):
    '''
    Load the accepted API keys from a SQLite credentials database.

    file_name: path to a database containing credentials_table, whose
    second column holds the key string.

    Returns a list of key strings.
    '''
    connection = sqlite3.connect(file_name)
    try:
        cursor = connection.cursor()
        cursor.execute("SELECT * FROM credentials_table")
        rows = cursor.fetchall()
    finally:
        # Always release the handle, even if the query raises.
        connection.close()

    return [row[1] for row in rows]
|
|
|
|
|
|
|
|
def profit_report():
    '''
    Build a consolidated profit report from the profits database.

    Returns a dict with: daily totals for the last 60 days, monthly totals
    for the last 18 months, 30/7-day rolling daily averages, a linear
    projection for the current month, and per-exchange current-month
    totals/percentages.

    Raises IndexError when the queried windows contain no rows at all
    (the calling endpoint wraps this in try/except).
    '''
    connection = sqlite3.connect(profits_database)
    try:
        cursor = connection.cursor()

        # Daily totals for the last 60 days (timestamps shifted to UTC-3).
        cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
               SUM(amount) AS total_amount
               FROM profits_table
               WHERE strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
               GROUP BY day_utc3
               ORDER BY day_utc3;""")
        last_60_days_rows = cursor.fetchall()

        # Single-row grand total for the last 30 days (no GROUP BY).
        cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
               SUM(amount) AS total_amount
               FROM profits_table
               WHERE strftime('%s', 'now') - timestamp <= 30 * 24 * 60 * 60 -- 30 days in seconds;""")
        last_30_days = cursor.fetchall()

        # Single-row grand total for the last 7 days.
        cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
               SUM(amount) AS total_amount
               FROM profits_table
               WHERE strftime('%s', 'now') - timestamp <= 7 * 24 * 60 * 60 -- 7 days in seconds;""")
        last_7_days = cursor.fetchall()

        # Monthly totals for the last 18 months.
        cursor.execute("""SELECT strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') AS year_month_utc3,
               SUM(amount) AS total_amount
               FROM profits_table
               WHERE strftime('%s', 'now') - timestamp <= 18 * 30 * 24 * 60 * 60 -- 18 months in seconds
               GROUP BY year_month_utc3
               ORDER BY year_month_utc3;""")
        last_n_months_rows = cursor.fetchall()

        # Yearly totals (last 365 days). NOTE: fetched but not currently
        # part of the returned report — kept for parity with the original.
        cursor.execute("""SELECT strftime('%Y', timestamp, 'unixepoch', '-3 hours') AS year_utc3,
               SUM(amount) AS total_amount
               FROM profits_table
               WHERE strftime('%s', 'now') - timestamp <= 24 * 365 * 60 * 60 -- 365 days in seconds
               GROUP BY year_utc3
               ORDER BY year_utc3;""")
        yearly_totals = cursor.fetchall()

        # Per-exchange totals bucketed into This Month / Last Month / Other.
        cursor.execute("""SELECT
               exchange_name,
               CASE
                   WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime') THEN 'This Month'
                   WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime', '-1 month') THEN 'Last Month'
                   ELSE 'Other Months'
               END AS month_group,
               SUM(amount) AS total_amount
               FROM profits_table
               WHERE strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
               GROUP BY exchange_name, month_group
               ORDER BY exchange_name, month_group;""")
        per_exchange = cursor.fetchall()
    finally:
        # The original only closed the cursor and leaked the connection.
        connection.close()

    # Projection: average of the 30-day and 7-day daily means, applied to
    # the remaining days of the current month.
    days_in_month = calendar.monthrange(datetime.date.today().year, datetime.date.today().month)[1]
    last_30_days_total = last_30_days[0][1] or 0  # SUM() is NULL with no rows
    last_7_days_total = last_7_days[0][1] or 0
    daily_combined_media = (last_30_days_total / 30 + last_7_days_total / 7) / 2
    current_amount = last_n_months_rows[-1][1]
    # Day-of-month of the most recent recorded day ('YYYY-MM-DD'[8:10]).
    days_past_this_month = int(last_60_days_rows[-1][0][8:10])

    # Current-month totals per exchange (replaces the copy-pasted if/elif chain).
    this_month = {"binance": 0, "gateio": 0, "kucoin": 0, "okex": 0}
    for exchange_name, month_group, amount in per_exchange:
        if month_group == "This Month" and exchange_name in this_month:
            this_month[exchange_name] = amount

    total_amount = sum(this_month.values())

    def _percentage(amount):
        # Guard against ZeroDivisionError on a month with no profit yet.
        return amount / total_amount * 100 if total_amount else 0

    last_60_days_result = {row[0]: round(row[1], 2) for row in last_60_days_rows}
    last_18_months_result = {row[0]: round(row[1], 2) for row in last_n_months_rows}
    this_month_projection = current_amount + daily_combined_media * (days_in_month - days_past_this_month)

    return {"Last 60 days": last_60_days_result,
            "Last 18 months": last_18_months_result,
            "Last 30 days average": last_30_days_total / 30,
            "Last 7 days average": last_7_days_total / 7,
            "This month projection": this_month_projection,
            "Binance": this_month["binance"],
            "Binance percentage": _percentage(this_month["binance"]),
            "Gateio": this_month["gateio"],
            "Gateio percentage": _percentage(this_month["gateio"]),
            "Kucoin": this_month["kucoin"],
            "Kucoin percentage": _percentage(this_month["kucoin"]),
            "OKX": this_month["okex"],
            "OKX percentage": _percentage(this_month["okex"]),
            "Total profit": total_amount}
|
|
|
|
|
|
|
|
def query_total_profit(pair=None, db_path=None):
    '''
    Returns total profit of the trading pair.

    If no pair specified, returns the grand total of all pairs (None when
    the table is empty, since SQL SUM over no rows is NULL).

    pair: pair symbol without the slash, e.g. "BTCUSDT".
    db_path: optional database path override (defaults to profits_database),
    mainly for testing.
    '''
    connection = sqlite3.connect(db_path if db_path is not None else profits_database)
    try:
        cursor = connection.cursor()
        if pair is None:
            # Fetch BEFORE closing: the original closed the connection first,
            # making cursor.fetchall() raise sqlite3.ProgrammingError.
            cursor.execute("SELECT SUM(amount) AS total_profit FROM profits_table")
            return cursor.fetchone()[0]
        cursor.execute("""SELECT pair, SUM(amount) AS total_profit
                   FROM profits_table
                   GROUP BY pair;""")
        rows = cursor.fetchall()
    finally:
        connection.close()

    # Pairs are stored with a slash ("BTC/USDT") but requested without one.
    for stored_pair, total in rows:
        if stored_pair.replace("/", "") == pair:
            return total
    return 0
|
|
|
|
|
|
def daily_and_monthly_totals(db_path=None):
    '''
    Returns a tuple (daily_total, monthly_total) of profit for the current
    local day and the current local month.

    db_path: optional database path override (defaults to profits_database),
    mainly for testing.
    '''
    now = datetime.datetime.now()

    # Local-midnight boundaries converted to Unix time.
    start_of_day = datetime.datetime(now.year, now.month, now.day)
    start_of_month = datetime.datetime(now.year, now.month, 1)
    start_of_day_unix = int(time.mktime(start_of_day.timetuple()))
    start_of_month_unix = int(time.mktime(start_of_month.timetuple()))

    connection = sqlite3.connect(db_path if db_path is not None else profits_database)
    try:
        cursor = connection.cursor()
        # Parameterized binding instead of f-string interpolation.
        cursor.execute("""SELECT * FROM profits_table
                   WHERE timestamp >= ?
                   ORDER BY timestamp DESC;""", (start_of_month_unix,))
        rows = cursor.fetchall()
    finally:
        connection.close()

    # Column 0 is the timestamp, column 2 the profit amount.
    monthly_total = sum(row[2] for row in rows)
    daily_total = sum(row[2] for row in rows if row[0] >= start_of_day_unix)

    return (daily_total, monthly_total)
|
|
|
|
|
|
def query_daily_totals(pair=None, db_path=None):
    '''
    Returns a dictionary of daily totals of the trading pair, keyed by
    'YYYY-MM-DD' (timestamps shifted to UTC-3).

    If no pair specified, returns the combined totals of all pairs.

    pair: pair symbol without the slash, e.g. "BTCUSDT".
    db_path: optional database path override (defaults to profits_database),
    mainly for testing.
    '''
    connection = sqlite3.connect(db_path if db_path is not None else profits_database)
    try:
        cursor = connection.cursor()
        if pair is None:
            cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
                       SUM(amount) AS total_profit
                       FROM profits_table
                       GROUP BY day_utc3;""")
            return {day: total for day, total in cursor.fetchall()}
        cursor.execute("""SELECT pair, strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
                   SUM(amount) AS total_profit
                   FROM profits_table
                   GROUP BY pair, day_utc3;""")
        rows = cursor.fetchall()
    finally:
        connection.close()

    # Pairs are stored with a slash ("BTC/USDT") but requested without one.
    return {day: total for stored_pair, day, total in rows
            if stored_pair.replace("/", "") == pair}
|
|
|
|
|
|
def query_monthly_totals(pair=None, db_path=None):
    '''
    Returns a dictionary of monthly totals of the trading pair, keyed by
    'YYYY-MM' (timestamps shifted to UTC-3).

    If no pair specified, returns the combined totals of all pairs.

    pair: pair symbol without the slash, e.g. "BTCUSDT".
    db_path: optional database path override (defaults to profits_database),
    mainly for testing.
    '''
    connection = sqlite3.connect(db_path if db_path is not None else profits_database)
    try:
        cursor = connection.cursor()
        if pair is None:
            cursor.execute("""SELECT strftime('%Y-%m', datetime(timestamp, 'unixepoch', '-3 hours')) AS month,
                       SUM(amount) AS total_profit
                       FROM profits_table
                       GROUP BY month;""")
            return {month: total for month, total in cursor.fetchall()}
        # The original used an f-string prefix here with nothing to interpolate.
        cursor.execute("""SELECT pair, strftime('%Y-%m', datetime(timestamp, 'unixepoch', '-3 hours')) AS month,
                   SUM(amount) AS total_profit
                   FROM profits_table
                   GROUP BY pair, month;""")
        rows = cursor.fetchall()
    finally:
        connection.close()

    # Pairs are stored with a slash ("BTC/USDT") but requested without one.
    return {month: total for stored_pair, month, total in rows
            if stored_pair.replace("/", "") == pair}
|
|
|
|
|
|
def last_n_deals(n, db_path=None):
    '''
    Returns a list of the latest n deals, newest first.

    n: number of rows; may be an int or a numeric string (the HTTP
    endpoints pass query parameters as strings), converted before binding
    so SQLite's LIMIT always receives an integer.
    db_path: optional database path override (defaults to profits_database),
    mainly for testing.
    '''
    connection = sqlite3.connect(db_path if db_path is not None else profits_database)
    try:
        cursor = connection.cursor()
        # Plain string (the original carried a pointless f prefix).
        cursor.execute("SELECT * FROM profits_table ORDER BY timestamp DESC LIMIT ?", (int(n),))
        return cursor.fetchall()
    finally:
        connection.close()
|
|
|
|
|
|
def last_n_deals_without_history(n):
    '''
    Like last_n_deals, but with the order-history column blanked out.
    Useful in bandwidth-restricted scenarios.
    '''
    # Keep the first five columns of each deal and replace the (large)
    # sixth column with an empty string.
    return [deal[:5] + ("",) for deal in last_n_deals(n)]
|
|
|
|
|
|
def last_n_lines(file_name, width, amount=4, full_log=False):
    '''
    Read a log file and return (lines, total_line_count).

    With full_log=True: every line, stripped, in file order.
    Otherwise: the newest `amount` lines (newest first), each cut to
    `width` characters; a line longer than `width` contributes a second
    chunk of up to `width` more characters, and the final list is then
    truncated back to `amount` entries.
    '''
    with open(file_name) as handle:
        raw_lines = handle.readlines()
    total = len(raw_lines)

    if full_log:
        return [entry.strip() for entry in raw_lines], total

    collected = []
    for entry in raw_lines[::-1][:amount]:
        stripped = entry.strip()
        collected.append(stripped[:width])
        if len(stripped) > width:
            # Overflow chunk: characters width..2*width of the same line.
            collected.append(stripped[width:width * 2])

    return collected[:amount], total
|
|
|
|
|
|
def return_parkinson_backtests(broker, days, max_rank):
    '''
    Returns a dictionary containing backtests with the format {coin: value}

    broker: exchange name; must be one of the supported exchanges below
    (each has a volatilities database under data/).
    days: how many whole days of volatility history to evaluate.
    max_rank: only coins within this CoinMarketCap rank are kept.
    '''
    if broker not in ["binance", "gateio", "kucoin", "okx", "bybit"]:
        return {}

    evaluation_dictionary = {}
    # Midnight today (local time), then go back `days` whole days.
    start_of_day = int(time.mktime(datetime.datetime.now().date().timetuple()))
    since = int(start_of_day - 60*60*24*days)

    # Getting the data from the database
    print("Querying database...")
    conn = sqlite3.connect(f"data/{broker}.db")
    cursor = conn.cursor()
    cursor.execute('SELECT * FROM volatilities_table WHERE timestamp > ?', (since,))
    rows = cursor.fetchall()
    conn.close()

    # Parse the data
    # Group values by symbol: row[0] is the pair symbol, row[2] the stored
    # volatility value. Rows arrive in insertion order — presumably
    # chronological; TODO confirm against the table's writer.
    print("Parsing the data...")
    for row in rows:
        if row[0] not in evaluation_dictionary:
            evaluation_dictionary[row[0]] = [row[2]]
        else:
            evaluation_dictionary[row[0]].append(row[2])

    #Calculate weighted averages
    # Linearly decaying weights over the reversed list: the last element
    # gets weight n/n, the first 1/n (n = number of samples for the pair).
    print("Calculating weighted averages")
    weighted_averages = {}
    for key in evaluation_dictionary:
        multiplier = len(evaluation_dictionary[key])
        total = 0
        for value in evaluation_dictionary[key][::-1]:
            total+=value*multiplier/len(evaluation_dictionary[key])
            multiplier-=1
        weighted_averages[key] = total/len(evaluation_dictionary[key])

    #Filter by rank
    # Keep only pairs whose base coin appears in the top `max_rank` CMC
    # listings (get_market_caps returns entries carrying a "symbol" key).
    print("Filtering results by CMC rank")
    coins_accepted = []
    market_caps = get_market_caps(max_rank)
    for result in market_caps:
        coins_accepted.append(result["symbol"])

    for coin in weighted_averages.copy():
        if coin.split("/")[0] not in coins_accepted:
            del(weighted_averages[coin])


    #Checking open markets
    # Drop pairs that are missing or inactive on the exchange. Public
    # market data only, so empty API credentials are sufficient.
    # NOTE: `broker` is rebound here from the name string to the ccxt
    # exchange instance.
    print("Filtering results by market state")
    exchange_class = getattr(ccxt, broker)
    broker = exchange_class({
        "apiKey": "",
        "secret": "",
        "timeout": 30000,
        "enableRateLimit": True,
        'options': {
            'newOrderRespType': 'FULL'}
        })

    markets = broker.load_markets()
    for key in weighted_averages.copy():
        if key not in markets or not markets[key]["active"]:
            del(weighted_averages[key])

    return weighted_averages
|
|
|
|
|
|
# Flask application exposing the stats endpoints below; served via waitress in __main__.
stats_api = Flask(__name__)
|
|
|
|
@stats_api.route("/fetch_backtests")
def fetch_backtests():
    '''
    GET request

    Parameters: 'exchange_name' -> string
                'days' -> int
                'max_rank' -> int
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        exchange = request.args.get("exchange_name")
        day_count = int(request.args.get("days"))  # type: ignore
        rank_limit = int(request.args.get("max_rank"))  # type: ignore
        return return_parkinson_backtests(exchange, day_count, rank_limit)
    except Exception as error:
        print(error)
        return jsonify({"HORROR": f"{error}"})
|
|
|
|
|
|
@stats_api.route("/fetch_profit_report")
def fetch_profit_report():
    '''
    GET request

    Parameters: None

    Returns: JSON object with profit report data
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        return jsonify(profit_report())
    except Exception as error:
        print(error)
        return jsonify({"Error": f"{error}"})
|
|
|
|
|
|
@stats_api.route("/clear_caches")
def clear_hashes():
    '''
    GET request

    Resets every cached response hash so the next call to each endpoint
    returns full data instead of {"no_changes": True}.
    '''
    # The original placed the docstring AFTER the global statement, where it
    # is a stray string literal rather than a docstring.
    global hashes_db

    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        # Reset the existing keys instead of re-typing the whole literal
        # (the duplicated dict could drift out of sync with the module-level one).
        hashes_db = dict.fromkeys(hashes_db, 0)
        return jsonify({"Done":0})
    return jsonify({'Error': 'API key invalid'}), 401
|
|
|
|
|
|
@stats_api.route("/fetch_last_n_deals")
def fetch_last_n_deals():
    '''
    GET request

    Parameter: 'amount_of_deals' -> int
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            # Query args are strings; convert so the SQL LIMIT binding gets
            # an integer (a missing/bad value falls through to except).
            parameter = int(request.args.get("amount_of_deals"))  # type: ignore
            response_value = last_n_deals(parameter)
            if not cache_requests:
                return jsonify({"last_deals": response_value})
            response_hash = hash(str({"last_deals": response_value}))
            if hashes_db["fetch_last_n_deals"]!=response_hash:
                hashes_db["fetch_last_n_deals"] = response_hash
                return jsonify({"last_deals": response_value})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({"last_deals":""})
    return jsonify({'Error': 'API key invalid'}), 401
|
|
|
|
|
|
@stats_api.route("/fetch_last_n_deals_without_history")
def fetch_last_n_deals_without_history():
    '''
    GET request

    Parameter: 'amount_of_deals' -> int
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            # Query args are strings; convert so the SQL LIMIT binding gets
            # an integer (a missing/bad value falls through to except).
            parameter = int(request.args.get("amount_of_deals"))  # type: ignore
            response_value = last_n_deals_without_history(parameter)
            if not cache_requests:
                return jsonify({"last_deals": response_value})
            response_hash = hash(str({"last_deals": response_value}))
            if hashes_db["fetch_last_n_deals_without_history"]!=response_hash:
                hashes_db["fetch_last_n_deals_without_history"] = response_hash
                return jsonify({"last_deals": response_value})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({"last_deals":""})
    return jsonify({'Error': 'API key invalid'}), 401
|
|
|
|
|
|
@stats_api.route("/fetch_full_log")
def fetch_full_log():
    '''
    GET request

    Parameters: 'exchange_name' -> string

    It trims the full log to 200 lines, to avoid sending too much data to the client.
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    width = 0
    try:
        exchange_name = request.args.get("exchange_name")
        log_lines, line_count = last_n_lines(f"../logs/{exchange_name}.log", width, 0, full_log=True)
        if not cache_requests:
            return jsonify({"line": log_lines[-200:], "amount_of_lines": line_count})
        # Hash the untrimmed payload so any change in the log invalidates the cache.
        new_hash = hash(str({"line": log_lines, "amount_of_lines": line_count}))
        if hashes_db["fetch_full_log"] == new_hash:
            return jsonify({"no_changes": True})
        hashes_db["fetch_full_log"] = new_hash
        return jsonify({"line": log_lines[-200:], "amount_of_lines": line_count})
    except Exception as e:
        print(e)
        return {"line": [""]*width,"amount_of_lines": 0}
|
|
|
|
|
|
@stats_api.route("/fetch_log")
def fetch_log():
    '''
    GET request

    Parameters: 'exchange_name' -> string
                'width' -> int
                'amount' -> int
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        exchange_name = request.args.get("exchange_name")
        line_width = int(request.args.get("width"))  # type: ignore
        line_count = int(request.args.get("amount"))  # type: ignore
        log_lines, total_lines = last_n_lines(f"../logs/{exchange_name}.log", line_width, line_count)
        payload = {"line": log_lines, "amount_of_lines": total_lines}
        if not cache_requests:
            return jsonify(payload)
        new_hash = hash(str(payload))
        if hashes_db["fetch_log"] == new_hash:
            return jsonify({"no_changes": True})
        hashes_db["fetch_log"] = new_hash
        return jsonify(payload)
    except Exception as e:
        print(e)
        return {"line": [""]*10,"amount_of_lines": 0}
|
|
|
|
|
|
@stats_api.route("/combined_totals")
def combined_totals():
    '''
    GET request — current day and current month profit totals as a 2-tuple.
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    return jsonify({"combined": daily_and_monthly_totals()})
|
|
|
|
|
|
@stats_api.route("/daily_totals")
def get_daily_totals():
    '''
    GET request — daily profit totals across all pairs.
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    totals = query_daily_totals()
    if not cache_requests:
        return jsonify(totals)
    new_hash = hash(str(totals))
    if hashes_db["daily_totals"] == new_hash:
        return jsonify({"no_changes": True})
    hashes_db["daily_totals"] = new_hash
    return jsonify(totals)
|
|
|
|
|
|
@stats_api.route("/daily_totals_by_pair")
def get_daily_totals_by_pair():
    '''
    GET request

    Parameters: 'base' -> string
                'quote' -> string
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        base = request.args.get("base")
        quote = request.args.get("quote")
        totals = query_daily_totals(f"{base}{quote}")
        if not cache_requests:
            return jsonify(totals)
        new_hash = hash(str(totals))
        if hashes_db["daily_totals_by_pair"] == new_hash:
            return jsonify({"no_changes": True})
        hashes_db["daily_totals_by_pair"] = new_hash
        return jsonify(totals)
    except Exception as e:
        print(e)
        return jsonify({'Error': 'Halp'})
|
|
|
|
|
|
@stats_api.route("/monthly_totals")
def get_monthly_totals():
    '''
    GET request — monthly profit totals across all pairs.
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    totals = query_monthly_totals()
    if not cache_requests:
        return jsonify(totals)
    new_hash = hash(str(totals))
    if hashes_db["monthly_totals"] == new_hash:
        return jsonify({"no_changes": True})
    hashes_db["monthly_totals"] = new_hash
    return jsonify(totals)
|
|
|
|
|
|
@stats_api.route("/monthly_totals_by_pair")
def get_monthly_totals_by_pair():
    '''
    GET request

    Parameters: 'base' -> string
                'quote' -> string
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        base = request.args.get("base")
        quote = request.args.get("quote")
        totals = query_monthly_totals(f"{base}{quote}")
        if not cache_requests:
            return jsonify(totals)
        new_hash = hash(str(totals))
        if hashes_db["monthly_totals_by_pair"] == new_hash:
            return jsonify({"no_changes": True})
        hashes_db["monthly_totals_by_pair"] = new_hash
        return jsonify(totals)
    except Exception as e:
        print(e)
        return jsonify({'Error': 'Halp'})
|
|
|
|
|
|
@stats_api.route("/get_averages")
def get_averages():
    '''
    GET request

    Returns rolling 30-day and 7-day averages of the daily profit totals.
    '''
    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
        try:
            daily_totals = query_daily_totals()
            # Chronologically ordered daily amounts. The original built two
            # identical accumulator lists (acc_30/acc_7); one suffices.
            values = [daily_totals[day] for day in sorted(daily_totals)]
            length_30 = min(30, len(values))  # Last 30 days
            length_7 = min(7, len(values))    # Last 7 days
            val_30 = sum(values[len(values) - length_30:])
            val_7 = sum(values[len(values) - length_7:])
            # NOTE: with an empty database length_* is 0 and the division
            # raises ZeroDivisionError, which the except below maps to the
            # same 'Halp' response as before.
            if not cache_requests:
                return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
            response_hash = hash(str({"30_day": val_30/length_30, "7_day": val_7/length_7}))
            if hashes_db["get_averages"]!=response_hash:
                hashes_db["get_averages"] = response_hash
                return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
            return jsonify({"no_changes": True})
        except Exception as e:
            print(e)
            return jsonify({'Error': 'Halp'})
    return jsonify({'Error': 'API key invalid'}), 401
|
|
|
|
|
|
@stats_api.route("/total_profit")
def total_profit():
    '''
    GET request — grand total profit across all pairs.
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    payload = {"Total profit": query_total_profit()}
    if not cache_requests:
        return jsonify(payload)
    new_hash = hash(str(payload))
    if hashes_db["total_profit"] == new_hash:
        return jsonify({"no_changes": True})
    hashes_db["total_profit"] = new_hash
    return jsonify(payload)
|
|
|
|
|
|
@stats_api.route("/total_profit_by_pair")
def total_profit_by_pair():
    '''
    GET request

    Parameters: 'base' -> string
                'quote' -> string
    '''
    if "X-API-KEY" not in request.headers or request.headers.get("X-API-KEY") not in valid_keys:
        return jsonify({'Error': 'API key invalid'}), 401
    try:
        base = request.args.get("base")
        quote = request.args.get("quote")
        payload = {"Total profit": query_total_profit(f"{base}{quote}")}
        if not cache_requests:
            return jsonify(payload)
        new_hash = hash(str(payload))
        if hashes_db["total_profit_by_pair"] == new_hash:
            return jsonify({"no_changes": True})
        hashes_db["total_profit_by_pair"] = new_hash
        return jsonify(payload)
    except Exception as e:
        print(e)
        return jsonify({'Error': 'Halp'})
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # API keys accepted by every endpoint's X-API-KEY check.
    valid_keys = load_keys_from_db("api_credentials.db")

    # Serve with waitress (production WSGI server).
    logger = logging.getLogger('waitress')
    logger.setLevel(logging.INFO)
    serve(stats_api, host="0.0.0.0", port=5010, threads=32)

    # Flask development server alternative:
    # app.run(host="0.0.0.0", port=5010, debug=True)