Compare commits

..

74 Commits

Author SHA1 Message Date
Nicolás Sánchez 4a1f1c844d 2025.12.01 2025-12-01 10:17:23 -03:00
Nicolás Sánchez 536866364c 2025.11.11 2025-11-18 20:46:49 -03:00
Nicolás Sánchez 7c33dd231d 2025.11.08 2025-11-08 10:42:29 -03:00
Nicolás Sánchez 96d1cf6d78 2025.10.24 2025-10-24 20:17:46 -03:00
Nicolás Sánchez b69b0d2f15 2025.10.12 2025-10-12 19:01:13 -03:00
Nicolás Sánchez 18506dbaf3 2025.10.11 2025-10-11 22:48:26 -03:00
Nicolás Sánchez ca43b3dad5 2025.10.10 2025-10-11 09:58:58 -03:00
Nicolás Sánchez d06bfd9d10 2025.10.09 2025-10-09 17:51:55 -03:00
Nicolás Sánchez e354ea4d55 2025.10.07 2025-10-07 17:36:39 -03:00
Nicolás Sánchez 8f3b0eb186 2025.10.04 2025-10-04 21:40:03 -03:00
Nicolás Sánchez 2823dff56a 2025.10.03 2025-10-03 15:44:59 -03:00
Nicolás Sánchez c42a505e49 2025.10.01 2025-10-01 15:07:17 -03:00
Nicolás Sánchez 0576f93477 2025.09.27 2025-09-27 20:12:29 -03:00
Nicolás Sánchez 65c406a03d 2025.09.25 2025-09-25 18:07:58 -03:00
Nicolás Sánchez 9e2a1dc7a1 2025.09.24 2025-09-24 16:04:43 -03:00
Nicolás Sánchez 171738fa4d 2025.09.21 2025-09-21 18:23:14 -03:00
Nicolás Sánchez 09f9aa313c 2025.09.20 2025-09-20 19:18:32 -03:00
Nicolás Sánchez 451e1a63aa 2025.09.19 2025-09-19 17:47:15 -03:00
Nicolás Sánchez 733f6efbff 2025.09.18 2025-09-19 10:29:32 -03:00
Nicolás Sánchez 0d753aa3cd 2025.09.15 2025-09-15 18:55:59 -03:00
Nicolás Sánchez 73eff21dbb 2025.09.12 2025-09-13 08:16:42 -03:00
Nicolás Sánchez 37661d91eb version number 2025-09-12 09:51:12 -03:00
Nicolás Sánchez 885797db01 2025.09.11 2025-09-11 20:36:40 -03:00
Nicolás Sánchez 29dbdce95e 2025.09.10 2025-09-10 16:21:39 -03:00
Nicolás Sánchez f5740c735c 2025.09.08 2025-09-08 18:21:35 -03:00
Nicolás Sánchez bc8d621152 2025.09.07 2025-09-07 18:30:00 -03:00
Nicolás Sánchez 5cf2979f38 partial profit support 2025-09-06 11:29:36 -03:00
Nicolás Sánchez 08ce7c65a0 added support for mod_order_size 2025-09-06 11:28:40 -03:00
Nicolás Sánchez 61a3545dd7 wrong tp order code 2025-09-05 16:44:38 -03:00
Nicolás Sánchez 05b46203ab Merge branch 'concurrent_trading_orders' 2025-09-05 10:56:31 -03:00
Nicolás Sánchez ddd1df7957 2025.09.05 2025-09-05 10:52:14 -03:00
Nicolás Sánchez ab77840bea merge branch concurrent_trading_orders 2025-09-04 17:51:27 -03:00
Nicolás Sánchez 27420946cd I hate Kucoin 2025-09-04 13:56:25 -03:00
Nicolás Sánchez 5dde1a1def del open_orders 2025-09-03 23:10:14 -03:00
Nicolás Sánchez b36d73306d wait time back to 1 2025-09-03 20:56:35 -03:00
Nicolás Sánchez f7365c0340 removed migration code 2025-09-03 20:34:06 -03:00
Nicolás Sánchez 84bada9967 omg 2025-09-03 20:10:42 -03:00
Nicolás Sánchez bb3fb692df /force_trader_close 2025-09-02 16:04:52 -03:00
Nicolás Sánchez 694e5a95d1 removed conditionals in some endpoints 2025-09-01 14:19:20 -03:00
Nicolás Sánchez 56cbf51129 removed minor conditionals in some endpoints 2025-09-01 14:16:51 -03:00
Nicolás Sánchez a5efd6e992 2025.09.01 2025-09-01 13:52:03 -03:00
Nicolás Sánchez 23d85de155 2025.09.01 2025-09-01 13:42:22 -03:00
Nicolás Sánchez 433813115f fixed partial profit reset mkII 2025-08-31 23:22:25 -03:00
Nicolás Sánchez 559b95819a fixed partial profit reset 2025-08-31 21:51:48 -03:00
Nicolás Sánchez e88ed99d6b 2025.08.31 2025-08-31 18:43:13 -03:00
Nicolás Sánchez 5544df9bd7 version number 2025-08-30 18:40:30 -03:00
Nicolás Sánchez 7dab4d4890 small refactor in send_new_safety_order_batch 2025-08-30 18:38:19 -03:00
Nicolás Sánchez 406067497e set pause flag while sending safety orders 2025-08-28 20:01:25 -03:00
Nicolás Sánchez 0dd3077eb5 better handling of concurrent safety orders changes in runtime 2025-08-28 18:05:44 -03:00
Nicolás Sánchez 16e1994ed1 empty_order handling 2025-08-28 10:18:12 -03:00
Nicolás Sánchez c4cfa40577 migration line 2025-08-27 18:57:34 -03:00
Nicolás Sánchez 3a4ce2311e safety order batch send mostly done 2025-08-27 15:56:15 -03:00
Nicolás Sánchez 2e35ea9c13 no_of_safety_orders bug 2025-08-25 18:41:09 -03:00
Nicolás Sánchez b594bd2007 minor refactor 2025-08-25 16:07:08 -03:00
Nicolás Sánchez 58fcff8618 /mod_boosted_concurrent_safety_orders endpoint 2025-08-25 13:34:30 -03:00
Nicolás Sánchez 3daca5336e /mod_concurrent_safety_orders endpoint added 2025-08-25 10:39:03 -03:00
Nicolás Sánchez 069cff2402 std/boosted concurrent safety orders 2025-08-24 14:58:38 -03:00
Nicolás Sánchez 6bf3df0418 first draft 2025-08-24 09:05:12 -03:00
Nicolás Sánchez ca85e454f9 first draft 2025-08-22 15:16:51 -03:00
Nicolás Sánchez f5e5f4eb77 minor corrections/optimizations 2025-08-19 15:49:01 -03:00
Nicolás Sánchez 0de0eb5c08 2025.08.19 2025-08-19 11:42:31 -03:00
Nicolás Sánchez c667c70a64 minor refactorings 2025-08-18 16:10:09 -03:00
Nicolás Sánchez 29c3f37a65 statistics server optimizations 2025-08-18 13:58:36 -03:00
Nicolás Sánchez 74e24e6249 bugfix 2025-08-18 10:19:33 -03:00
Nicolás Sánchez 550ab3f3f6 2025.08.18 2025-08-18 09:37:42 -03:00
Nicolás Sánchez d922bbe06f 2025.08.17 2025-08-17 13:03:13 -03:00
Nicolás Sánchez 9855c64d81 small optimizations 2025-08-17 10:41:16 -03:00
Nicolás Sánchez 3353a09db1 minor refactorings 2025-08-16 17:27:07 -03:00
Nicolás Sánchez 8ef31c4bab minor refactor caching status_string 2025-08-16 14:03:31 -03:00
Nicolás Sánchez 00c6157d2e minor cleanup 2025-08-16 12:40:40 -03:00
Nicolás Sánchez 6c72b35b29 2025.08.16 2025-08-16 12:35:36 -03:00
Nicolás Sánchez 30d8e84833 2025.08.15 2025-08-15 23:36:22 -03:00
Nicolás Sánchez e11be69f00 statistics server optimizations 2025-08-15 10:10:37 -03:00
Nicolás Sánchez 4d23503cee 2025.08.14 2025-08-14 13:54:08 -03:00
10 changed files with 2473 additions and 2082 deletions

1
.gitignore vendored
View File

@ -17,6 +17,7 @@ logs/gateio.log
logs/kucoin.log
upload_testnet.sh
upload_mainnet.sh
upload_local_testnet.sh
utils/data/binance.db
utils/data/okx.db
utils/data/gateio.db

View File

@ -1,3 +1,140 @@
2025.12.01:
. Modified log output of new_market_order.
. Modified Kucoin's case in min_amount_of_base.
2025.11.11:
. deals_cache and log_list cache are now 20 items long.
. Less log spam.
2025.11.08:
. broker.set_default_order_size() now saves the config file to disk after changing the value.
. Variable renaming and other small stuff.
2025.10.24:
. Toggling liquidate_after_switch now writes the config file to disk so the setting persists between trades.
. Manually switching to long now sets double_check_price to false.
. Added a few comments to switch_to_long.
2025.10.12:
. do_cleanup relocated after generating the safety orders' prices.
2025.10.11:
. Minor simplification in do_cleanup.
. Removed a couple of (no longer needed?) pauses.
2025.10.10:
. New endpoint: /refresh_log_cache.
. Fixed an error in the /add_so endpoint that incremented the config setting but not the status setting.
2025.10.09:
. Cleanup is done as soon as the trader starts, rather than after sending the take profit and safety orders.
2025.10.07:
. In short traders, if there are too few safety orders (less than 67% of the max amount), safety_order_deviance is increased from 2% to 3%.
2025.10.04:
. Fixed error while logging orders in new_simulated_market_order.
. renew_tp_and_so_routine now sends the take profit order first, and then the safety orders.
2025.10.03:
. New broker config option: log_orders. If set to True, orders will be logged to orders.log in the logs directory.
. New API endpoint: /toggle_log_orders.
2025.10.01:
. Fixed base fees not being taken into account.
2025.09.27:
. Added notes in every entry of deal_order_history.
. Minor refactor in renew_tp_and_so_routine.
. Added another cooldown before sending a take profit order (to give the exchange a bit more time to correctly reflect the amount of base present in the account).
. Updated cleanup routine to leave some change in the account.
2025.09.25:
. Added a pause after getting filled orders in check_status.
. Added an extra logging line in take_profit_routine.
2025.09.24:
. Added a new config option: wait_after_initial_market_order. It specifies, in seconds, the wait time after sending the initial market order.
It should give the exchanges time to correctly report the recently filled market order.
. Removed the unused "PAUSED" notice from the screen output.
2025.09.21:
. Fixed a bug that caused short traders to have an incorrect order size.
2025.09.20:
. Fixed a bug that caused short traders to initialize using the same workflow as long traders.
2025.09.19:
. Added pageSize parameter to the open order requests when querying Kucoin.
2025.09.18:
. do_cleanup now uses get_min_quote_size.
. Added an extra price check to switch_to_long.
. Removed old check_old_long_price method.
2025.09.14:
. Refactored full order list fetching.
. Minor refactor of restart_pair_no_json.
. Pausing the trader is now done via set_pause() method.
. Reverted modification of wait time after initial market order.
. wait_time now present in broker config file.
. Minor refactorings.
2025.09.13:
. Increased wait time after initial market order.
2025.09.12:
. No retries when sending a cleanup order.
. Removed redundant try...except blocks in switch_to_long.
2025.09.11:
. Fixed bug in start_trader that called amount_to_precision with very low amounts and spammed logs.
2025.09.10:
. Deal order history now stores only the id of each order instead of the full order object.
2025.09.08:
. Re-enabled long to short autoswitch.
2025.09.07:
. Increased wait time after sending market orders.
2025.09.05:
. The trader now supports multiple safety orders open at the same time.
. Removed order forcing when importing a trader. It may be reinstated at a later date.
. Removed the /reload_safety_orders endpoint.
. New endpoints: /mod_concurrent_safety_orders, /mod_boosted_concurrent_safety_orders and /force_trader_close.
. Modified cleanup routine.
. Default wait_time back to 0.5 seconds.
. General optimizations.
2025.08.19:
. Improved log trimming.
2025.08.18:
. Database handling optimization.
2025.08.17:
. Minor refactorings.
2025.08.16:
. Improved threading.
2025.08.15:
. "deal order history" is now disabled by default.
. CPU optimizations in status string generation.
2025.08.14:
. Refactored gib_so_size.
. Refactored seconds_to_time.
. Refactored linear_space.
. Refactored dca_cost_calculator.
. Refactored return_optimal_order_size.
. Minor refactor in generate_status_strings.
. Optimized imports.
. Deal_order_history now only stores the important parts of the orders to save some RAM.
. Removed deprecated "profit_to_file" method.
2025.08.12:
. Default "check_slippage" value now True.
. Removed capitalization from exchange name when sending trader quit notification.
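Note on the 2025.09.05 and 2025.10.07 entries above: the concurrent safety order and dynamic safety_order_deviance logic lives in trader.py, whose diff is suppressed further down. Purely as an illustrative sketch, a descending buy ladder for a long trader could be derived from the config values seen in ConfigHandler below; the function name and the exact widening rule here are hypothetical:

def safety_order_prices(entry_price, count, deviance_pct=2.0, scale=0.0105):
    # Hypothetical ladder: each step drops by deviance_pct percent,
    # widened by `scale` percentage points per step (defaults mirror ConfigHandler).
    prices = []
    price = entry_price
    for step in range(count):
        price *= 1 - (deviance_pct + scale * step) / 100
        prices.append(price)
    return prices

# Example: the first three of thirty safety orders for an entry at 100.0
print(safety_order_prices(100.0, 3))   # -> [98.0, ~96.03, ~94.09]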

View File

@ -1,5 +1,5 @@
import json
import time
from time import time
from json import dumps, load
class ConfigHandler:
'''
@ -14,6 +14,8 @@ class ConfigHandler:
"order_size": self.broker.get_default_order_size(),
"no_of_safety_orders": 30,
"max_short_safety_orders": 45,
"concurrent_safety_orders": 3,
"boosted_concurrent_safety_orders": 5,
"safety_order_deviance": 2,
"safety_order_scale": 0.0105,
"dynamic_so_deviance": True,
@ -35,16 +37,19 @@ class ConfigHandler:
"force_restart_if_retries_exhausted": False,
"check_old_long_price": False #switch_to_short should flip this to True unless stated
}
# if self.broker.get_exchange_name()=="kucoin":
# self.default_config_dictionary["concurrent_safety_orders"]=1
# self.default_config_dictionary["boosted_concurrent_safety_orders"]=1
self.config_file_path = f"configs/{pair.split('/')[0]}{pair.split('/')[1]}.json"
self.config_dictionary = self.default_config_dictionary.copy()
#Loads from disk the config file (if it exists)
if self.load_from_file()==1:
#If the config file does not exist, write a new one with the default values and sign it with timestamp.
self.config_dictionary["generated_at"] = int(time.time())
self.config_dictionary["generated_at"] = int(time())
self.save_to_file()
if config_dict is not None:
self.config_dictionary = {**self.config_dictionary, **config_dict}
self.config_dictionary.update(config_dict)
self.save_to_file()
@ -68,6 +73,12 @@ class ConfigHandler:
def get_max_short_safety_orders(self):
return self.config_dictionary["max_short_safety_orders"]
def get_concurrent_safety_orders(self):
return self.config_dictionary["concurrent_safety_orders"]
def get_boosted_concurrent_safety_orders(self):
return self.config_dictionary["boosted_concurrent_safety_orders"]
def get_safety_order_deviance(self):
return self.config_dictionary["safety_order_deviance"]
@ -173,6 +184,20 @@ class ConfigHandler:
self.config_dictionary["max_short_safety_orders"] = max_short_safety_orders
return 0
def set_concurrent_safety_orders(self, concurrent_safety_orders: int):
# if not isinstance(concurrent_safety_orders, int):
# self.broker.logger.log_this(f"Max concurrent safety orders provided is not an integer",1,self.get_pair())
# return 1
self.config_dictionary["concurrent_safety_orders"] = concurrent_safety_orders
return 0
def set_boosted_concurrent_safety_orders(self, boosted_concurrent_safety_orders: int):
# if not isinstance(concurrent_safety_orders, int):
# self.broker.logger.log_this(f"Max concurrent safety orders provided is not an integer",1,self.get_pair())
# return 1
self.config_dictionary["boosted_concurrent_safety_orders"] = boosted_concurrent_safety_orders
return 0
def set_safety_order_deviance(self, safety_order_deviance: int):
# if not isinstance(safety_order_deviance, int):
# self.broker.logger.log_this(f"Safety order deviance provided is not an integer",1,self.get_pair())
@ -227,6 +252,7 @@ class ConfigHandler:
# self.broker.logger.log_this(f"liquidate_after_switch must be a boolean",1,self.get_pair())
# return 1
self.config_dictionary["liquidate_after_switch"] = liquidate_after_switch
self.save_to_file()
return 0
def set_tp_mode(self, tp_mode: int):
@ -315,7 +341,7 @@ class ConfigHandler:
# return 1
try:
with open(file_path, "w") as f:
f.write(json.dumps(self.config_dictionary, indent=4))
f.write(dumps(self.config_dictionary, indent=4))
return 0
except Exception as e:
self.broker.logger.log_this(f"Error saving config to file: {file_path}: {e}",1,self.get_pair())
@ -329,7 +355,7 @@ class ConfigHandler:
# return 1
try:
with open(file_path, "r") as f:
self.set_config({**self.default_config_dictionary, **json.load(f)})
self.set_config({**self.default_config_dictionary, **load(f)})
return 0
except Exception as e:
self.broker.logger.log_this(f"Config file does not exist or is not readable: {e}",1,self.get_pair())

View File

@ -1,8 +1,10 @@
import json
import collections
import time
import requests
import credentials
import sqlite3
from contextlib import contextmanager
from requests import get as requests_get
from json import load, dumps
from copy import deepcopy
@ -12,53 +14,67 @@ class Broker:
self.broker_config = broker_config
self.exchange = exchange
self.last_price = 0
self.wait_time = 1 #Default wait time for API breathing room
self.cooldown_multiplier = 2 #Default cooldown multiplier value
if "cooldown_multiplier" in self.broker_config:
self.cooldown_multiplier = self.broker_config["cooldown_multiplier"]
self.wait_before_new_safety_order = 1
if "wait_before_new_safety_order" in self.broker_config:
self.wait_before_new_safety_order = self.broker_config["wait_before_new_safety_order"]
self.empty_order = {"id": "", "status": "", "filled": 0, "remaining": 0, "price": 0, "cost": 0, "fees": [], "symbol": ""}
self.retries = self.broker_config["retries"] if "retries" in self.broker_config else 5
self.slippage_default_threshold = self.broker_config["slippage_default_threshold"] if "slippage_default_threshold" in self.broker_config else .03
#Default values
self.wait_time = self.broker_config.get("wait_time",.5)
self.cooldown_multiplier = self.broker_config.get("cooldown_multiplier",2)
self.wait_after_initial_market_order = self.broker_config.get("wait_after_initial_market_order",1)
self.wait_before_new_safety_order = self.broker_config.get("wait_before_new_safety_order",1)
self.retries = self.broker_config.get("retries",5)
self.slippage_default_threshold = self.broker_config.get("slippage_default_threshold",.03)
self.follow_order_history = self.broker_config.get("follow_order_history",False)
self.write_order_history = self.broker_config.get("write_order_history", False)
self.logger = Logger(self.broker_config)
self.write_order_history = True #This should be a toggle in config_file
self.log_orders = self.broker_config.get("log_orders",False)
#Initialize database
self.profits_database_filename = "profits/profits_database.db"
self.database_connection = sqlite3.connect(self.profits_database_filename)
self.database_cursor = self.database_connection.cursor()
self.database_cursor.execute('''
CREATE TABLE IF NOT EXISTS profits_table (
timestamp REAL PRIMARY KEY,
pair TEXT,
amount REAL,
exchange_name TEXT,
order_id TEXT,
order_history TEXT
)
''')
self.database_connection.commit()
self.database_connection.close()
self._db = sqlite3.connect(self.profits_database_filename,
detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES,
check_same_thread=False)
self._db.row_factory = sqlite3.Row
with self._db:
self._db.execute('''
CREATE TABLE IF NOT EXISTS profits_table (
timestamp REAL PRIMARY KEY,
pair TEXT,
amount REAL,
exchange_name TEXT,
order_id TEXT,
order_history TEXT
)
''')
#Load markets
self.markets = self.exchange.load_markets()
#Populates deals cache
self.deals_cache_length = 10
self.deals_cache_length = 20
self.deals_list = self.preload_deals(amount_to_preload=self.deals_cache_length)
@contextmanager
def _cur(self):
'''
Database cursor
'''
cur = self._db.cursor()
try:
yield cur
finally:
cur.close()
def preload_deals(self,amount_to_preload=10):
'''
Reads the last n deals from the database and returns them in a list
'''
connection = sqlite3.connect(self.profits_database_filename)
cursor = connection.cursor()
cursor.execute(f"SELECT * FROM profits_table WHERE exchange_name = ? ORDER BY timestamp DESC LIMIT ?", (self.get_exchange_name(), amount_to_preload))
result = cursor.fetchall()
connection.close()
query = "SELECT * FROM profits_table WHERE exchange_name = ? ORDER BY timestamp DESC LIMIT ?"
with self._cur() as cur:
cur.execute(query, (self.get_exchange_name(), amount_to_preload))
result = cur.fetchall()
return [(row[0],row[1],row[2],row[3],row[4],"") for row in result]
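The broker now opens one long-lived sqlite3 connection with check_same_thread=False and hands out short-lived cursors through the _cur context manager above, while writes go through "with self._db:" so they commit on success and roll back on exception. A self-contained sketch of the same pattern, with an in-memory database and a simplified table:

import sqlite3
from contextlib import contextmanager

db = sqlite3.connect(":memory:", check_same_thread=False)   # shared, long-lived connection
db.execute("CREATE TABLE IF NOT EXISTS profits_table (timestamp REAL PRIMARY KEY, pair TEXT, amount REAL)")

@contextmanager
def cur():
    c = db.cursor()
    try:
        yield c
    finally:
        c.close()        # the cursor is always released; the connection stays open

with db:                 # connection as context manager: commit on success, rollback on exception
    db.execute("INSERT INTO profits_table VALUES (?, ?, ?)", (1.0, "BTC/USDT", 0.5))

with cur() as c:
    c.execute("SELECT pair, amount FROM profits_table ORDER BY timestamp DESC LIMIT ?", (10,))
    print(c.fetchall())  # [('BTC/USDT', 0.5)]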
@ -66,6 +82,16 @@ class Broker:
def get_deals_cache(self):
return self.deals_list
def get_log_orders(self):
return self.log_orders
def set_log_orders(self,log_orders:bool):
self.log_orders = log_orders
return 0
def get_symbol(self,pair):
if "/" in pair:
return pair
@ -120,21 +146,13 @@ class Broker:
Returns the timestamps of the last trades from the database for the boosting algorithm
'''
retries = self.retries
while retries>0:
try:
database_connection = sqlite3.connect(self.profits_database_filename)
database_cursor = database_connection.cursor()
database_cursor.execute(f"SELECT * FROM profits_table WHERE timestamp >= {time.time()-timespan} ORDER BY timestamp")
rows = database_cursor.fetchall()
return [item[0] for item in rows if item[1]==pair]
except Exception as e:
self.logger.log_this(f"Exception in preload_timestamps: {e}")
if no_retries:
break
retries-=1
time.sleep(self.wait_time)
return []
limit = time.time()-timespan
query = "SELECT * FROM profits_table WHERE timestamp >= ? ORDER BY timestamp"
with self._cur() as cur:
cur.execute(query,(limit,))
rows = cur.fetchall()
return [item[0] for item in rows if item[1]==pair]
def write_profit_to_cache(self,dataset):
@ -151,22 +169,11 @@ class Broker:
'''
dataset format: (timestamp,pair,amount,exchange_name,order_id,order_history)
'''
retries = self.retries
while retries>0:
try:
database_connection = sqlite3.connect(self.profits_database_filename)
database_cursor = database_connection.cursor()
database_cursor.execute('INSERT INTO profits_table VALUES(?, ?, ?, ?, ?, ?)', dataset)
database_connection.commit()
database_connection.close()
except Exception as e:
self.logger.log_this(f"Exception in write_profit_to_db: {e}")
if no_retries:
break
retries-=1
time.sleep(self.wait_time)
return 0
return 1
query = "INSERT INTO profits_table VALUES(?, ?, ?, ?, ?, ?)"
with self._db:
self._db.execute(query, dataset)
return 0
def check_for_duplicate_profit_in_db(self,order,no_retries=False):
@ -175,29 +182,21 @@ class Broker:
Compares the id of the last profit order with the one in the database.
'''
retries = self.retries
while retries>0:
try:
database_connection = sqlite3.connect(self.profits_database_filename)
database_cursor = database_connection.cursor()
database_cursor.execute(f"SELECT * FROM profits_table WHERE pair = '{order['symbol']}' ORDER BY timestamp DESC LIMIT 1;")
rows = database_cursor.fetchall()
database_connection.close()
if rows==[]:
return False
return order["id"]==rows[0][4]
except Exception as e:
self.logger.log_this(f"Exception in check_for_duplicate_profit_in_db: {e}",1)
if no_retries:
break
retries-=1
time.sleep(self.wait_time)
return False
query = f"SELECT * FROM profits_table WHERE pair = ? ORDER BY timestamp DESC LIMIT 1;"
with self._cur() as cur:
cur.execute(query, (order["symbol"],))
result = cur.fetchone()
if result is None:
return False
return order["id"]==result[4]
def get_write_order_history(self):
return self.write_order_history
def get_follow_order_history(self):
return self.follow_order_history
def get_cooldown_multiplier(self):
return self.cooldown_multiplier
@ -205,6 +204,13 @@ class Broker:
self.cooldown_multiplier = value
return 0
def get_wait_after_initial_market_order(self):
return self.wait_after_initial_market_order
def set_wait_after_initial_market_order(self, value:float):
self.wait_after_initial_market_order = value
return 0
def get_wait_before_new_safety_order(self):
return self.wait_before_new_safety_order
@ -218,6 +224,7 @@ class Broker:
def set_default_order_size(self,size):
try:
self.broker_config["default_order_size"] = float(size)
self.rewrite_config_file()
except Exception as e:
self.logger.log_this(f"Exception in set_default_order_size: {e}",1)
return 1
@ -290,7 +297,7 @@ class Broker:
def reload_config_file(self):
try:
with open(self.config_filename) as f:
self.broker_config = json.load(f)
self.broker_config = load(f)
except Exception as e:
self.logger.log_this(f"Exception while reading the config file: {e}",1)
@ -340,9 +347,9 @@ class Broker:
try:
if backup:
with open(f"{self.exchange}.bak","w") as c:
c.write(json.dumps(self.broker_config, indent=4))
c.write(dumps(self.broker_config, indent=4))
with open(f"{self.config_filename}","w") as f:
f.write(json.dumps(self.broker_config, indent=4))
f.write(dumps(self.broker_config, indent=4))
return 0
except Exception as e:
self.logger.log_this(f"Problems writing the config file. Exception: {e}",1)
@ -390,10 +397,13 @@ class Broker:
if self.get_exchange_name()=="binance":
a = self.exchange.fetch_last_prices(pair_list)
return {x: a[x]["price"] for x in a.keys()}
elif self.get_exchange_name()=="kucoin":
a = self.exchange.fetch_tickers(pair_list)
if pair_list is None:
return {x: a[x]["close"] for x in a.keys()}
return {x: a[x]["close"] for x in a.keys() if x in pair_list}
else:
#a = self.exchange.fetch_tickers(pair_list)
a = self.exchange.fetch_tickers()
#return {x.upper(): a[x]["close"] for x in a.keys() if x.upper() in pair_list}
if pair_list is None:
return {x: a[x]["close"] for x in a.keys()}
return {x: a[x]["close"] for x in a.keys() if x in pair_list}
@ -414,13 +424,10 @@ class Broker:
:param no_retries: if True, will not retry if exception occurs
:return: closing price of trading pair
'''
retries = self.retries
while retries>0:
try:
pair = symbol
a = self.exchange.fetch_ticker(pair)
self.last_price = a["close"]
self.last_price = self.exchange.fetch_ticker(symbol)["close"]
return self.last_price
except Exception as e:
self.logger.log_this(f"Exception in get_ticker_price: {e}",1)
@ -471,7 +478,7 @@ class Broker:
try:
return orderbook["bids"][0][0]
except Exception as e:
self.logger.log_this(f"Exception getting top mid price: {e}",1,symbol)
self.logger.log_this(f"Exception getting top bid price: {e}",1,symbol)
return self.get_ticker_price(symbol)
@ -516,28 +523,6 @@ class Broker:
return []
def fetch_full_orders(self,pairs=None) -> list:
'''
Returns a list of all orders on the exchange
:param pairs: list of pairs to get orders for
:return: list of orders
'''
if pairs is None:
pairs = []
try:
orders = []
if self.get_exchange_name()=="binance":
orders = self.get_opened_orders_binance(pairs)
else:
orders = self.get_opened_orders()
return [] if orders is None else orders
except Exception as e:
self.logger.log_this(f"Exception in fetch_full_orders: {e}",2)
return []
def fetch_open_orders(self,pairs=None) -> list:
'''
Returns a list of all open orders on the exchange
@ -549,10 +534,18 @@ class Broker:
if pairs is None:
pairs = []
try:
#id_list = []
if self.get_exchange_name()=="binance":
return self.get_opened_orders_binance(pairs)
return self.get_opened_orders()
if self.broker_config.get("unified_order_query"):
return self.exchange.fetch_open_orders()
result = []
for pair in pairs:
a = self.exchange.fetch_open_orders(pair)
result.extend(iter(a))
return result
elif self.get_exchange_name()=="kucoin":
return self.exchange.fetch_open_orders(params={"pageSize": "500"})
else:
return self.exchange.fetch_open_orders()
except Exception as e:
self.logger.log_this(f"Exception in fetch_open_orders: {e}",2)
return []
@ -569,16 +562,15 @@ class Broker:
if pairs is None:
pairs = []
try:
#id_list = []
if self.get_exchange_name()=="binance":
return self.get_closed_orders_binance(pairs)
return self.get_closed_orders()
except Exception as e:
self.logger.log_this(f"Exception in fetch_open_orders: {e}",2)
self.logger.log_this(f"Exception in fetch_closed_orders: {e}",2)
return []
def get_opened_orders(self,no_retries=False): #It should return a list of all opened orders
def get_closed_orders(self,pair=None,no_retries=False): #It should return a list of all closed orders
'''
Returns a list of all the closed orders on the exchange
@ -589,28 +581,7 @@ class Broker:
retries = self.retries
while retries>0:
try:
return self.exchange.fetch_open_orders()
except Exception as e:
self.logger.log_this(f"Exception in get_opened_orders: {e}",1)
if no_retries:
break
time.sleep(self.wait_time)
retries-=1
return []
def get_closed_orders(self,no_retries=False): #It should return a list of all opened orders
'''
Returns a list of all the open orders on the exchange
:param pairs: list of pairs
:return: list of all the open orders on the exchange
'''
retries = self.retries
while retries>0:
try:
return self.exchange.fetch_closed_orders()
return self.exchange.fetch_closed_orders(pair)
except Exception as e:
self.logger.log_this(f"Exception in get_closed_orders: {e}",1)
if no_retries:
@ -620,27 +591,6 @@ class Broker:
return []
def get_opened_orders_binance(self,pairs):
'''
Returns a list of all the open orders on the exchange
:param pairs: list of pairs
:return: list of all the open orders on the exchange
'''
try:
if "unified_order_query" in self.broker_config and self.broker_config["unified_order_query"] is True:
return self.exchange.fetch_open_orders()
result = []
for pair in pairs:
a = self.exchange.fetch_open_orders(pair)
result.extend(iter(a))
return result
except Exception as e:
self.logger.log_this(f"Exception in get_opened_orders_binance: {e}",1)
return []
def get_closed_orders_binance(self,pairs):
'''
Returns a list of all the closed orders on the exchange
@ -650,7 +600,7 @@ class Broker:
'''
try:
if "unified_order_query" in self.broker_config and self.broker_config["unified_order_query"] is True:
if self.broker_config.get("unified_order_query"):
return self.exchange.fetch_closed_orders()
result = []
for pair in pairs:
@ -672,16 +622,15 @@ class Broker:
:return: 0 if order was successfully canceled, 1 if not
'''
pair = symbol
tries = self.retries//2
while tries>0:
try:
while self.get_order(id,pair)["status"]=="open":
self.exchange.cancel_order(id,symbol=pair)
while self.get_order(id,symbol)["status"]=="open":
self.exchange.cancel_order(id,symbol)
time.sleep(self.wait_time)
return 0
except Exception as e:
if self.get_order(id,pair)["status"]=="canceled":
if self.get_order(id,symbol)["status"]=="canceled":
return 0
self.logger.log_this(f"Exception in cancel_order: id {id} - exception: {e}",1)
if no_retries:
@ -715,7 +664,7 @@ class Broker:
return amount
def new_simulated_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False):
def new_simulated_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False,log=""):
'''
TODO: Emulating Market Orders With Limit Orders
@ -742,26 +691,29 @@ class Broker:
'''
retries = self.retries//2
pair = symbol
while retries>0:
try:
if self.get_exchange_name()=="gateio" and side=="buy" and not amount_in_base:
new_order = self.exchange.create_market_buy_order_with_cost(pair, size)
new_order = self.exchange.create_market_buy_order_with_cost(symbol, size)
if self.log_orders:
self.logger.log_order(f"New simulated market order: Symbol: {symbol} - Side: {side} - Size: {size} - ID: {new_order['id']} - Origin: {log}")
else:
order_book = self.get_order_book(symbol)
if order_book=={}:
self.logger.log_this(f"new_simulated_market_order. Order book returned an empty dictionary",1,symbol)
return self.empty_order
if amount_in_base or side!="buy":
base_amount = self.amount_to_precision(pair,size)
base_amount = self.amount_to_precision(symbol,size)
else:
avg_price = self.average_price_depth(order_book,size,"sell")
base_amount = size/avg_price if avg_price is not None else size/self.get_ticker_price(symbol)
price = self.find_minimum_viable_price(order_book,base_amount,side)
#Maybe check for slippage here instead of within the trader itself? idk
new_order = self.exchange.create_order(pair,"limit",side,base_amount,price)
new_order = self.exchange.create_order(symbol,"limit",side,base_amount,price)
if self.log_orders:
self.logger.log_order(f"New simulated market order: Symbol: {symbol} - Side: {side} - Size: {size} - Price: {price} - ID: {new_order['id']} - Origin: {log}")
time.sleep(self.wait_time)
return self.get_order(new_order["id"],pair)
return self.get_order(new_order["id"],symbol)
except Exception as e:
self.logger.log_this(f"new_simulated_market_order exception: {e}",1,symbol)
if no_retries:
@ -806,7 +758,7 @@ class Broker:
return None
def new_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False): #It should send a new market order to the exchange
def new_market_order(self,symbol,size,side,amount_in_base=False,no_retries=False, log=""): #It should send a new market order to the exchange
'''
Sends a new market order to the exchange.
@ -820,24 +772,24 @@ class Broker:
if self.broker_config["simulate_market_orders"]:
return self.new_simulated_market_order(symbol,size,side,amount_in_base=amount_in_base)
retries = self.retries
pair = symbol
while retries>0:
try:
if side=="buy":
to_buy = float(size)
if not amount_in_base:
to_buy = float(size)/self.get_top_ask_price(pair)
amount = self.amount_to_precision(pair,to_buy)
to_buy = float(size)/self.get_top_ask_price(symbol)
amount = self.amount_to_precision(symbol,to_buy)
else:
amount = self.amount_to_precision(pair,size) #Market sell orders are always nominated in base currency
amount = self.amount_to_precision(symbol,size) #Market sell orders are always denominated in base currency
order_to_send = self.exchange.create_order(pair,"market",side,amount)
order_to_send = self.exchange.create_order(symbol,"market",side,amount)
if self.log_orders:
self.logger.log_order(f"New market order: Symbol: {symbol} - Side: {side} - Size: {size} - ID: {order_to_send['id']} - Origin: {log}")
time.sleep(self.wait_time)
# Wait a bit more when dealing with Kucoin
return self.get_order(order_to_send["id"],pair)
return self.get_order(order_to_send["id"],symbol)
except Exception as e:
self.logger.log_this(f"Exception in new_market_order: {e}",1,pair)
self.logger.log_this(f"Exception in new_market_order: {e} - Side: {side} - Size: {size}",1,symbol)
if no_retries:
break
time.sleep(self.wait_time)
@ -885,7 +837,41 @@ class Broker:
return "the lowest price limit for sell orders is" in str(error_object).lower()
def new_limit_order(self,symbol,size,side,price,no_retries=False):
# def new_limit_orders(self, orders: list) -> list:
# sent_orders = []
# #Send the orders
# tries = self.retries
# while tries>=0:
# try:
# sent_orders = self.exchange.create_orders(orders)
# except Exception as e:
# self.logger.log_this(f"Exception while sending safety orders: {e}",1)
# tries-=1
# time.sleep(self.wait_time)
# if tries==0:
# return []
# #Retrieve the orders from the exchange by id to confirm that they were sent
# #Specially for OKX, since the orders that create_orders return are empty (only id is present)
# returned_orders = []
# for order in sent_orders:
# tries = self.retries
# while tries>=0:
# try:
# returned_orders.append(self.get_order(order["id"],order["symbol"]))
# time.sleep(self.wait_time)
# except Exception as e:
# self.logger.log_this(f"Exception while retrieving safety orders: {e}",1)
# tries-=1
# if tries==0:
# if self.get_exchange_name()=="okex":
# return returned_orders
# returned_orders.append(order) #In the case of the other exchanges, we just assume that the order was sent and append it.
# time.sleep(self.wait_time)
# return returned_orders
def new_limit_order(self,symbol,size,side,price,no_retries=False,log=""):
'''
Sends a new limit order.
@ -897,20 +883,15 @@ class Broker:
'''
tries = self.retries
pair = symbol
while tries>=0:
try:
order_to_send = self.exchange.create_order(pair,"limit",side,self.amount_to_precision(pair,size),price)
order_to_send = self.exchange.create_order(symbol,"limit",side,self.amount_to_precision(symbol,size),price)
time.sleep(self.wait_time)
return self.get_order(order_to_send["id"],pair)
#if order_to_send["amount"] is not None: # Because Kucoin etc etc
# return self.get_order(order_to_send["id"],pair) #
#self.logger.log_this(f"Error sending order: Null order returned",2,pair) #
#self.cancel_order(order_to_send["id"],symbol,no_retries=True) #
#retries-=1
if self.log_orders:
self.logger.log_order(f"New limit order: Symbol: {symbol} - Side: {side} - Size: {size} - Price: {price} - ID: {order_to_send['id']} - Notes: {log}")
return self.get_order(order_to_send["id"],symbol)
except Exception as e:
self.logger.log_this(f"Exception in new_limit_order - Side: {side} - Size: {size} - {self.amount_to_precision(pair,size)} - Exception: {e}",1,symbol)
self.logger.log_this(f"Exception in new_limit_order - Side: {side} - Size: {size} - {self.amount_to_precision(symbol,size)} - Exception: {e}",1,symbol)
if self.not_enough_balance_error(e):
if tries<=self.retries//2: #Halves the amount of retries if there is a balance error.
return 1
@ -941,10 +922,9 @@ class Broker:
if id=="":
return self.empty_order
tries = self.retries
pair = symbol
while tries>0:
try:
return self.exchange.fetch_order(id,symbol=pair)
return self.exchange.fetch_order(id,symbol)
except Exception as e:
self.logger.log_this(f"Exception in get_order: {e}",1,symbol)
if no_retries:
@ -962,10 +942,9 @@ class Broker:
:return: The market information.
'''
tries = self.retries
pair = symbol
while tries>0:
try:
return self.exchange.market(pair)
return self.exchange.market(symbol)
except Exception as e:
self.logger.log_this(f"Exception in fetch_market: {e}",1,symbol)
if no_retries:
@ -983,10 +962,9 @@ class Broker:
:return: The ticker information.
'''
tries = self.retries
pair = symbol
while tries>0:
try:
return self.exchange.fetch_ticker(pair)
return self.exchange.fetch_ticker(symbol)
except Exception as e:
self.logger.log_this(f"Exception in get_ticker: {e}")
if no_retries:
@ -1010,9 +988,9 @@ class Broker:
if self.get_exchange_name() in ["okex","bybit"]:
return float(market["limits"]["amount"]["min"])
elif self.get_exchange_name() in ["kucoin"]:
return (float(market["limits"]["cost"]["min"])+.1)/self.get_ticker_price(pair)
return max(float(market["limits"]["amount"]["min"]),(float(market["limits"]["cost"]["min"])+.25)/self.get_ticker_price(pair))
elif self.get_exchange_name() in ["gateio"]:
return (float(market["limits"]["cost"]["min"])+.25)/self.get_ticker_price(pair)
return (float(market["limits"]["cost"]["min"])+.1)/self.get_ticker_price(pair)
elif self.get_exchange_name()=="binance":
for line in market["info"]["filters"]:
if line["filterType"] == "NOTIONAL":
@ -1050,8 +1028,8 @@ class Broker:
:param pair: pair
:return: step size
'''
market = self.fetch_market(pair)
if market is None:
return None
@ -1074,18 +1052,30 @@ class Logger:
self.broker_config = broker_config
self.exchange_name = self.broker_config["exchange"]
self.tg_credentials = credentials.get_credentials("telegram")
self.log_list_max_length = 10
self.log_list = self.preload_logs()
self.log_list_max_length = 20 # log cache
self.log_list = collections.deque(maxlen=self.log_list_max_length)
self.preload_logs()
def preload_logs(self):
try:
with open(f"logs/{self.exchange_name}.log","r") as f:
self.log_list = f.readlines()
return self.log_list[-self.log_list_max_length:]
for line in f:
self.log_list.append(line.rstrip("\n"))
return 0
except Exception as e:
print(e)
return []
return 1
def refresh_logs(self):
try:
self.log_list.clear()
self.preload_logs()
return 0
except Exception as e:
print(e)
return 1
def set_log_list_max_length(self, amount):
@ -1094,7 +1084,7 @@ class Logger:
def get_log_list(self):
return self.log_list
return list(self.log_list)
def set_telegram_notifications(self, toggle):
@ -1115,7 +1105,7 @@ class Logger:
send_text = f"https://api.telegram.org/bot{tg_credentials['token']}/sendMessage?chat_id={tg_credentials['chatid']}&parse_mode=Markdown&text={message}"
output = None
if self.broker_config["telegram"] or ignore_config:
output = requests.get(send_text,timeout=5).json() #5 seconds timeout. This could also be a tunable.
output = requests_get(send_text,timeout=5).json() #5 seconds timeout. This could also be a tunable.
if not output["ok"]:
self.log_this(f"Error in send_tg_message: {output}")
return 1
@ -1124,6 +1114,9 @@ class Logger:
self.log_this(f"Error in send_tg_message: {e}",1)
return 1
def log_order(self,message):
with open("logs/orders.log","a") as log_file:
log_file.write(f"{time.strftime('[%Y/%m/%d %H:%M:%S]')} | {message}\n")
def log_this(self,message,level=2,pair=None):
'''
@ -1145,26 +1138,16 @@ class Logger:
#Write to log file
with open(f"logs/{self.exchange_name}.log","a") as log_file:
log_file.write(text+"\n")
log_file.close()
#Append to log list
self.log_list.append(text)
#Trim log list
self.log_list = self.log_list[-self.log_list_max_length:]
except Exception as e:
print("Can't write log file")
print(e)
if level<1:
self.send_tg_message(f"{self.broker_config['exchange'].capitalize()} | {pair_data}{message}",ignore_config=level==-1)
return 0
class Order:
def __init__(self, order: dict = {}):
pass
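The Logger above now keeps its in-memory log cache in a collections.deque with maxlen (20 by default), so appending past the cap silently drops the oldest entries and no manual trimming is needed. A minimal sketch of that behaviour:

import collections

log_list = collections.deque(maxlen=20)   # same cap as log_list_max_length above
for i in range(25):
    log_list.append(f"log line {i}")

print(len(log_list))   # 20 -- the five oldest lines were dropped automatically
print(log_list[0])     # 'log line 5'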

1598
main.py

File diff suppressed because it is too large.

View File

@ -1,5 +1,5 @@
import json
import time
from time import strftime
from json import dumps, load
class StatusHandler:
'''
@ -12,7 +12,8 @@ class StatusHandler:
"pair": f"{base}/{quote}",
"take_profit_order": broker.get_empty_order(),
"take_profit_price": 0.0,
"safety_order": broker.get_empty_order(),
"safety_orders": [],
"safety_orders_filled": 0,
"next_so_price": 0.0,
"order_size": 0.0,
"partial_profit": 0.0,
@ -23,8 +24,7 @@ class StatusHandler:
"quote_spent": 0.0,
"base_bought": 0.0,
"so_amount": 0,
"no_of_safety_orders": "",
"take_profit_price": "",
"no_of_safety_orders": 0,
"safety_price_table": [],
"deal_uptime": 0.0,
"total_uptime": 0.0,
@ -44,10 +44,10 @@ class StatusHandler:
"deal_order_history": []
}
self.status_file_path = f"status/{base}{quote}.status"
self.status_dictionary = self.default_status_dictionary.copy()
if status_dict is not None:
self.status_dictionary = {**self.status_dictionary, **status_dict}
self.status_dictionary = {k: v for k, v in self.default_status_dictionary.items()}
if status_dict:
self.status_dictionary.update(status_dict)
self.save_to_file()
def get_pair(self):
@ -59,8 +59,14 @@ class StatusHandler:
def get_take_profit_price(self):
return self.status_dictionary["take_profit_price"]
def get_safety_order(self):
return self.status_dictionary["safety_order"]
def get_safety_orders(self):
"""
Returns the list of open safety orders
"""
return self.status_dictionary["safety_orders"]
def get_safety_orders_filled(self):
return self.status_dictionary["safety_orders_filled"]
def get_next_so_price(self):
return self.status_dictionary["next_so_price"]
@ -149,6 +155,10 @@ class StatusHandler:
def get_status_file_path(self):
return self.status_file_path
def set_pair(self, trading_pair):
self.pair = trading_pair
return 0
def set_status_file_path(self, new_file_path: str):
# if not isinstance(new_file_path, str):
# self.broker.logger.log_this(f"File path provided is not a string",1,self.get_pair())
@ -182,8 +192,15 @@ class StatusHandler:
self.status_dictionary["so_order_id"] = order_id
return 0
def set_safety_order(self, order):
self.status_dictionary["safety_order"] = order
def set_safety_orders(self, orders: list):
"""
Replaces the whole safety orders list
"""
self.status_dictionary["safety_orders"] = orders
return 0
def set_safety_orders_filled(self, amount: int):
self.status_dictionary["safety_orders_filled"] = amount
return 0
def set_next_so_price(self, price: float):
@ -382,29 +399,44 @@ class StatusHandler:
self.status_dictionary["deal_order_history"] = deal_history
return 0
def add_safety_order(self, order):
"""
Appends a newly-created safety order to the internal list
"""
self.status_dictionary["safety_orders"].append(order)
return 0
def remove_safety_order_by_id(self, order_id: str):
"""
Removes an order from the list (mostly used when that order is filled or canceled)
"""
orders = self.get_safety_orders()
self.status_dictionary["safety_orders"] = [order for order in orders if order["id"] != order_id]
return 0
def clear_deal_order_history(self):
self.status_dictionary["deal_order_history"] = []
return 0
def update_deal_order_history(self, new_deal: dict):
def update_deal_order_history(self, new_deal: dict, note: str = ""):
# if not isinstance(new_deal, dict):
# self.broker.logger.log_this(f"value provided is not a dict",1,self.get_pair())
self.status_dictionary["deal_order_history"].append(new_deal)
id = new_deal["id"] if "id" in new_deal else None
self.status_dictionary["deal_order_history"].append(f"{note} - {id}")
return 0
def save_to_file(self, file_path = None, is_backup = False):
if file_path is None:
file_path = self.status_file_path
if is_backup:
try:
with open(time.strftime(f"{file_path}_%Y-%m-%d_%H:%M:%S.json"), "w") as f:
f.write(json.dumps(self.status_dictionary, indent=4))
with open(strftime(f"{file_path}_%Y-%m-%d_%H:%M:%S.json"), "w") as f:
f.write(dumps(self.status_dictionary, indent=4))
except Exception as e:
self.broker.logger.log_this(f"Error creating status backup file: {e}",1)
try:
with open(file_path, "w") as f:
f.write(json.dumps(self.status_dictionary, indent=4))
f.write(dumps(self.status_dictionary, indent=4))
return 0
except Exception as e:
self.broker.logger.log_this(f"Error saving status to file: {file_path}: {e}",1)
@ -415,7 +447,7 @@ class StatusHandler:
file_path = self.status_file_path
try:
with open(file_path, "r") as f:
self.status_dictionary = {**self.default_status_dictionary, **json.load(f)}
self.status_dictionary = {**self.default_status_dictionary, **load(f)}
return 0
except Exception as e:
self.broker.logger.log_this(f"Error loading status from file: {file_path}: {e}",1)

View File

@ -1,30 +1,25 @@
Mandatory:
=========
1. Stats webpage.
2. Maintain local orderbooks for each trading pair, which enables:
0. Stats webpage.
1. Maintain local orderbooks for each trading pair, which enables:
2a. Smart order pricing: Prioritization of fill speed over instant profit or vice versa
3. Proper handling of order price too high/low in OKX (rare, it happens when under heavy volatility).
4. Multiple safety orders open at the same time (to catch big volatility spikes more effectively)
5. Things that should be objects (it's not 1994):
* Orders.
* Config (Mostly done).
* Status (Mostly done).
6. API documentation.
7. Implement api key hashing.
8. Dockerize.
2. Proper handling of order price too high/low in OKX (rare, it happens when under heavy volatility).
3. API documentation.
4. Implement api key hashing.
5. Dockerize.
6. Earn should be integrated into the instance, in order to be able to invest the idle funds from the short traders.
Would be nice to have:
=====================
0. Trader order: alphabetical; by uptime; by safety orders, by percentage_to_completion. (Although this may be more suitable for the web and mobile apps)
1. Local implementation of amount_to_precision, cost_to_precision and price_to_precision. (Unless the plan is to continue to use CCXT forever)
2. Instead of cancelling and resending the take profit order, you could just edit it (Kucoin only supports editing on high frequency orders)
3. Round-robin trading pairs: Instead of a fixed list of trading pairs, after n closed deals the trader is terminated and a new one spawns, picking the trading pair
2. Instead of cancelling and resending the take profit order, edit it (Kucoin only supports editing on high frequency orders)
3. When autoswitching to long, instead of using a big market order, the last safety order should be a sell order of all the available funds.
4. Round-robin trading pairs: Instead of a fixed list of trading pairs, after n closed deals the trader is terminated and a new one spawns, picking the trading pair
from a pre-populated list (the trading pairs can be selected by using Yang-Zhang, Parkinson or another volatility indicator)
This could be very beneficial, since it limits the long-term commitment to a small list of trading pairs, enabling the instance to react to market trends very
rapidly.
4. Earn should also use funds from short traders.
4b. Should Earn be integrated to the instance?
Maybe it's a good idea?:
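The round-robin item above suggests ranking candidate pairs with a volatility indicator such as Parkinson or Yang-Zhang. As a hedged illustration (not part of the diff), the Parkinson range estimator only needs per-bar highs and lows:

import math

def parkinson_volatility(highs, lows):
    # Parkinson (1980): sigma = sqrt( (1 / (4 * ln(2) * n)) * sum( ln(high/low)^2 ) )
    n = len(highs)
    acc = sum(math.log(h / l) ** 2 for h, l in zip(highs, lows))
    return math.sqrt(acc / (4 * math.log(2) * n))

# Hypothetical daily candles; wider high/low ranges score higher
highs = [101.0, 103.5, 102.2, 105.0]
lows = [99.0, 100.1, 99.8, 101.5]
print(parkinson_volatility(highs, lows))   # ~0.017 per bar -- rank pairs by this to pick candidates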

1391
trader.py

File diff suppressed because it is too large.

View File

@ -11,6 +11,12 @@ try:
api_key = credentials.get_credentials("testnet_api_key")["key"]
base_url = credentials.get_url("testnet") #type: ignore
exchanges = {"Binance":"/binance"}
elif sys.argv[1]=="--local_testnet":
is_testnet = True
string_to_add = "LOCAL TESTNET "
api_key = credentials.get_credentials("local_testnet_api_key")["key"]
base_url = credentials.get_url("local_testnet") #type: ignore
exchanges = {"Binance":":5001"}
elif sys.argv[1]=="--mainnet":
is_testnet = False
string_to_add = "MAINNET "
@ -37,7 +43,8 @@ INSTANCE
10) edit_call_wait_time 11) reload_markets 12) fetch_full_log
13) paused_traders 14) fetch_log 15) edit_cooldown_multiplier
16) get_balance 17) cancel_global_last_call
18) mod_default_order_size
18) mod_default_order_size 19) toggle_log_orders
20) refresh_log_cache
EARN
31) toggle_pause 32) get_step_size 33) set_step_size
@ -56,9 +63,10 @@ TRADERS
62) mod_tp_level 63) last_call 64) deferred_last_call
65) toggle_pause 66) toggle_cleanup 67) toggle_autoswitch
68) toggle_check_old_long_price 69) switch_quote_currency
70) reload_safety_order 71) view_old_long 72) switch_price
73) reload_trader_config 74) toggle_liquidate_after_switch
75) base_add_calculation
70) view_old_long 71) switch_price 72) reload_trader_config
73) toggle_liquidate_after_switch 74) base_add_calculation
75) mod_concurrent_safety_orders 76) force_trader_close
77) mod_order_size
98) Change broker 99) Exit
'''
@ -334,6 +342,19 @@ if __name__=="__main__":
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")
elif command==19:
print("toggle_log_orders turns on or off the logging of orders")
if input("Proceed? (Y/n) ") in ["Y","y",""]:
url = f"{base_url}{port}/toggle_log_orders"
print(json.loads(requests.post(url, headers=headers).content))
input("Press ENTER to continue ")
elif command==20:
print("refresh_log_cache refreshes the log cache")
if input("Proceed? (Y/n) ") in ["Y","y",""]:
url = f"{base_url}{port}/refresh_log_cache"
print(json.loads(requests.post(url, headers=headers).content))
input("Press ENTER to continue ")
######################
######## EARN ########
@ -561,10 +582,6 @@ if __name__=="__main__":
print("In order for the importing to be successful, a status file must exist in the status directory ")
print("and the take profit order must be open.")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
tp_id_input = input("Input take profit order id to use (if any)")
forced_tp_id = None if tp_id_input=="" else tp_id_input
so_id_input = input("Input safety order id to use (if any)")
forced_so_id = None if so_id_input=="" else so_id_input
if not validate_pair(trading_pair):
print("The input is invalid")
@ -573,9 +590,7 @@ if __name__=="__main__":
url = f"{base_url}{port}/import_pair"
base,quote = trading_pair.split("/")
parameters = {"base": base,
"quote": quote,
"forced_tp_id": forced_tp_id,
"forced_so_id": forced_so_id}
"quote": quote}
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")
@ -798,20 +813,6 @@ if __name__=="__main__":
input("Press ENTER to continue ")
elif command==70:
print("reload_safety_order reloads the safety order to the reader using the order id present in the status dictionary")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
if not validate_pair(trading_pair):
print("The input is invalid")
break
if input("Proceed? (Y/n) ") in ["Y","y",""]:
url = f"{base_url}{port}/reload_safety_order"
base,quote = trading_pair.split("/")
parameters = {"base": base,
"quote": quote}
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")
elif command==71:
print("Views the old_long information")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
if not validate_pair(trading_pair):
@ -824,7 +825,7 @@ if __name__=="__main__":
print(json.loads(requests.get(url,headers=headers).content))
input("Press ENTER to continue ")
elif command==72:
elif command==71:
print("Returns the price target to reach to switch to long mode")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
if not validate_pair(trading_pair):
@ -836,7 +837,7 @@ if __name__=="__main__":
print(json.loads(requests.get(url,headers=headers).content))
input("Press ENTER to continue ")
elif command==73:
elif command==72:
print("Reloads from disk the configuration file of a trader")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
if not validate_pair(trading_pair):
@ -850,7 +851,7 @@ if __name__=="__main__":
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")
elif command==74:
elif command==73:
print("toggle_liquidate_after_switch enables or disables the liquidation after an automatic switch to long of a short trader")
print("This is only valid in a short trader, of course.")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
@ -865,7 +866,7 @@ if __name__=="__main__":
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")
elif command==75:
elif command==74:
print("Returns the amount of safety orders that can be added to a short trader with the available funds")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
if not validate_pair(trading_pair):
@ -876,3 +877,56 @@ if __name__=="__main__":
url = f"{base_url}{port}/base_add_so_calculation?base={base}&quote={quote}"
print(json.loads(requests.get(url,headers=headers).content))
input("Press ENTER to continue ")
elif command==75:
print("mod_concurrent_safety_orders modifies the amount of safety orders opened at the same time")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
new_amount = input("Desired amount of orders: ")
if not validate_pair(trading_pair):
print("The input is invalid")
break
if not validate_int(new_amount):
print("The amount entered is invalid")
break
if input("Proceed? (Y/n) ") in ["Y","y",""]:
url = f"{base_url}{port}/mod_concurrent_safety_orders"
base,quote = trading_pair.split("/")
parameters = {"base": base,
"quote": quote,
"amount": new_amount}
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")
elif command==76:
print("force_trader_close forces a trader to close the current position")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
if not validate_pair(trading_pair):
print("The input is invalid")
break
if input("Proceed? (Y/n) ") in ["Y","y",""]:
url = f"{base_url}{port}/force_trader_close"
base,quote = trading_pair.split("/")
parameters = {"base": base,
"quote": quote}
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")
elif command==77:
print("mod_order_size modifies the initial order size of a trader")
print("The change impacts as soon as the trader starts a new deal")
trading_pair = input("Input trader in the format BASE/QUOTE: ").upper()
amount = input("Desired order size: ")
if not validate_pair(trading_pair):
print("The input is invalid")
break
if not validate_float_or_int(amount):
print("The amount entered is invalid")
break
if input("Proceed? (Y/n) ") in ["Y","y",""]:
url = f"{base_url}{port}/mod_order_size"
base,quote = trading_pair.split("/")
parameters = {"base": base,
"quote": quote,
"amount": amount}
print(json.loads(requests.post(url, headers=headers, json=parameters).content))
input("Press ENTER to continue ")

View File

@ -1,41 +1,44 @@
import sqlite3
import sys
import datetime
import time
import ccxt
import credentials
import calendar
import requests
import logging
import threading
import os
from contextlib import contextmanager
from flask import Flask, jsonify, request
from waitress import serve
cache_requests = True if len(sys.argv)>1 and sys.argv[1]=="--cache_requests" else False
profits_database = "../profits/profits_database.db"
hashes_db = {"fetch_last_n_deals":0,
"fetch_last_n_deals_without_history":0,
"fetch_full_log":0,
"fetch_log":0,
"daily_totals":0,
"daily_totals_by_pair":0,
"monthly_totals":0,
"monthly_totals_by_pair":0,
"get_averages":0,
"total_profit":0,
"total_profit_by_pair":0}
_local_storage = threading.local()
def get_db_connection():
current_time = time.time()
def get_market_caps(limit):
api_key = credentials.get_credentials("CMC")["key"]
url = f"https://pro-api.coinmarketcap.com/v1/cryptocurrency/listings/latest?CMC_PRO_API_KEY={api_key}&convert=USD&limit={limit}"
return requests.get(url).json()["data"]
if not hasattr(_local_storage, 'connection') or not hasattr(_local_storage, 'created_at') or (current_time - _local_storage.created_at) > 3600: # Reconnect every hour
if hasattr(_local_storage, 'connection'):
try:
_local_storage.connection.close()
except:
pass
_local_storage.connection = sqlite3.connect(profits_database, check_same_thread=False)
_local_storage.connection.row_factory = sqlite3.Row
_local_storage.created_at = current_time
return _local_storage.connection
@contextmanager
def db_cursor():
conn = get_db_connection()
cur = conn.cursor()
try:
yield cur
except Exception:
conn.rollback()
raise
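get_db_connection above keeps one sqlite3 connection per thread via threading.local and rebuilds it after an hour; db_cursor then wraps it so a failed query rolls the transaction back. A reduced, illustrative sketch of the per-thread part, showing that two threads end up with distinct connections:

import sqlite3
import threading
import time

_local = threading.local()

def get_conn(path=":memory:", max_age=3600):
    now = time.time()
    if not hasattr(_local, "conn") or (now - getattr(_local, "created_at", 0)) > max_age:
        _local.conn = sqlite3.connect(path, check_same_thread=False)
        _local.created_at = now
    return _local.conn

conns = []
threads = [threading.Thread(target=lambda: conns.append(get_conn())) for _ in range(2)]
for t in threads:
    t.start()
for t in threads:
    t.join()
print(len({id(c) for c in conns}))   # 2 -- each worker thread built its own connection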
def load_keys_from_db(file_name):
#valid_keys = []
connection = sqlite3.connect(file_name)
cursor = connection.cursor()
cursor.execute("SELECT * FROM credentials_table")
@ -43,46 +46,50 @@ def load_keys_from_db(file_name):
connection.close()
valid_keys = [line[1] for line in data]
#for line in data:
# valid_keys.append(line[1])
return valid_keys
def get_valid_keys():
if not hasattr(get_valid_keys, '_keys'):
get_valid_keys._keys = load_keys_from_db("api_credentials.db")
return get_valid_keys._keys
def profit_report():
##Queries
connection = sqlite3.connect(profits_database)
cursor = connection.cursor()
#Last 60 days query
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
GROUP BY day_utc3
ORDER BY day_utc3;""")
last_60_days_rows = cursor.fetchall()
with db_cursor() as cursor:
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
GROUP BY day_utc3
ORDER BY day_utc3;""")
last_60_days_rows = cursor.fetchall()
#Last 30 days query
#cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 30 * 24 * 60 * 60 -- 30 days in seconds;""")
last_30_days = cursor.fetchall()
with db_cursor() as cursor:
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 30 * 24 * 60 * 60 -- 30 days in seconds;""")
last_30_days = cursor.fetchall()
#Last 7 days query
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 7 * 24 * 60 * 60 -- 7 days in seconds;""")
last_7_days = cursor.fetchall()
with db_cursor() as cursor:
cursor.execute("""SELECT strftime('%Y-%m-%d', timestamp, 'unixepoch', '-3 hours') AS day_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 7 * 24 * 60 * 60 -- 7 days in seconds;""")
last_7_days = cursor.fetchall()
#Last n months query
cursor.execute("""SELECT strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') AS year_month_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 18 * 30 * 24 * 60 * 60 -- 18 months in seconds
GROUP BY year_month_utc3
ORDER BY year_month_utc3;""")
last_n_months_rows = cursor.fetchall()
with db_cursor() as cursor:
cursor.execute("""SELECT strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') AS year_month_utc3,
SUM(amount) AS total_amount
FROM profits_table
WHERE strftime('%s', 'now') - timestamp <= 18 * 30 * 24 * 60 * 60 -- 18 months in seconds
GROUP BY year_month_utc3
ORDER BY year_month_utc3;""")
last_n_months_rows = cursor.fetchall()
#Yearly totals
# cursor.execute("""SELECT strftime('%Y', timestamp, 'unixepoch', '-3 hours') AS year_utc3,
# SUM(amount) AS total_amount
@@ -92,26 +99,24 @@ def profit_report():
# ORDER BY year_utc3;""")
# yearly_totals = cursor.fetchall()
#Per exchange
cursor.execute("""SELECT
exchange_name,
CASE
WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime') THEN 'This Month'
WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime', '-1 month') THEN 'Last Month'
ELSE 'Other Months'
END AS month_group,
SUM(amount) AS total_amount
FROM
profits_table
WHERE
strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
GROUP BY
exchange_name, month_group
ORDER BY
exchange_name, month_group;""")
per_exchange = cursor.fetchall()
#Close db
cursor.close()
with db_cursor() as cursor:
cursor.execute("""SELECT
exchange_name,
CASE
WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime') THEN 'This Month'
WHEN strftime('%Y-%m', timestamp, 'unixepoch', '-3 hours') = strftime('%Y-%m', 'now', 'localtime', '-1 month') THEN 'Last Month'
ELSE 'Other Months'
END AS month_group,
SUM(amount) AS total_amount
FROM
profits_table
WHERE
strftime('%s', 'now') - timestamp <= 60 * 24 * 60 * 60 -- 60 days in seconds
GROUP BY
exchange_name, month_group
ORDER BY
exchange_name, month_group;""")
per_exchange = cursor.fetchall()
#Projection calculation
@@ -127,16 +132,17 @@ def profit_report():
okex_amount = 0
for row in per_exchange:
if row[0]=="binance":
exchange_name = row[0].strip().lower()
if exchange_name=="binance":
if row[1]=="This Month":
binance_amount = row[2]
elif row[0]=="gateio":
elif exchange_name=="gateio":
if row[1]=="This Month":
gateio_amount = row[2]
elif row[0]=="kucoin":
elif exchange_name=="kucoin":
if row[1]=="This Month":
kucoin_amount = row[2]
elif row[0]=="okex":
elif exchange_name=="okex":
if row[1]=="This Month":
okex_amount = row[2]
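The .strip().lower() normalization above makes the per-exchange match tolerant of stray whitespace or casing in exchange_name. The if/elif chain could equally be a dictionary lookup; a hedged alternative sketch (not what the commit does), assuming per_exchange rows shaped like (exchange_name, month_group, amount):

this_month = {"binance": 0, "gateio": 0, "kucoin": 0, "okex": 0}
for name, month_group, amount in per_exchange:
    key = name.strip().lower()
    if key in this_month and month_group == "This Month":
        this_month[key] += amount
binance_amount = this_month["binance"]
gateio_amount = this_month["gateio"]
kucoin_amount = this_month["kucoin"]
okex_amount = this_month["okex"]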
@@ -168,41 +174,36 @@ def profit_report():
"Total profit": total_amount}
def query_total_profit(pair=None):
'''
Returns total profit of the trading pair.
If no pair specified, returns the grand total of all pairs.
'''
connection = sqlite3.connect(profits_database)
cursor = connection.cursor()
if pair is None:
query = "SELECT SUM(amount) AS total_profit FROM profits_table"
cursor.execute(query)
connection.close()
query_result = cursor.fetchall()
with db_cursor() as cursor:
cursor.execute(query)
query_result = cursor.fetchall()
return query_result[0][0]
else:
query = """SELECT pair, SUM(amount) AS total_profit
FROM profits_table
GROUP BY pair;"""
cursor.execute(query)
connection.close()
query_result = cursor.fetchall()
with db_cursor() as cursor:
cursor.execute(query)
query_result = cursor.fetchall()
for item in query_result:
if item[0].replace("/","")==pair:
return item[1]
return 0
def daily_and_monthly_totals():
def daily_and_monthly_totals() -> tuple[float, float]:
'''
Returns a tuple with the current day and the current month's total profit.
'''
#Connect to db
connection = sqlite3.connect(profits_database)
cursor = connection.cursor()
now = datetime.datetime.now()
# Create a datetime object for the start of the day
@@ -213,15 +214,16 @@ def daily_and_monthly_totals():
start_of_day_unix = int(time.mktime(start_of_day.timetuple()))
start_of_month_unix = int(time.mktime(start_of_month.timetuple()))
query = """SELECT * FROM profits_table
WHERE timestamp >= ?
ORDER BY timestamp DESC;"""
cursor.execute(query, (start_of_month_unix,))
query_result = cursor.fetchall()
connection.close()
monthly_total = sum([item[2] for item in query_result])
daily_total = sum([item[2] for item in query_result if item[0]>=start_of_day_unix])
query = """SELECT
COALESCE(SUM(CASE WHEN timestamp >= :day THEN amount END),0) AS daily_total,
COALESCE(SUM(CASE WHEN timestamp >= :month THEN amount END),0) AS monthly_total
FROM profits_table;
"""
with db_cursor() as cur:
cur.execute(query, {"day": start_of_day_unix, "month": start_of_month_unix})
row = cur.fetchone()
daily_total = float(row["daily_total"])
monthly_total = float(row["monthly_total"])
return (daily_total, monthly_total)
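The rewrite above pushes the per-day and per-month summation into a single conditional-aggregation query: COALESCE(SUM(...), 0) keeps both totals at 0 when no rows fall in the window, and the :day/:month named parameters are the precomputed start-of-day and start-of-month Unix timestamps. A standalone sketch of the same idea against the profits_table schema used here:

import datetime
import sqlite3
import time

def day_and_month_totals(conn: sqlite3.Connection) -> tuple[float, float]:
    now = datetime.datetime.now()
    start_of_day = int(time.mktime(datetime.datetime(now.year, now.month, now.day).timetuple()))
    start_of_month = int(time.mktime(datetime.datetime(now.year, now.month, 1).timetuple()))
    row = conn.execute(
        """SELECT
               COALESCE(SUM(CASE WHEN timestamp >= :day THEN amount END), 0) AS daily_total,
               COALESCE(SUM(CASE WHEN timestamp >= :month THEN amount END), 0) AS monthly_total
           FROM profits_table;""",
        {"day": start_of_day, "month": start_of_month},
    ).fetchone()
    return float(row[0]), float(row[1])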
@@ -231,9 +233,6 @@ def query_daily_totals(pair=None):
Returns a dictionary of daily totals of the trading pair.
If no pair specified, returns the totals of all pairs.
'''
#Connect to db
connection = sqlite3.connect(profits_database)
cursor = connection.cursor()
result = {}
@@ -242,9 +241,9 @@ def query_daily_totals(pair=None):
SUM(amount) AS total_profit
FROM profits_table
GROUP BY day_utc3;"""
cursor.execute(query)
query_result = cursor.fetchall()
connection.close()
with db_cursor() as cursor:
cursor.execute(query)
query_result = cursor.fetchall()
for item in query_result:
result[item[0]] = item[1]
else:
@@ -252,9 +251,9 @@ def query_daily_totals(pair=None):
SUM(amount) AS total_profit
FROM profits_table
GROUP BY pair, day_utc3;"""
cursor.execute(query)
query_result = cursor.fetchall()
connection.close()
with db_cursor() as cursor:
cursor.execute(query)
query_result = cursor.fetchall()
for item in query_result:
if item[0].replace("/","")==pair:
result[item[1]] = item[2]
@@ -266,9 +265,6 @@ def query_monthly_totals(pair=None):
Returns a dictionary of monthly totals of the trading pair.
If no pair specified, returns the totals of all pairs.
'''
#Connect to db
connection = sqlite3.connect(profits_database)
cursor = connection.cursor()
result = {}
@@ -277,9 +273,9 @@ def query_monthly_totals(pair=None):
SUM(amount) AS total_profit
FROM profits_table
GROUP BY month;"""
cursor.execute(query)
query_result = cursor.fetchall()
connection.close()
with db_cursor() as cursor:
cursor.execute(query)
query_result = cursor.fetchall()
for item in query_result:
result[item[0]] = item[1]
else:
@@ -287,9 +283,9 @@ def query_monthly_totals(pair=None):
SUM(amount) AS total_profit
FROM profits_table
GROUP BY pair, month;"""
cursor.execute(query)
query_result = cursor.fetchall()
connection.close()
with db_cursor() as cursor:
cursor.execute(query)
query_result = cursor.fetchall()
for item in query_result:
if item[0].replace("/","")==pair:
result[item[1]] = item[2]
@@ -300,11 +296,9 @@ def last_n_deals(n):
'''
Returns a list of the latest n deals
'''
connection = sqlite3.connect(profits_database)
cursor = connection.cursor()
cursor.execute("SELECT * FROM profits_table ORDER BY timestamp DESC LIMIT ?",(n,))
result = cursor.fetchall()
connection.close()
with db_cursor() as cursor:
cursor.execute("SELECT * FROM profits_table ORDER BY timestamp DESC LIMIT ?",(n,))
result = cursor.fetchall()
return result
@@ -338,98 +332,34 @@ def last_n_lines(file_name,width,amount=4,full_log=False):
return result[:amount],len(file_contents)
def return_parkinson_backtests(broker, days, max_rank):
'''
Returns a dictionary containing backtests with the format {coin: value}
'''
if broker not in ["binance", "gateio", "kucoin", "okx", "bybit"]:
return {}
def tail_log(filename, lines=200):
if not os.path.exists(filename):
return []
evaluation_dictionary = {}
start_of_day = int(time.mktime(datetime.datetime.now().date().timetuple()))
since = int(start_of_day - 60*60*24*days)
block_size = 1024
blocks = []
with open(filename, 'rb') as f:
f.seek(0, 2)
#total_bytes = remaining_bytes = f.tell()
remaining_bytes = f.tell()
# Getting the data from the database
print("Querying database...")
conn = sqlite3.connect(f"data/{broker}.db")
cursor = conn.cursor()
cursor.execute('SELECT * FROM volatilities_table WHERE timestamp > ?', (since,))
rows = cursor.fetchall()
conn.close()
while len(blocks) < lines and remaining_bytes > 0:
read_bytes = min(block_size, remaining_bytes)
f.seek(-read_bytes, 1)
block = f.read(read_bytes).splitlines()
f.seek(-read_bytes, 1)
# Parse the data
print("Parsing the data...")
for row in rows:
if row[0] not in evaluation_dictionary:
evaluation_dictionary[row[0]] = [row[2]]
else:
evaluation_dictionary[row[0]].append(row[2])
# Prepend to blocks (since we're reading backwards)
blocks = block[-(len(blocks)+1):] + blocks
remaining_bytes -= read_bytes
#Calculate weighted averages
print("Calculating weighted averages")
weighted_averages = {}
for key in evaluation_dictionary:
multiplier = len(evaluation_dictionary[key])
total = 0
for value in evaluation_dictionary[key][::-1]:
total+=value*multiplier/len(evaluation_dictionary[key])
multiplier-=1
weighted_averages[key] = total/len(evaluation_dictionary[key])
#Filter by rank
print("Filtering results by CMC rank")
coins_accepted = []
market_caps = get_market_caps(max_rank)
for result in market_caps:
coins_accepted.append(result["symbol"])
for coin in weighted_averages.copy():
if coin.split("/")[0] not in coins_accepted:
del(weighted_averages[coin])
#Checking open markets
print("Filtering results by market state")
exchange_class = getattr(ccxt, broker)
broker = exchange_class({
"apiKey": "",
"secret": "",
"timeout": 30000,
"enableRateLimit": True,
'options': {
'newOrderRespType': 'FULL'}
})
markets = broker.load_markets()
for key in weighted_averages.copy():
if key not in markets or not markets[key]["active"]:
del(weighted_averages[key])
return weighted_averages
# Decode and filter empty lines
result = [line.decode('utf-8', errors='ignore').strip() for line in blocks if line.strip()]
return result[-lines:],len(result[-lines:])
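tail_log, added above, seeks to the end of the file and reads backwards in 1 KB blocks, so it never loads the whole log; it then decodes the collected blocks, drops empty lines, and returns the last N non-empty lines together with their count. For comparison, a simpler whole-file alternative (not what the commit uses) that trades memory for brevity:

from collections import deque

def tail_log_simple(filename, lines=200):
    # Reads the entire file; acceptable for modest log sizes.
    try:
        with open(filename, "r", encoding="utf-8", errors="ignore") as f:
            kept = deque((ln.strip() for ln in f if ln.strip()), maxlen=lines)
    except FileNotFoundError:
        return [], 0
    result = list(kept)
    return result, len(result)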
stats_api = Flask(__name__)
@stats_api.route("/fetch_backtests")
def fetch_backtests():
'''
GET request
Parameters: 'exchange_name' -> string
'days' -> int
'max_rank' -> int
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
broker = request.args.get("exchange_name")
days = int(request.args.get("days")) # type: ignore
max_rank = int(request.args.get("max_rank")) # type: ignore
return return_parkinson_backtests(broker,days,max_rank)
except Exception as e:
print(e)
return jsonify({"HORROR": f"{e}"})
return jsonify({'Error': 'API key invalid'}), 401
@stats_api.route("/fetch_profit_report")
def fetch_profit_report():
'''
@@ -438,37 +368,15 @@ def fetch_profit_report():
Returns: JSON object with profit report data
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
return jsonify(profit_report())
except Exception as e:
print(e)
return jsonify({"Error": f"{e}"})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
return jsonify(profit_report())
except Exception as e:
print(e)
return jsonify({"Error": f"{e}"})
@stats_api.route("/clear_caches")
def clear_hashes():
global hashes_db
'''
GET request
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
hashes_db = {"fetch_last_n_deals":0,
"fetch_last_n_deals_without_history":0,
"fetch_full_log":0,
"fetch_log":0,
"daily_totals":0,
"daily_totals_by_pair":0,
"monthly_totals":0,
"monthly_totals_by_pair":0,
"get_averages":0,
"total_profit":0,
"total_profit_by_pair":0}
return jsonify({"Done":0})
return jsonify({'Error': 'API key invalid'}), 401
@stats_api.route("/fetch_last_n_deals")
def fetch_last_n_deals():
@@ -476,21 +384,15 @@ def fetch_last_n_deals():
GET request
Parameter: 'amount_of_deals' -> int
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
parameter = request.args.get("amount_of_deals")
response_value = last_n_deals(parameter)
if not cache_requests:
return jsonify({"last_deals": response_value})
response_hash = hash(str({"last_deals": response_value}))
if hashes_db["fetch_last_n_deals"]!=response_hash:
hashes_db["fetch_last_n_deals"] = response_hash
return jsonify({"last_deals": response_value})
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return jsonify({"last_deals":""})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
parameter = request.args.get("amount_of_deals")
response_value = last_n_deals(parameter)
return jsonify({"last_deals": response_value})
except Exception as e:
print(e)
return jsonify({"last_deals":""})
@stats_api.route("/fetch_last_n_deals_without_history")
@@ -499,22 +401,16 @@ def fetch_last_n_deals_without_history():
GET request
Parameter: 'amount_of_deals' -> int
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
parameter = request.args.get("amount_of_deals")
#return jsonify({"last_deals": last_n_deals_without_history(parameter)})
response_value = last_n_deals_without_history(parameter)
if not cache_requests:
return jsonify({"last_deals": response_value})
response_hash = hash(str({"last_deals": response_value}))
if hashes_db["fetch_last_n_deals_without_history"]!=response_hash:
hashes_db["fetch_last_n_deals_without_history"] = response_hash
return jsonify({"last_deals": response_value})
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return jsonify({"last_deals":""})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
parameter = request.args.get("amount_of_deals")
#return jsonify({"last_deals": last_n_deals_without_history(parameter)})
response_value = last_n_deals_without_history(parameter)
return jsonify({"last_deals": response_value})
except Exception as e:
print(e)
return jsonify({"last_deals":""})
@stats_api.route("/fetch_full_log")
@@ -525,22 +421,16 @@ def fetch_full_log():
It trims the full log to 200 lines, to avoid sending too much data to the client.
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
exchange_name = request.args.get("exchange_name")
width = 0
last_lines,amount_of_lines = last_n_lines(f"../logs/{exchange_name}.log",width,0,full_log=True)
if not cache_requests:
return jsonify({"line": last_lines[-200:], "amount_of_lines": amount_of_lines})
response_hash = hash(str({"line": last_lines, "amount_of_lines": amount_of_lines}))
if hashes_db["fetch_full_log"]!=response_hash:
hashes_db["fetch_full_log"] = response_hash
return jsonify({"line": last_lines[-200:], "amount_of_lines": amount_of_lines})
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return {"line": [""]*width,"amount_of_lines": 0}
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
exchange_name = request.args.get("exchange_name")
width = 0
last_lines, amount_of_lines = tail_log(f"../logs/{exchange_name}.log", 200)
return jsonify({"line": last_lines[-200:], "amount_of_lines": amount_of_lines})
except Exception as e:
print(e)
return {"line": [""]*width,"amount_of_lines": 0}
@stats_api.route("/fetch_log")
@@ -551,45 +441,33 @@ def fetch_log():
'width' -> int
'amount' -> int
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
exchange_name = request.args.get("exchange_name")
width = int(request.args.get("width")) # type: ignore
amount = int(request.args.get("amount")) # type: ignore
last_lines,total_amount_of_lines = last_n_lines(f"../logs/{exchange_name}.log",width,amount)
if not cache_requests:
return jsonify({"line": last_lines, "amount_of_lines": total_amount_of_lines})
response_hash = hash(str({"line": last_lines, "amount_of_lines": total_amount_of_lines}))
if hashes_db["fetch_log"]!=response_hash:
hashes_db["fetch_log"] = response_hash
return jsonify({"line": last_lines, "amount_of_lines": total_amount_of_lines})
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return {"line": [""]*10,"amount_of_lines": 0}
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
exchange_name = request.args.get("exchange_name")
width = int(request.args.get("width")) # type: ignore
amount = int(request.args.get("amount")) # type: ignore
last_lines,total_amount_of_lines = last_n_lines(f"../logs/{exchange_name}.log",width,amount)
return jsonify({"line": last_lines, "amount_of_lines": total_amount_of_lines})
except Exception as e:
print(e)
return {"line": [""]*10,"amount_of_lines": 0}
@stats_api.route("/combined_totals")
def combined_totals():
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
daily_totals = daily_and_monthly_totals()
return jsonify({"combined": daily_totals})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
daily_totals = daily_and_monthly_totals()
return jsonify({"combined": daily_totals})
@stats_api.route("/daily_totals")
def get_daily_totals():
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
daily_totals = query_daily_totals()
if not cache_requests:
return jsonify(daily_totals)
response_hash = hash(str(daily_totals))
if hashes_db["daily_totals"]!=response_hash:
hashes_db["daily_totals"] = response_hash
return jsonify(daily_totals)
return jsonify({"no_changes": True})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
daily_totals = query_daily_totals()
return jsonify(daily_totals)
@stats_api.route("/daily_totals_by_pair")
@@ -599,36 +477,24 @@ def get_daily_totals_by_pair():
Parameters: 'base' -> string
'quote' -> string
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
base = request.args.get("base")
quote = request.args.get("quote")
daily_totals = query_daily_totals(f"{base}{quote}")
if not cache_requests:
return jsonify(daily_totals)
response_hash = hash(str(daily_totals))
if hashes_db["daily_totals_by_pair"]!=response_hash:
hashes_db["daily_totals_by_pair"] = response_hash
return jsonify(daily_totals)
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
base = request.args.get("base")
quote = request.args.get("quote")
daily_totals = query_daily_totals(f"{base}{quote}")
return jsonify(daily_totals)
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
@stats_api.route("/monthly_totals")
def get_monthly_totals():
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
monthly_totals = query_monthly_totals()
if not cache_requests:
return jsonify(monthly_totals)
response_hash = hash(str(monthly_totals))
if hashes_db["monthly_totals"]!=response_hash:
hashes_db["monthly_totals"] = response_hash
return jsonify(monthly_totals)
return jsonify({"no_changes": True})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
monthly_totals = query_monthly_totals()
return jsonify(monthly_totals)
@stats_api.route("/monthly_totals_by_pair")
@@ -638,67 +504,47 @@ def get_monthly_totals_by_pair():
Parameters: 'base' -> string
'quote' -> string
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
base = request.args.get("base")
quote = request.args.get("quote")
monthly_totals = query_monthly_totals(f"{base}{quote}")
if not cache_requests:
return jsonify(monthly_totals)
response_hash = hash(str(monthly_totals))
if hashes_db["monthly_totals_by_pair"]!=response_hash:
hashes_db["monthly_totals_by_pair"] = response_hash
return jsonify(monthly_totals)
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
base = request.args.get("base")
quote = request.args.get("quote")
monthly_totals = query_monthly_totals(f"{base}{quote}")
return jsonify(monthly_totals)
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
@stats_api.route("/get_averages")
def get_averages():
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
daily_totals = query_daily_totals()
val_30 = 0
val_7 = 0
acc_30 = []
acc_7 = []
for x in sorted(daily_totals):
acc_30.append(daily_totals[x])
acc_7.append(daily_totals[x])
length_30 = min(30,len(acc_30)) #Last 30 days
length_7 = min(7,len(acc_7)) #Last 7 days
for _ in range(length_30):
val_30 += acc_30.pop()
for _ in range(length_7):
val_7 += acc_7.pop()
if not cache_requests:
return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
response_hash = hash(str({"30_day": val_30/length_30, "7_day": val_7/length_7}))
if hashes_db["get_averages"]!=response_hash:
hashes_db["get_averages"] = response_hash
return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
daily_totals = query_daily_totals()
val_30 = 0
val_7 = 0
recent_days = sorted(daily_totals.keys(), reverse=True)[:30]
acc_30 = [daily_totals[date] for date in recent_days[:30]]
acc_7 = [daily_totals[date] for date in recent_days[:7]]
length_30 = min(30,len(acc_30)) #Last 30 days
length_7 = min(7,len(acc_7)) #Last 7 days
for _ in range(length_30):
val_30 += acc_30.pop()
for _ in range(length_7):
val_7 += acc_7.pop()
return jsonify({"30_day": val_30/length_30, "7_day": val_7/length_7})
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
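The reworked get_averages above sorts the date keys newest-first, keeps the 30 most recent, and then sums the last 30 and last 7 recorded days with pop loops. The same arithmetic can be written more compactly with slices and sum(); an equivalent sketch for the body of the try block (the max(1, ...) guard only avoids a division by zero on an empty table, which the current code would surface through the except branch):

recent = [daily_totals[d] for d in sorted(daily_totals, reverse=True)[:30]]
avg_30 = sum(recent) / max(1, len(recent))         # average over up to 30 most recent days
avg_7 = sum(recent[:7]) / max(1, len(recent[:7]))  # average over up to 7 most recent days
return jsonify({"30_day": avg_30, "7_day": avg_7})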
@stats_api.route("/total_profit")
def total_profit():
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
total = query_total_profit()
if not cache_requests:
return jsonify({"Total profit": total})
response_hash = hash(str({"Total profit": total}))
if hashes_db["total_profit"]!=response_hash:
hashes_db["total_profit"] = response_hash
return jsonify({"Total profit": total})
return jsonify({"no_changes": True})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
total = query_total_profit()
return jsonify({"Total profit": total})
@stats_api.route("/total_profit_by_pair")
@@ -708,33 +554,25 @@ def total_profit_by_pair():
Parameters: 'base' -> string
'quote' -> string
'''
if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
try:
base = request.args.get("base")
quote = request.args.get("quote")
total = query_total_profit(f"{base}{quote}")
if not cache_requests:
return jsonify({"Total profit": total})
response_hash = hash(str({"Total profit": total}))
if hashes_db["total_profit_by_pair"]!=response_hash:
hashes_db["total_profit_by_pair"] = response_hash
return jsonify({"Total profit": total})
return jsonify({"no_changes": True})
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
return jsonify({'Error': 'API key invalid'}), 401
if not "X-API-KEY" in request.headers or not request.headers.get("X-API-KEY") in get_valid_keys():
return jsonify({'Error': 'API key invalid'}), 401
try:
base = request.args.get("base")
quote = request.args.get("quote")
total = query_total_profit(f"{base}{quote}")
return jsonify({"Total profit": total})
except Exception as e:
print(e)
return jsonify({'Error': 'Halp'})
if __name__=="__main__":
# Load valid keys from database
valid_keys = load_keys_from_db("api_credentials.db")
#Waitress
logger = logging.getLogger('waitress')
logger.setLevel(logging.INFO)
serve(stats_api,host="0.0.0.0",port=5010, threads=32)
serve(stats_api,host="0.0.0.0",port=5010)
#Flask
# app.run(host="0.0.0.0", port=5010, debug=True)
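Note that the serve() call now omits threads=32, so waitress falls back to its default worker pool (4 threads per the waitress documentation). If the previous concurrency was intentional, the argument can simply be kept:

# Restore the explicit thread count if the old behaviour is desired:
serve(stats_api, host="0.0.0.0", port=5010, threads=32)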