deals cache

This commit is contained in:
Nicolás Sánchez 2024-11-26 16:47:07 -03:00
parent 9eb6e0b548
commit b554caafec
5 changed files with 77 additions and 11 deletions

View File

@@ -1,3 +1,7 @@
+2024.11.26:
+. Implemented deals cache to reduce db load until the new database service is implemented.
+. Added a new API endpoint: /get_deals_cache.
 2024.11.25:
 . Implemented a short log list: In order to avoid constant log file queries, a list of the last few log entries is stored in memory and it's returned
 via /get_log_list API endpoint.

View File

@@ -25,7 +25,8 @@ class broker:
         self.logger = logger(self.read_config)
         self.write_order_history = True #This should be a toggle in config_file
-        self.database_connection = sqlite3.connect("profits/profits_database.db")
+        self.profits_database_filename = "profits/profits_database.db"
+        self.database_connection = sqlite3.connect(self.profits_database_filename)
         self.database_cursor = self.database_connection.cursor()
         self.database_cursor.execute('''
             CREATE TABLE IF NOT EXISTS profits_table (
@@ -42,6 +43,28 @@ class broker:
         self.exchange.load_markets()

+        #Populates deals cache
+        self.deals_cache_length = 10
+        self.deals_list = self.preload_deals(amount_to_preload=self.deals_cache_length)
+
+    def preload_deals(self,amount_to_preload=10):
+        '''
+        Reads the last n deals from the database and returns them in a list
+        '''
+        connection = sqlite3.connect(self.profits_database_filename)
+        cursor = connection.cursor()
+        cursor.execute(f"SELECT * FROM profits_table WHERE exchange_name = ? ORDER BY timestamp DESC LIMIT ?", (self.get_exchange_name(), amount_to_preload))
+        result = cursor.fetchall()
+        connection.close()
+        return [(row[0],row[1],row[2],row[3],row[4],"") for row in result]
+
+    def get_deals_cache(self):
+        return self.deals_list
+
     def all_markets(self,no_retries=False):
         retries = self.retries
         while retries>0:
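For reference, each entry that preload_deals() puts into the cache mirrors a profits_table row, with the order_history column blanked out to keep the cache small. A minimal sketch of what get_deals_cache() could return after preloading; the values below are made up for illustration:

# Hypothetical contents of broker.get_deals_cache() after preload_deals().
# Tuples follow the (timestamp, pair, amount, exchange_name, order_id, order_history)
# format, newest deal first, with order_history replaced by "".
deals_cache_example = [
    (1732650000.0, "BTC/USDT", 12.34, "binance", "123456789", ""),
    (1732640000.0, "ETH/USDT", -3.21, "binance", "987654321", ""),
]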
@@ -73,7 +96,7 @@ class broker:
         retries = self.retries
         while retries>0:
             try:
-                database_connection = sqlite3.connect("profits/profits_database.db")
+                database_connection = sqlite3.connect(self.profits_database_filename)
                 database_cursor = database_connection.cursor()
                 database_cursor.execute(f"SELECT * FROM profits_table WHERE timestamp >= {time.time()-timespan} ORDER BY timestamp")
                 rows = database_cursor.fetchall()
@@ -87,6 +110,16 @@ class broker:
         return []

+    def write_profit_to_cache(self,dataset):
+        '''
+        dataset format: (timestamp,pair,amount,exchange_name,order_id,order_history)
+        '''
+        self.deals_list.insert(0,(dataset[0],dataset[1],dataset[2],dataset[3],dataset[4],""))
+        self.deals_list = self.deals_list[:self.deals_cache_length]
+        return 0
+
     def write_profit_to_db(self,dataset,no_retries=False):
         '''
         dataset format: (timestamp,pair,amount,exchange_name,order_id,order_history)
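The cache update itself is a bounded list: the newest deal goes to the front and the list is trimmed back to deals_cache_length. A self-contained sketch of the same pattern, with illustrative names and values that are not taken from the codebase:

# Minimal sketch of the bounded-cache pattern used by write_profit_to_cache().
# CACHE_LENGTH and the sample dataset are assumptions for illustration only.
CACHE_LENGTH = 10

def push_deal(cache, dataset):
    # dataset format: (timestamp, pair, amount, exchange_name, order_id, order_history)
    cache.insert(0, (dataset[0], dataset[1], dataset[2], dataset[3], dataset[4], ""))
    return cache[:CACHE_LENGTH]

cache = []
cache = push_deal(cache, (1732650000.0, "BTC/USDT", 12.34, "binance", "123456789", "{}"))
print(cache)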
@@ -94,7 +127,7 @@ class broker:
         retries = self.retries
         while retries>0:
             try:
-                database_connection = sqlite3.connect("profits/profits_database.db")
+                database_connection = sqlite3.connect(self.profits_database_filename)
                 database_cursor = database_connection.cursor()
                 database_cursor.execute('INSERT INTO profits_table VALUES(?, ?, ?, ?, ?, ?)', dataset)
                 database_connection.commit()
@@ -118,7 +151,7 @@ class broker:
         retries = self.retries
         while retries>0:
             try:
-                database_connection = sqlite3.connect("profits/profits_database.db")
+                database_connection = sqlite3.connect(self.profits_database_filename)
                 database_cursor = database_connection.cursor()
                 database_cursor.execute(f"SELECT * FROM profits_table WHERE pair = '{order['symbol']}' ORDER BY timestamp DESC LIMIT 1;")
                 rows = database_cursor.fetchall()
@@ -324,7 +357,7 @@ class broker:
         retries = self.retries
         while retries>0:
             try:
-                if self.read_config["exchange"]=="binance":
+                if self.get_exchange_name()=="binance":
                     a = self.exchange.fetch_last_prices(pair_list)
                     return {x: a[x]["price"] for x in a.keys()}
                 else:
@@ -465,7 +498,7 @@ class broker:
         pairs = []
         try:
             orders = []
-            if self.read_config["exchange"]=="binance":
+            if self.get_exchange_name()=="binance":
                 orders = self.get_opened_orders_binance(pairs)
             else:
                 orders = self.get_opened_orders()
@@ -487,7 +520,7 @@ class broker:
         pairs = []
         try:
             #id_list = []
-            if self.read_config["exchange"]=="binance":
+            if self.get_exchange_name()=="binance":
                 return self.get_opened_orders_binance(pairs)
             return self.get_opened_orders()
             #else:
@@ -625,7 +658,7 @@ class broker:
         pair = symbol
         while retries>0:
             try:
-                if self.read_config["exchange"]=="gateio" and side=="buy" and not amount_in_base:
+                if self.get_exchange_name()=="gateio" and side=="buy" and not amount_in_base:
                     new_order = self.exchange.create_market_buy_order_with_cost(pair, size)
                 else:
                     order_book = self.get_order_book(symbol)
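The remaining hunks in this file swap direct reads of read_config["exchange"] for the get_exchange_name() helper. The helper itself is not part of this diff; presumably it is just a thin accessor along these lines (a sketch, not the actual implementation):

# Assumed shape of the existing broker.get_exchange_name() helper;
# the real method is not shown in this commit.
def get_exchange_name(self):
    return self.read_config["exchange"]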

main.py
View File

@@ -22,7 +22,7 @@ In case the permissions of the certificate changes, reset them this way:
 # ll /etc/letsencrypt/
 '''
-version = "2024.11.25"
+version = "2024.11.26"

 '''
 Color definitions. If you want to change them, check the reference at https://en.wikipedia.org/wiki/ANSI_escape_code#Colors
@@ -1102,6 +1102,20 @@ def get_log_list():
     return jsonify({'Error': 'API key invalid'}), 401

+@base_api.route("/get_deals_cache", methods=['GET'])
+def get_deals_cache():
+    '''
+    GET request
+    Parameters:
+        None
+    '''
+    if "X-API-KEY" in request.headers and request.headers.get("X-API-KEY") in valid_keys:
+        return unwrapped_get_deals_cache()
+    return jsonify({'Error': 'API key invalid'}), 401
+
 @base_api.route("/trader_time", methods=['GET'])
 def trader_time():
     '''
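A hedged example of how a client could call the new endpoint, assuming the Flask API is reachable over HTTPS; the base URL and API key below are placeholders:

# Hypothetical client call to /get_deals_cache; base URL and API key are placeholders.
import requests

response = requests.get(
    "https://example.com/get_deals_cache",
    headers={"X-API-KEY": "your-api-key-here"},
    timeout=10,
)
print(response.status_code)  # 401 if the key is not in valid_keys
print(response.json())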
@@ -2020,6 +2034,20 @@ def unwrapped_get_log_list():
     return jsonify({"Logs": broker.logger.get_log_list()})

+def unwrapped_get_deals_cache():
+    '''
+    Retrieves the last n deals from the broker's deals cache.
+    This list is kept in memory, to avoid having to query the database every time.
+    Parameters:
+        None
+    Returns:
+        jsonify: A jsonified dictionary containing the last n cached deals.
+    '''
+    return jsonify({"Deals": broker.get_deals_cache()})
+
 def unwrapped_call_wait_time(wait_time):
     '''
     Modifies the time between some API calls and retries.
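Since Flask's jsonify serializes the cached tuples as JSON arrays, the response body should look roughly like this (values are illustrative):

# Approximate shape of the /get_deals_cache response; values are made up.
expected_response = {
    "Deals": [
        [1732650000.0, "BTC/USDT", 12.34, "binance", "123456789", ""],
        [1732640000.0, "ETH/USDT", -3.21, "binance", "987654321", ""],
    ]
}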

View File

@@ -7,8 +7,7 @@ Mandatory:
 3. Consolidate vocabulary (trader, pair and bot; instance & trader)
 4. Base add for short traders.
 5. Proper handling of order price too high/low in OKX (rare, it happens when under heavy volatility).
-6. Keep a copy of the instance's last n log entries on RAM, to speed up querying.
-7. Do the same for the last n deals. Load a few from the db at instance initialization.
+6. Redo all the database code. It should be a separate service (running locally or not) that all instances can access.

 Would be nice to have:

View File

@@ -1327,6 +1327,8 @@ class trader:
         try:
             order_history = json.dumps(self.status_dict["deal_order_history"]) if write_deal_order_history else ""
             dataset = (time.time(),self.pair,amount,self.broker.get_exchange_name(),str(orderid),order_history)
+            #Write profit to cache
+            self.broker.write_profit_to_cache(dataset)
             return self.broker.write_profit_to_db(dataset)
         except Exception as e:
             self.broker.logger.log_this(f"Exception while writing profit: {e}",1,self.pair)