fetch pair volatility
This commit is contained in:
parent 36a8593dee
commit 13fe6cd00c
@@ -0,0 +1,182 @@
'''
Downloads OHLCV data from the exchanges, calculates volatilities and writes the results to a database.
'''


import ccxt
import time
import sqlite3
import math
import statistics
from threading import Thread


def yang_zhang(candles):
    '''
    Calculates Yang-Zhang volatility

    candles: list of candles in the form of [time, open, high, low, close, volume]

    Yang, D., & Zhang, Q. (2000). Drift-Independent Volatility Estimation Based on High, Low, Open, and Close Prices. The Journal of Business, 73(3), 477–492. https://doi.org/10.1086/209650
    '''
    normalized_open = []
    normalized_high = []
    normalized_down = []
    normalized_close = []
    rogers_satchell = []

    #Calculating normalized values
    for x in range(1,len(candles)):
        normalized_open.append(math.log(candles[x][1]/candles[x-1][4]))
        normalized_high.append(math.log(candles[x][2]/candles[x][1]))
        normalized_down.append(math.log(candles[x][3]/candles[x][1]))
        normalized_close.append(math.log(candles[x][4]/candles[x][1]))
        rogers_satchell.append(normalized_high[x-1]*(normalized_high[x-1]-normalized_close[x-1])+normalized_down[x-1]*(normalized_down[x-1]-normalized_close[x-1]))

    #Calculating volatilities
    rs_volatility = math.sqrt(sum(rogers_satchell)/(len(rogers_satchell)))
    no_volatility = statistics.stdev(normalized_open)
    nc_volatility = statistics.stdev(normalized_close)

    #Calculate constant k
    k = 0.34/(1.34+((len(candles)+1)/(len(candles)-1)))

    return math.sqrt(no_volatility**2+k*nc_volatility**2+(1-k)*rs_volatility**2)

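# Illustrative note (not part of the original logic): yang_zhang() returns a per-candle
# volatility, sigma_YZ = sqrt(sigma_o^2 + k*sigma_c^2 + (1-k)*sigma_RS^2), where sigma_o
# is the stdev of open/previous-close log returns, sigma_c the stdev of close/open log
# returns, and sigma_RS the Rogers-Satchell term. A rough usage sketch, assuming an
# already initialized ccxt exchange instance:
#
#   candles = exchange.fetch_ohlcv("BTC/USDT", timeframe="5m", limit=288)
#   sigma = yang_zhang(candles)      # volatility per 5m candle, as a fraction
#   print(round(sigma * 100, 2))     # same percentage scaling used later in fetch_data()
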
def parkinson(candles):
    '''
    Calculates Parkinson volatility.

    candles: list of lists with the format [timestamp,open,high,low,close,volume]

    Parkinson, M. (1980) The Extreme Value Method for Estimating the Variance of the Rate of Return. Journal of Business, 53, 61-65. https://doi.org/10.1086/296071
    '''

    logsquared = [math.log(float(x[2])/float(x[3]))**2 for x in candles]
    avg = sum(logsquared)/len(logsquared)
    return math.sqrt(avg/(4*math.log(2)))

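# Illustrative note (not part of the original logic): parkinson() implements
# sigma_P = sqrt( (1/(4*ln(2)*N)) * sum_i ln(high_i/low_i)^2 ), i.e. the mean squared
# high/low log range scaled by 1/(4*ln 2). For a single candle with high=105 and low=100:
#   math.sqrt(math.log(105/100)**2 / (4*math.log(2)))   # ~0.0293, about 2.93%
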
def write_to_db(broker,data):
    """
    Write data to database
    """

    database_connection = sqlite3.connect(f"data/{broker}.db")
    database_cursor = database_connection.cursor()
    for item in data:
        database_cursor.execute('INSERT INTO volatilities_table VALUES(?, ?, ?, ?, ?)', [item, int(time.time()), data[item][0], data[item][1], data[item][2]])
    database_connection.commit()
    database_connection.close()
    return 0

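# Illustrative note (not part of the original logic): `data` is expected to be a mapping
# of pair -> [yang_zhang, parkinson, volume], matching what fetch_data() stores in
# `volatilities`, e.g. (values are examples only):
#   write_to_db("binance", {"BTC/USDT": [2.15, 1.87, 123456789]})
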
def get_pair_list(broker, inclusions = ["/USDT"], exclusions = []):
    """
    Get the list of pairs from exchange
    """

    exchange = getattr(ccxt, broker)
    exchange = exchange({
        "apiKey": "",
        "secret": "",
        "password": "",
        "timeout": 30000,
        "enableRateLimit": True
    })

    pair_list = [pair for pair in exchange.fetch_tickers().keys()]

    for inclusion in inclusions:
        for pair in pair_list.copy():
            if inclusion not in pair:
                pair_list.remove(pair)

    for exclusion in exclusions:
        for pair in pair_list.copy():
            if exclusion in pair:
                pair_list.remove(pair)

    return pair_list

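# Illustrative note (not part of the original logic): with the defaults, only symbols
# containing "/USDT" are kept; exclusions then drop any remaining matches. Hypothetical
# calls, assuming a broker name accepted by ccxt:
#   get_pair_list("binance")                               # all */USDT pairs
#   get_pair_list("binance", exclusions=["UP/", "DOWN/"])  # e.g. filter out leveraged tokens
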
def fetch_data(broker: str, pair_list: list, timeframe: str, samples: int):
    """
    Fetch data from exchange
    """

    global volatilities

    wait_time = .25 #Sleep time between requests
    index = 0

    exchange = getattr(ccxt, broker)
    exchange = exchange({
        "apiKey": "",
        "secret": "",
        "password": "",
        "timeout": 30000,
        "enableRateLimit": True
    })

    for pair in pair_list:
        trading_volume = 0
        index += 1
        try:
            data = exchange.fetch_ohlcv(pair,timeframe=timeframe,limit=samples)
        except Exception as e:
            print(e)
            continue
        try:
            parkinson_volatility = parkinson(data)
            yangzhang_volatility = yang_zhang(data)
        except Exception as e:
            print(e)
            continue
        for item in data:
            trading_volume += item[4]*item[5]
        volatilities[broker][pair] = [round(yangzhang_volatility*100,2),round(parkinson_volatility*100,2),int(trading_volume)]
        print(f"{pair} on {broker} ready, {len(pair_list)-index} pairs remaining.")
        time.sleep(wait_time)

    return 0

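# Illustrative note (not part of the original logic): trading_volume approximates
# quote-currency turnover by summing close*base_volume per candle, e.g. a candle closing
# at 30000 with base volume 1.5 contributes 30000 * 1.5 = 45000 to the total.
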
if __name__=="__main__":

    threads = []
    exchanges = ["binance","gateio","kucoin","okx"]
    pair_list = []
    volatilities = {item:{} for item in exchanges}
    exchange_list = [item for item in volatilities]
    samples = 288
    timeframe = "5m"
    minimum_volume = 0

    #Create databases for each exchange
    for item in exchange_list:
        database_connection = sqlite3.connect(f"data/{item}.db")
        database_cursor = database_connection.cursor()
        database_cursor.execute('''
            CREATE TABLE IF NOT EXISTS volatilities_table (
                pair TEXT,
                timestamp INTEGER,
                yang_zhang REAL,
                parkinson REAL,
                volume REAL)''')
        database_connection.commit()
        database_connection.close()

    for broker in exchange_list:
        threads.append(Thread(target=fetch_data,args=(broker, get_pair_list(broker), timeframe, samples,)))

    for thread in threads:
        thread.start()
    for thread in threads:
        thread.join()

    for item in exchange_list:
        write_to_db(item,volatilities[item])
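# Illustrative note (not part of the original logic): sqlite3.connect() does not create
# missing directories, so the relative "data/" folder must exist before the script runs,
# e.g.
#   import os
#   os.makedirs("data", exist_ok=True)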