Skip to content

Commit

Permalink
only call klines_caching_service in backtesting
Browse files Browse the repository at this point in the history
  • Loading branch information
Azulinho committed Nov 21, 2022
1 parent 8761d95 commit 881f751
Showing 1 changed file with 153 additions and 12 deletions.
165 changes: 153 additions & 12 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
floor_value,
mean,
percent,
requests_with_backoff,
)


Expand Down Expand Up @@ -1840,19 +1841,21 @@ def backtesting(self) -> None:
def load_klines_for_coin(self, coin) -> bool:
"""fetches from binance or a local cache klines for a coin"""

ok: bool = False
data: dict = {}
# fetch all the available klines for this coin, for the last
# 60min, 24h, and 1000 days
ok: bool = False
# TODO: only run this in backtesting mode, not live or testnet
# in live/testnet, go directly to binance.
response = requests.get(
self.klines_caching_service_url
+ f"?symbol={coin.symbol}"
+ f"&date={coin.date}"
+ f"&mode={self.mode}"
+ f"&debug={self.debug}"
)
data = response.json()
if self.mode in ["testnet", "live"]:
data = self.fetch_klines_from_binance(coin.symbol, coin.date)
else:
response: requests.Response = requests.get(
self.klines_caching_service_url
+ f"?symbol={coin.symbol}"
+ f"&date={coin.date}"
+ f"&mode={self.mode}"
+ f"&debug={self.debug}"
)
data = response.json()
# TODO: rework this
if data:
ok = True
Expand All @@ -1863,7 +1866,7 @@ def load_klines_for_coin(self, coin) -> bool:
coin.highest = data["highest"]

# trim values
unit_values = {
unit_values: dict = {
"m": (60, 1),
"h": (24, 60),
# for 'Days' we retrieve 1000 days, binance API default
Expand All @@ -1881,6 +1884,144 @@ def load_klines_for_coin(self, coin) -> bool:

return ok

def fetch_klines_from_binance(self, symbol: str, date: int) -> dict:
    """Fetches klines for *symbol* directly from the binance REST API.

    When we initialise a coin we pull three sets of klines ending at
    *date* (a unix timestamp in seconds): the last 60 minutes, the last
    24 hours, and the last 1000 days (the binance API default for 'd').

    Returns a dict shaped as::

        {"lowest"|"averages"|"highest": {"m"|"h"|"d"|"s": [(ts, value), ...]}}

    Units that could not be fetched or parsed are left as empty lists.
    """

    # base url to pull klines data from
    api_url: str = f"https://api.binance.com/api/v3/klines?symbol={symbol}&"

    # per unit: (how many entries we keep, how many minutes one entry
    # of that unit covers). Lets us calculate how far back in m/h/d we
    # are going to pull klines data from.
    unit_values: dict = {
        "m": (60, 1),
        "h": (24, 60),
        # for 'Days' we retrieve 1000 days, binance API default
        "d": (1000, 60 * 24),
    }

    # build all the query strings we need to fetch data from binance
    binance_query_strings: dict = {}
    for unit in ["m", "h", "d"]:
        # find out from what date we need to pull klines while in
        # backtesting mode: step one unit back from our backtest date.
        _, minutes_before_now = unit_values[unit]

        # binance endTime is expressed in milliseconds
        end_unix_time: int = int((date - (60 * minutes_before_now)) * 1000)

        binance_query_strings[unit] = (
            f"{api_url}endTime={end_unix_time}&interval=1{unit}"
        )

    # initialize buckets{} with empty lowest[], averages[], highest[]
    # lists for every unit.
    buckets: dict = {}
    for bucket in ["lowest", "averages", "highest"]:
        buckets[bucket] = {}
        for unit in ["m", "h", "d", "s"]:
            buckets[bucket][unit] = []

    # now query binance and populate our buckets dict
    for unit in ["m", "h", "d"]:
        # call binance for the list of klines for this unit
        # ('m', 'h', 'd')
        ok, klines = self.call_binance_for_klines(binance_query_strings[unit])
        if not ok:
            continue

        # get a dict with the lowest, averages, highest lists out of
        # the raw binance klines
        ok, low_avg_high = self.populate_values(klines, unit)
        if not ok:
            continue

        timeslice, _ = unit_values[unit]
        for bucket in ["lowest", "averages", "highest"]:
            buckets[bucket][unit] = low_avg_high[bucket]
            # trim our lists so that we don't keep more values than we
            # should (60 minutes in the minutes bucket, 24 hours in
            # the hours bucket). klines are ordered oldest->newest, so
            # drop from the FRONT to keep the most recent entries
            # (pop() from the end would discard the newest data).
            while len(buckets[bucket][unit]) > timeslice:
                buckets[bucket][unit].pop(0)
    return buckets

def call_binance_for_klines(self, query: str) -> Tuple:
    """Calls upstream binance and retrieves the klines for a coin.

    Returns (ok, klines). ok is True with an empty list for a 400
    (binance has no klines for this coin), True with the decoded JSON
    payload for a 200, and False with an empty list for any other
    status, so callers can skip that unit instead of parsing an error
    body as klines.
    """
    logging.info(f"calling binance on {query}")
    response = requests_with_backoff(query)
    if response.status_code == 400:
        # 400 typically means binance has no klines for this coin
        logging.warning(f"got a 400 from binance for {query}")
        return (True, [])
    if response.status_code != 200:
        # any other non-200 (429, 5xx, ...) carries an error payload,
        # not klines; signal failure rather than returning garbage.
        logging.warning(
            f"got a {response.status_code} from binance for {query}"
        )
        return (False, [])
    return (True, response.json())

def process_klines_line(self, kline):
    """returns date, low, avg, high from a kline"""
    # a binance kline is a fixed 12-field list; we only need the
    # high, the low, and the close time (in milliseconds).
    (_, _, high, low, _, _, closetime, _, _, _, _, _) = kline

    low, high = float(low), float(high)
    avg = (low + high) / 2
    # transform the close time into a timestamp consumable by our bot
    date = float(c_from_timestamp(closetime / 1000).timestamp())

    return date, low, avg, high

def populate_values(self, klines, unit) -> Tuple:
    """builds averages[], lowest[], highest[] out of klines"""

    # how many records we keep for each unit
    unit_buckets: Dict[str, int] = {
        "m": 60,
        "h": 24,
        # for 'Days' we retrieve 1000 days, binance API default
        "d": 1000,
    }
    timeslice: int = unit_buckets[unit]

    # retrieve and calculate the lowest, highest, averages from the
    # klines data, transforming the dates into consumable timestamps
    # that work for our bot.
    rows = [self.process_klines_line(line) for line in klines]

    # populate the data coin buckets, keeping only the required number
    # of records. This could possibly be optimized, but it only runs
    # the once, when we initialise a coin.
    buckets: dict = {
        "lowest": [(date, low) for date, low, _, _ in rows][-timeslice:],
        "averages": [(date, avg) for date, _, avg, _ in rows][-timeslice:],
        "highest": [(date, high) for date, _, _, high in rows][-timeslice:],
    }

    return (True, buckets)

def print_final_balance_report(self):
"""calculates and outputs final balance"""

Expand Down

0 comments on commit 881f751

Please sign in to comment.