Merge pull request #211 from Azulinho/next_release
Next release
Azulinho authored Jun 24, 2023
2 parents cd73754 + f92aaeb commit fdcd3a6
Showing 5 changed files with 72 additions and 37 deletions.
4 changes: 2 additions & 2 deletions lib/helpers.py
@@ -11,7 +11,7 @@
import udatetime
from binance.client import Client
from filelock import SoftFileLock
from tenacity import retry, wait_exponential
from tenacity import retry, wait_fixed, stop_after_delay


def mean(values: list[float]) -> float:
@@ -44,7 +44,7 @@ def c_from_timestamp(date: float) -> datetime:
return datetime.fromtimestamp(date)


@retry(wait=wait_exponential(multiplier=1, max=3))
@retry(wait=wait_fixed(2), stop=stop_after_delay(10))
def cached_binance_client(access_key: str, secret_key: str) -> Client:
"""retry wrapper for binance client first call"""

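Both retry decorators touched by this PR move from exponential backoff to a fixed cadence with a hard deadline. A minimal sketch of the new policy, assuming a hypothetical flaky_connect() in place of the real Client construction:

import random

from tenacity import retry, wait_fixed, stop_after_delay


@retry(wait=wait_fixed(2), stop=stop_after_delay(10))
def flaky_connect() -> str:
    """Hypothetical stand-in for the first binance Client() call:
    tenacity re-invokes it every 2 seconds and gives up (raising
    RetryError) once 10 seconds have elapsed, rather than backing
    off exponentially as the old decorator did."""
    if random.random() < 0.5:
        raise ConnectionError("transient failure")
    return "connected"


print(flaky_connect())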
12 changes: 9 additions & 3 deletions run
@@ -14,7 +14,7 @@ function usage() {
echo "./run config-endpoint-service BIND=0.0.0.0 CONFIG_FILE=myconfig.yaml"
echo "./run klines-caching-service BIND=0.0.0.0"
echo "./run price_log_service BIND=0.0.0.0"
echo "./run download_price_logs FROM=20220101 TO=20220131"
echo "./run download_price_logs FROM=20220101 TO=20220131 UNIT=1m"
}

function free_port () { # looks for a free TCP port
@@ -72,6 +72,10 @@ function download_price_logs() { # downloads klines logs from binance
exit 1
fi

if [ -z "$UNIT" ]; then
export UNIT="1m"
fi

docker run --rm \
${USE_TTY} \
${DOCKER_RUN_AS} \
@@ -81,7 +85,7 @@
${RUN_IN_BACKGROUND} \
${IMAGE}:${TAG} \
/cryptobot/.venv/bin/python -u /cryptobot/utils/pull_klines.py \
-s ${FROM} -e ${TO}
-s ${FROM} -e ${TO} -u ${UNIT}
}

function docker_network() { # creates a docker network
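The new UNIT variable above defaults to "1m" and is forwarded to pull_klines.py as -u. A hedged sketch of how such a flag could be wired with argparse — the VALID_UNITS set (Binance's documented kline intervals) and the parser wiring are illustrative assumptions, not a copy of the repository's code:

import argparse

# Assumption: UNIT maps onto Binance kline intervals; this set is
# illustrative and may not match the repository's exact validation.
VALID_UNITS = {
    "1m", "3m", "5m", "15m", "30m",
    "1h", "2h", "4h", "6h", "8h", "12h",
    "1d", "3d", "1w",
}

parser = argparse.ArgumentParser()
parser.add_argument("-s", "--start", help="start day to fetch klines for")
parser.add_argument("-e", "--end", help="end day to fetch klines for")
parser.add_argument(
    "-u",
    "--unit",
    default="1m",  # mirrors the UNIT="1m" fallback in ./run
    choices=sorted(VALID_UNITS),
    help="kline interval to download",
)
args = parser.parse_args()
print(f"downloading {args.unit} klines from {args.start} to {args.end}")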
@@ -307,9 +311,11 @@ function github_actions_ci_pr_docker_tests() {
cat tests/price.log.gz | grep BTCUSDT | grep 2021-12-${ta} |gzip -1 > log/BTCUSDT/202112${ta}.log.gz
done
cp tests/index.json.gz log/
cp tests/index_v2.json.gz log/

export PRICE_LOG_PORT=$( cat ${STATE_DIR}/.price_log_service.port)
curl --output /dev/null http://${DOCKER_IP}:${PRICE_LOG_PORT}/index.json.gz
curl --output /dev/null http://${DOCKER_IP}:${PRICE_LOG_PORT}/index_v2.json.gz

echo BuyMoonSellRecoveryStrategy.yaml
cp tests/BuyMoonSellRecoveryStrategy.yaml configs/
@@ -352,7 +358,7 @@ function github_actions_ci_pr_docker_tests() {
TAG=pr CONFIG_FILE=prove-backtesting.yaml

wc -l results/prove-backtesting.prove-backtesting.yaml.txt \
| grep '29'
| grep '44'

for ta in 01 02 03 04 05 06 07 08 09
do
Binary file added tests/index_v2.json.gz
Binary file not shown.
17 changes: 6 additions & 11 deletions utils/prove-backtesting.py
@@ -16,10 +16,10 @@
import pandas as pd
import requests
import yaml
from tenacity import retry, wait_exponential
from tenacity import retry, wait_fixed, stop_after_delay


@retry(wait=wait_exponential(multiplier=2, min=1, max=30))
@retry(wait=wait_fixed(2), stop=stop_after_delay(10))
def get_index_json(query: str) -> requests.Response:
"""retry wrapper for requests calls"""
response: requests.Response = requests.get(query, timeout=5)
@@ -455,17 +455,18 @@ def write_all_coin_configs(
"""generate all coinfiles"""

r: requests.Response = get_index_json(
f"{self.price_log_service_url}/index.json.gz"
f"{self.price_log_service_url}/index_v2.json.gz"
)
index: Any = json.loads(r.content)
index_dates = index["DATES"]

next_run_coins: Dict[str, Any] = self.filter_on_avail_days_with_log(
dates, index
dates, index_dates
)

if self.enable_new_listing_checks:
next_run_coins = self.filter_on_coins_with_min_age_logs(
index, dates[-1], next_run_coins
index_dates, dates[-1], next_run_coins
)
for coin, _price_logs in next_run_coins.items():
self.write_single_coin_config(coin, _price_logs, thisrun)
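For consumers, the structural change in this hunk is that the old date -> [symbols] map now lives under the "DATES" key of index_v2.json.gz. A minimal sketch of that fetch-and-parse path, with base_url as a placeholder and the tenacity wrapper omitted for brevity:

import json

import requests

base_url = "http://localhost:8998"  # placeholder for price_log_service_url

# Mirroring the diff: json.loads(r.content) assumes requests hands
# back an already-decompressed body (Content-Encoding: gzip); for a
# raw .gz payload, gzip.decompress(r.content) would be needed first.
r = requests.get(f"{base_url}/index_v2.json.gz", timeout=5)
index = json.loads(r.content)

index_dates = index["DATES"]  # v2 nests the old v1 map here
for date, symbols in sorted(index_dates.items()):
    print(date, len(symbols), "symbols available")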
@@ -776,12 +777,6 @@ def run_optimized_config(self, s_investment: float) -> float:
price_logs = pv.generate_price_log_list(rollforward_dates)
tickers = pv.gather_best_results_from_backtesting_log("coincfg")

# if our backtesting gave us no tickers,
# we'll skip this forward testing run
if not tickers:
log_msg("forwardtesting config contains no tickers, skipping run")
continue

log_msg(
f"now forwardtesting {rollforward_dates[0]}...{rollforward_dates[-1]}"
)
76 changes: 55 additions & 21 deletions utils/pull_klines.py
@@ -49,52 +49,86 @@ def daterange(date1, date2):
return dates


def generate_index(log_dir="log"):
"""generates index.json with dates <- [coins]"""
def gather_symbols_and_logs(log_dir="log") -> tuple[set[str], set[str]]:
"""returns lists of symbols and dates"""
date_list = set()
symbols_list = set()
index = {}

# gather all date.log.gz logs and
# all symbol dirs
for item in sorted(os.listdir(log_dir)):
for dir_item in sorted(os.listdir(log_dir)):
if (
os.path.isfile(f"{log_dir}/{item}")
and item.startswith("20")
and ".log." in item
os.path.isfile(f"{log_dir}/{dir_item}")
and dir_item.startswith("20")
and dir_item.endswith(".log.gz")
):
date = item.split(".")[0]
date: str = dir_item.split(".")[0]
date_list.add(date)
if os.path.isdir(f"{log_dir}/{item}"):
symbols_list.add(item)
if os.path.isdir(f"{log_dir}/{dir_item}"):
symbols_list.add(dir_item)

return (set(symbols_list), set(date_list))


def gather_symbols_per_date(
log_dir, symbols_list, date_list
) -> dict[str, list[str]]:
"""returns map of dates containing symbols available on that date"""
dates_idx: dict[str, list[str]] = {}

# we'll store all symbol logs in each date
for date in sorted(date_list):
index[date] = set()
if date not in dates_idx:
dates_idx[date] = []

# iterate over all the symbols and gather all the
logfiles in each one of those symbol dirs
for _symbol in sorted(symbols_list):
logs = os.listdir(f"{log_dir}/{_symbol}")
logs: list[str] = os.listdir(f"{log_dir}/{_symbol}")
for _log in sorted(logs):
if not os.path.isfile(f"{log_dir}/{_symbol}/{_log}"):
continue
date = _log.split(".")[0]
index[date].add(_symbol)
_date: str = _log.split(".")[0]
dates_idx[_date].append(_symbol)
return dates_idx

tmp = index
index = {}
for date in tmp.keys(): # pylint: disable=C0206,C0201
index[date] = list(tmp[date])

def generate_index(log_dir="log") -> None:
"""generates index.json with dates <- [coins]"""

print("generating index...")
symbols_list, date_list = gather_symbols_and_logs(log_dir)

dates_index: dict[str, list[str]] = gather_symbols_per_date(
log_dir, symbols_list, date_list
)

# generate index_v1
print("writing index.json.gz...")

with gzip.open(
f"{log_dir}/index.json.gz", "wt", encoding="utf-8"
) as index_json:
index_json.write(json.dumps(dates_index, indent=4))

# generate index_v2
print("generating index_v2.json.gz...")
index: dict[str, dict] = {"DATES": {}, "COINS": {}}
for date in dates_index.keys(): # pylint: disable=C0206,C0201
index["DATES"][date] = list(dates_index[date])

for _symbol in sorted(os.listdir(log_dir)):
if os.path.isdir(f"{log_dir}/{_symbol}"):
logs: list[str] = os.listdir(f"{log_dir}/{_symbol}")
index["COINS"][_symbol] = sorted(logs)

print("writing index_v2.json.gz...")
with gzip.open(
f"{log_dir}/index_v2.json.gz", "wt", encoding="utf-8"
) as index_json:
index_json.write(json.dumps(index, indent=4))


if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser: argparse.ArgumentParser = argparse.ArgumentParser()
parser.add_argument("-s", "--start", help="start day to fetch klines for")
parser.add_argument(
"-e", "--end", help="end day to fetch klines for", required=False
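Taken together, this file's refactor splits the old generate_index() into two helpers plus a writer, and the writer now emits both index formats. A sketch of the resulting flow and artifact shapes, inferred from the code above — the concrete symbols, dates, and filenames are made up for illustration:

# New two-step flow, as refactored above:
#   symbols, dates = gather_symbols_and_logs("log")
#   dates_idx = gather_symbols_per_date("log", symbols, dates)

# index.json.gz (v1) keeps the flat date -> [symbols] map:
index_v1 = {
    "20211201": ["BTCUSDT", "ETHUSDT"],  # illustrative entries
}

# index_v2.json.gz nests that map under "DATES" and adds a "COINS"
# map of symbol -> sorted per-day log files:
index_v2 = {
    "DATES": {
        "20211201": ["BTCUSDT", "ETHUSDT"],
    },
    "COINS": {
        "BTCUSDT": ["20211201.log.gz", "20211202.log.gz"],
    },
}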
