Commit
Merge pull request #57 from username-anthony-is-not-available/combined_rsi_bollinger_strategy_and_more

Genius Yield’s Trading Strategies Competition - Combined rsi bollinger strategy (incl. optional Fear & Greed Index + extras)
4TT1L4 authored Aug 4, 2024
2 parents e4619c9 + 465fa33 commit 223bd9b
Showing 27 changed files with 1,722 additions and 353 deletions.
19 changes: 19 additions & 0 deletions .vscode/settings.json
@@ -0,0 +1,19 @@
{
"cSpell.words": [
"addstrategy",
"ALTCOINS",
"arange",
"backtrader",
"Bollinger",
"Cardano",
"cerebro",
"Doji",
"fgcior",
"fgis",
"Keltner",
"lovelaces",
"talipp",
"tgens"
],
"pylint.args": ["--max-line-length=120", "--disable=missing-module-docstring"]
}
24 changes: 20 additions & 4 deletions Dockerfile
@@ -14,6 +14,25 @@ WORKDIR /app
# Install dependencies
RUN apt-get update && apt-get install -y \
dos2unix \
wget \
unzip \
curl \
gnupg \
--no-install-recommends

# Install Chrome
RUN curl -fsSL https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add - && \
echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" > /etc/apt/sources.list.d/google-chrome.list && \
apt-get update && \
apt-get install -y google-chrome-stable && \
rm -rf /var/lib/apt/lists/*

# Download ChromeDriver
RUN CHROME_DRIVER_VERSION=`curl -sS chromedriver.storage.googleapis.com/LATEST_RELEASE` && \
wget -O /tmp/chromedriver.zip "https://chromedriver.storage.googleapis.com/${CHROME_DRIVER_VERSION}/chromedriver_linux64.zip" && \
unzip /tmp/chromedriver.zip -d /usr/local/bin/ && \
rm /tmp/chromedriver.zip

--no-install-recommends

# Create a non-privileged user that the app will run under.
@@ -22,9 +41,7 @@ ARG UID=10001
RUN adduser \
--disabled-password \
--gecos "" \
--home "/nonexistent" \
--shell "/sbin/nologin" \
--no-create-home \
--uid "${UID}" \
appuser

@@ -38,7 +55,7 @@ RUN --mount=type=cache,target=/root/.cache/pip \

# Copy the source code into the container.
COPY *.py .
COPY strategies/* strategies/
COPY src/ src/
COPY bot-api.yaml .
COPY requirements.txt .
COPY .flaskenv .
@@ -47,7 +64,6 @@ COPY .flaskenv .
COPY *.sh .
RUN dos2unix *.sh
RUN chmod +x *.sh

RUN /bin/bash -c /app/generate_client.sh

# Expose the port that the application listens on.
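With Chrome and ChromeDriver baked into the image, a container can drive a headless browser without any runtime download. A minimal sketch of wiring Selenium (pinned to 4.21.0 in requirements.txt) to the driver the Dockerfile unzips into /usr/local/bin; treat it as an illustration, not code from this repository:

from selenium import webdriver
from selenium.webdriver.chrome.service import Service

options = webdriver.ChromeOptions()
options.add_argument('--headless')               # no display inside the container
options.add_argument('--no-sandbox')             # Chrome's sandbox is unavailable in most containers
options.add_argument('--disable-dev-shm-usage')  # Docker's /dev/shm is often too small

# /usr/local/bin/chromedriver is where the Dockerfile unpacked the driver
driver = webdriver.Chrome(service=Service('/usr/local/bin/chromedriver'), options=options)
driver.get('https://example.com')
driver.quit()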
8 changes: 8 additions & 0 deletions Makefile
@@ -22,6 +22,14 @@ start-b:
docker compose up -d --build strategy_b
docker compose logs -f strategy_b

start-c:
docker compose up -d --build strategy_c
docker compose logs -f strategy_c

start-fgis:
docker compose up -d --build fear_and_greed_index_strategy
docker compose logs -f fear_and_greed_index_strategy

start-bb:
docker compose up -d --build bollinger_bands_strategy
docker compose logs -f bollinger_bands_strategy
28 changes: 14 additions & 14 deletions app.py
@@ -1,22 +1,22 @@
from client import AuthenticatedClient
from client.models import ErrorResponse, Settings
from client.api.settings import get_settings
from client.types import Response
from typing import cast, Union
from flask import Flask, jsonify
import threading
import time
import importlib
import logging
import os
import sys
import importlib
import yaml
import threading
import time
from api import Api
from api import ApiException
from datetime import datetime
import logging
from typing import Union, cast

import yaml
from client import AuthenticatedClient
from client.api.settings import get_settings
from client.models import ErrorResponse, Settings
from client.types import Response
from flask import Flask, jsonify
from flask_wtf.csrf import CSRFProtect

from api import Api, ApiException

# Spin up Flask Application
app = Flask(__name__)

@@ -48,7 +48,7 @@ def health_check():
return jsonify(status='healthy', message='Service is up and running!')

def load_strategy(strategy_class):
module = importlib.import_module(f".{strategy_class}", ".strategies")
module = importlib.import_module(f".{strategy_class}", ".src.strategies")
if hasattr(module, 'init'):
module.init()
strategy_class_ref = getattr(module, strategy_class)
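load_strategy above resolves the STRATEGY name to a module inside the strategies package, calls its optional init() hook, and returns the class that shares the module's name. A hedged sketch of the same pattern, written with an absolute import for clarity (the committed code anchors a relative import instead):

import importlib

def load_strategy_sketch(strategy_name: str):
    # e.g. strategy_name = 'fear_and_greed_index_strategy'
    module = importlib.import_module(f'src.strategies.{strategy_name}')
    if hasattr(module, 'init'):
        module.init()  # optional one-time setup hook, as in app.py
    return getattr(module, strategy_name)  # class is named after its module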
51 changes: 47 additions & 4 deletions compose.yaml
@@ -14,7 +14,7 @@ services:
coreProvider:
maestroToken: <<CORE_MAESTRO_API_KEY>>
turboSubmit: false
networkId: "mainnet" # supported: mainnet ot preprod
networkId: "mainnet" # supported: mainnet or preprod
logging:
- type: {tag: stderr}
severity: "Debug" # Options: Debug, Info, Warning or Error
@@ -33,7 +33,7 @@
environment:
BACKEND_URL: http://server:8082
SERVER_API_KEY: ${SERVER_API_KEY}
EXECUTION_DELAY: 90 # Time period in seconds to wait between strategy exeuctions
EXECUTION_DELAY: 90 # Time period in seconds to wait between strategy executions
STARTUP_DELAY: 1 # Time period in seconds to wait for the backend to start
RETRY_DELAY: 20 # Time period in seconds to wait before retrying to reach the backend
CONFIRMATION_DELAY: 90
@@ -49,7 +49,7 @@
environment:
BACKEND_URL: http://server:8082
SERVER_API_KEY: ${SERVER_API_KEY}
EXECUTION_DELAY: 15 # Time period in seconds to wait between strategy exeuctions
EXECUTION_DELAY: 15 # Time period in seconds to wait between strategy executions
STARTUP_DELAY: 1 # Time period in seconds to wait for the backend to start
RETRY_DELAY: 20 # Time period in seconds to wait before retrying to reach the backend
CONFIRMATION_DELAY: 90
@@ -59,13 +59,54 @@
setting_2: 567 ms
depends_on:
- server
strategy_c:
build:
context: .
environment:
BACKEND_URL: http://server:8082
SERVER_API_KEY: ${SERVER_API_KEY}
EXECUTION_DELAY: 30 # Time period in seconds to wait between strategy executions
STARTUP_DELAY: 1 # Time period in seconds to wait for the backend to start
RETRY_DELAY: 20 # Time period in seconds to wait before retrying to reach the backend
CONFIRMATION_DELAY: 90
STRATEGY: strategy_c
CONFIG: |
ASSET_PAIR: "asset1266q2ewhgul7jh3xqpvjzqarrepfjuler20akr-asset1xdz4yj4ldwlpsz2yjgjtt9evg9uskm8jrzjwhj"
START_TIME: "2023-06-15T19:19:56.462Z"
END_TIME: "2024-06-15T19:19:56.462Z"
BIN_INTERVAL: "1d"
depends_on:
- server
fear_and_greed_index_strategy:
build:
context: .
environment:
BACKEND_URL: http://server:8082
SERVER_API_KEY: ${SERVER_API_KEY}
EXECUTION_DELAY: 60 # Time period in seconds to wait between strategy executions
STARTUP_DELAY: 1 # Time period in seconds to wait for the backend to start
RETRY_DELAY: 20 # Time period in seconds to wait before retrying to reach the backend
CONFIRMATION_DELAY: 90
STRATEGY: fear_and_greed_index_strategy
CONFIG: |
BASE_ASSET: lovelace
# GENS for MAINNET:
TARGET_ASSET: dda5fdb1002f7389b33e036b6afee82a8189becb6cba852e8b79b4fb.0014df1047454e53
# tGENS for PREPROD:
# TARGET_ASSET: c6e65ba7878b2f8ea0ad39287d3e2fd256dc5c4160fc19bdf4c4d87e.7447454e53
POSITION_SIZE_LOVELACES: 1000000
STD_DEV_MULTIPLIER: 1.5
PERIOD: 5
FEAR_AND_GREED_INDEX_THRESHOLD: 60
depends_on:
- server
bollinger_bands_strategy:
build:
context: .
environment:
BACKEND_URL: http://server:8082
SERVER_API_KEY: ${SERVER_API_KEY}
EXECUTION_DELAY: 20 # Time period in seconds to wait between strategy exeuctions
EXECUTION_DELAY: 20 # Time period in seconds to wait between strategy executions
STARTUP_DELAY: 1 # Time period in seconds to wait for the backend to start
RETRY_DELAY: 20 # Time period in seconds to wait before retrying to reach the backend
CONFIRMATION_DELAY: 90
@@ -104,5 +145,7 @@ services:
RSI_OVERSOLD: 35 # Higher threshold for oversold
BB_PERIOD: 20 # Standard period
BB_STD_DEV: 1.8 # Tighter bands for volatility
USE_FEAR_AND_GREED: true
FEAR_AND_GREED_INDEX_THRESHOLD: 60
depends_on:
- server
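Every strategy service passes its parameters through the CONFIG environment variable as an inline YAML document. A minimal sketch (not the repository's strategy code) of how the fear_and_greed_index_strategy service could read it; the key names mirror the compose block above, and the buy-on-fear reading of the threshold is an assumption made for illustration:

import os
import yaml

config = yaml.safe_load(os.environ['CONFIG'])
threshold = int(config['FEAR_AND_GREED_INDEX_THRESHOLD'])  # 60 in the compose file
position_size = int(config['POSITION_SIZE_LOVELACES'])     # 1000000 lovelaces = 1 ADA

index_value = 42  # in practice: FearAndGreedIndexWebScraper(...).get_index_value()
if index_value is not None and index_value < threshold:
    print(f'Index {index_value} < {threshold}: market is fearful, place a buy order')
else:
    print('Index at or above threshold (or unavailable): no signal')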
6 changes: 5 additions & 1 deletion requirements.txt
@@ -1,5 +1,9 @@
backtrader==1.9.78.123
beautifulsoup4==4.12.3
flask==3.0
Flask-WTF==1.2.1
gunicorn==22.0.0
openapi-python-client==0.19.1
selenium==4.21.0
talipp==2.2.0
Flask-WTF==1.2.1
webdriver-manager==4.0.1
File renamed without changes.
Empty file added src/data_extraction/__init__.py
Empty file.
138 changes: 138 additions & 0 deletions src/data_extraction/fear_and_greed_index_web_scraper.py
@@ -0,0 +1,138 @@
import logging
import re
from typing import Optional

from bs4 import BeautifulSoup
from selenium import webdriver
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from webdriver_manager.chrome import ChromeDriverManager


class ScraperException(Exception):
"""Custom exception for scraper-related errors."""


class FearAndGreedIndexWebScraper:
"""
A scraper for the Cardano Fear and Greed Index.
This class provides functionality to scrape the Cardano Fear and Greed Index
from a specified URL using Selenium WebDriver and BeautifulSoup.
Attributes:
URL (str): The URL of the Cardano Fear and Greed Index page.
CLASS_SELECTOR (str): The CSS class selector for the index value element.
WEB_DRIVER_WAIT_DEFAULT_TIMEOUT (int): Default timeout for WebDriverWait.
"""

URL: str = 'https://cfgi.io/cardano-fear-greed-index/'
# cspell:disable-next-line
CLASS_SELECTOR: str = 'apexcharts-datalabel-value'
WEB_DRIVER_WAIT_DEFAULT_TIMEOUT: int = 15

def __init__(self, logger: logging.Logger, web_driver_wait_timeout: Optional[int] = None):
"""
Initialize the FearAndGreedIndexWebScraper.
Args:
logger (logging.Logger): Logger object for logging messages.
web_driver_wait_timeout (Optional[int]):
Timeout for WebDriverWait. Defaults to WEB_DRIVER_WAIT_DEFAULT_TIMEOUT.
"""
self.logger: logging.Logger = logger
self.index_value: Optional[str] = None
self.web_driver_wait_timeout: int = web_driver_wait_timeout or self.WEB_DRIVER_WAIT_DEFAULT_TIMEOUT
self.driver: webdriver.Chrome = self._init_driver()

def _init_driver(self) -> webdriver.Chrome:
"""
Initialize and return a Chrome WebDriver.
Returns:
webdriver.Chrome: An instance of Chrome WebDriver.
"""
options = webdriver.ChromeOptions()
options.add_argument('--headless')
options.add_argument('--no-sandbox')
options.add_argument('--disable-dev-shm-usage')

return webdriver.Chrome(service=Service(ChromeDriverManager().install()), options=options)

def fetch_page_content(self) -> str:
"""
Fetch the page content from the URL.
Returns:
str: The HTML content of the page.
Raises:
ScraperException: If there's an error fetching the page content.
"""
try:
self.driver.get(self.URL)
WebDriverWait(self.driver, self.web_driver_wait_timeout).until(
EC.presence_of_element_located((By.CLASS_NAME, self.CLASS_SELECTOR))
)
return self.driver.page_source
except Exception as e:
self.logger.error(f"Error fetching page content: {e}")
raise ScraperException(f"Failed to fetch page content: {e}") from e

def parse_index_value(self, html_content: str) -> None:
"""
Parse the index value from the HTML content.
Args:
html_content (str): The HTML content to parse.
Raises:
ScraperException: If there's an error parsing the HTML content or if the index element is not found.
"""
try:
soup = BeautifulSoup(html_content, 'html.parser')
index_element = soup.find(class_=self.CLASS_SELECTOR)
if index_element:
self.index_value = index_element.get_text(strip=True)
self.logger.info(f"Successfully parsed index value: {self.index_value}")
else:
raise ScraperException("Could not find the Fear and Greed Index element on the page.")
except Exception as e:
self.logger.error(f"Error parsing HTML content: {e}")
raise ScraperException(f"Failed to parse HTML content: {e}") from e

@staticmethod
def extract_number(percentage_str: str) -> Optional[int]:
"""
Extract the numeric value from a percentage string.
Args:
percentage_str (str): The percentage string to extract the number from.
Returns:
Optional[int]: The extracted number as an integer, or None if no number is found.
"""
match = re.search(r'\d+', percentage_str)
return int(match.group()) if match else None

def get_index_value(self) -> Optional[int]:
"""
Get the Fear and Greed Index value.
Returns:
Optional[int]: The Fear and Greed Index value as an integer, or None if the value couldn't be retrieved.
"""
try:
with self.driver: # Use context manager for proper cleanup
html_content = self.fetch_page_content()
if html_content:
self.parse_index_value(html_content)
return self.extract_number(self.index_value) if self.index_value else None
except ScraperException as e:
self.logger.error(f"Scraper error: {e}")
return None
finally:
if self.driver:
self.driver.quit()
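Typical usage of the class above, as a sketch (assumes Chrome is available, as in the Dockerfile): the constructor only needs a logger, and get_index_value() handles fetching, parsing, and driver cleanup in one call:

import logging

logging.basicConfig(level=logging.INFO)
scraper = FearAndGreedIndexWebScraper(logging.getLogger('fgis'))
value = scraper.get_index_value()  # Optional[int]; None if scraping failed
print(f'Cardano Fear & Greed Index: {value}')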