diff --git a/.github/workflows/publish_docker.yaml b/.github/workflows/publish_docker.yaml
index 6d52e8a0..cc7ae4be 100644
--- a/.github/workflows/publish_docker.yaml
+++ b/.github/workflows/publish_docker.yaml
@@ -11,7 +11,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout the repository
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
       - name: Docker meta
        id: meta
        uses: docker/metadata-action@v4
diff --git a/.github/workflows/python-test.yml b/.github/workflows/python-test.yml
index 0bb5c307..9fb31cd7 100644
--- a/.github/workflows/python-test.yml
+++ b/.github/workflows/python-test.yml
@@ -22,10 +22,10 @@ jobs:
       PYTHON: ${{ matrix.python-version }}

     steps:
-    - uses: actions/checkout@v3
+    - uses: actions/checkout@v4
     - name: Set up Python ${{ matrix.python-version }}
-      uses: actions/setup-python@v4
+      uses: actions/setup-python@v5
       with:
        python-version: ${{ matrix.python-version }}
diff --git a/.vscode/launch.json b/.vscode/launch.json
index d2306e04..7dfcb36e 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -1,20 +1,28 @@
 {
    "configurations": [
        {
-            "name": "EMHASS run",
-            "type": "python",
-            "request": "launch",
-            "program": "web_server.py",
-            "console": "integratedTerminal",
-            "cwd": "${workspaceFolder}/src/emhass/",
-            "purpose":["debug-in-terminal"],
-            "justMyCode": true,
-            "env": {
-                "CONFIG_PATH": "/workspaces/emhass/config_emhass.yaml",
-                "OPTIONS_PATH": "/workspaces/emhass/options.json",
-                "SECRETS_PATH": "/workspaces/emhass/secrets_emhass.yaml",
-                "DATA_PATH": "/workspaces/emhass/data/",
-            }
+            "name": "Python: Current File",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "console": "integratedTerminal",
+            "justMyCode": true
+        },
+        {
+            "name": "EMHASS run",
+            "type": "python",
+            "request": "launch",
+            "program": "web_server.py",
+            "console": "integratedTerminal",
+            "cwd": "${workspaceFolder}/src/emhass/",
+            "purpose":["debug-in-terminal"],
+            "justMyCode": true,
+            "env": {
+                "CONFIG_PATH": "/workspaces/emhass/config_emhass.yaml",
+                "OPTIONS_PATH": "/workspaces/emhass/options.json",
+                "SECRETS_PATH": "/workspaces/emhass/secrets_emhass.yaml",
+                "DATA_PATH": "/workspaces/emhass/data/",
+            }
        },
        {
            "name": "EMHASS run ADDON",
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 7ac6df8d..3f880600 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,19 @@
 # Changelog

+## [0.7.0] - 2024-01-28
+### Improvement
+- Added a new feature to provide operating time windows for deferrable loads. Thanks to @michaelpiron
+- Added lots of new options to be configured by the user. Thanks to @GeoDerp
+- Updated stylesheet with mobile & dark theme by @GeoDerp
+- Improved launch.json to fully test EMHASS on different configurations. Thanks to @GeoDerp
+- Added new script to debug and develop new time series clustering feature
+- Improved documentation. Thanks to @g1za
+### Fix
+- Updated github workflow actions/checkout to v4 and actions/setup-python to v5
+- Changed default values for weight_battery_discharge and weight_battery_charge to zero
+- Renamed classes to conform to PEP8
+- Bump markupsafe from 2.1.3 to 2.1.4
+
 ## [0.6.2] - 2024-01-04
 ### Improvement
 - Added option to pass additional weight for battery usage
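The deferrable-load operating windows listed in the changelog above map to the def_start_timestep and def_end_timestep parameters handled in the optimization.py changes later in this patch. A minimal, hypothetical sketch of passing such windows as runtime parameters to a locally running EMHASS web server follows; the URL, port and exact payload shape are assumptions to be checked against the EMHASS documentation, not taken from this patch.

# Hypothetical sketch only: one window entry per configured deferrable load,
# expressed in optimization timesteps. Endpoint and payload shape are assumed.
import requests

runtimeparams = {
    "def_start_timestep": [0, 12],   # earliest timestep each deferrable load may start
    "def_end_timestep": [24, 36],    # latest timestep each deferrable load must finish
}
response = requests.post(
    "http://localhost:5000/action/dayahead-optim",  # assumed local EMHASS instance
    json=runtimeparams,
    timeout=30,
)
response.raise_for_status()

The "Renamed classes to conform to PEP8" fix amounts to the following import changes for code that uses EMHASS as a library (a reference sketch; the previous names are shown in the comments):

# Class renames applied throughout this patch (previous name in each comment).
from emhass.retrieve_hass import RetrieveHass                  # was: retrieve_hass
from emhass.forecast import Forecast                           # was: forecast
from emhass.optimization import Optimization                   # was: optimization
from emhass.machine_learning_forecaster import MLForecaster    # was: mlforecaster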
diff --git a/config_emhass.yaml b/config_emhass.yaml
index f1595dfd..621d1a9c 100644
--- a/config_emhass.yaml
+++ b/config_emhass.yaml
@@ -58,8 +58,8 @@ optim_conf:
   set_battery_dynamic: False # add a constraint to limit the dynamic of the battery power in power per time unit
   battery_dynamic_max: 0.9 # maximum dynamic positive power variation in percentage of battery maximum power
   battery_dynamic_min: -0.9 # minimum dynamic negative power variation in percentage of battery maximum power
-  weight_battery_discharge: 1.0 # weight applied in cost function to battery usage for discharge
-  weight_battery_charge: 1.0 # weight applied in cost function to battery usage for charge
+  weight_battery_discharge: 0.0 # weight applied in cost function to battery usage for discharge
+  weight_battery_charge: 0.0 # weight applied in cost function to battery usage for charge

 plant_conf:
   P_grid_max: 9000 # The maximum power that can be supplied by the utility grid in Watts
diff --git a/docs/conf.py b/docs/conf.py
index f37f20e6..6b06bc42 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -22,7 +22,7 @@
 author = 'David HERNANDEZ'

 # The full version, including alpha/beta/rc tags
-release = '0.6.2'
+release = '0.7.0'


 # -- General configuration ---------------------------------------------------
diff --git a/requirements.txt b/requirements.txt
index bdd51541..1a84a681 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,7 +1,7 @@
 wheel
 numpy<=1.26.0
-pandas<=2.0.3
 scipy<=1.11.3
+pandas<=2.0.3
 pvlib>=0.10.2
 protobuf>=3.0.0
 pytz>=2021.1
diff --git a/scripts/load_clustering.py b/scripts/load_clustering.py
index 425953f8..a586fa75 100644
--- a/scripts/load_clustering.py
+++ b/scripts/load_clustering.py
@@ -9,8 +9,8 @@
 pio.renderers.default = 'browser'
 pd.options.plotting.backend = "plotly"

-from emhass.retrieve_hass import retrieve_hass
-from emhass.forecast import forecast
+from emhass.retrieve_hass import RetrieveHass
+from emhass.forecast import Forecast
 from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger

 from sklearn.cluster import KMeans
@@ -55,7 +55,7 @@
 else:
     logger.info("Using EMHASS methods to retrieve the new forecast model train data")
     retrieve_hass_conf, _, _ = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=True)
-    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
+    rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
                       retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
                       params, root, logger, get_data_from_file=False)
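The scripts/load_clustering.py changes above belong to the new time-series clustering development script mentioned in the changelog. A rough, hypothetical sketch of the kind of KMeans-based grouping such a script can perform on a retrieved load series is shown below; the synthetic data, feature construction and cluster count are illustrative assumptions, not taken from the script.

# Illustrative only: cluster daily load profiles with KMeans (assumed approach).
import numpy as np
import pandas as pd
from sklearn.cluster import KMeans

# Assume a half-hourly load power series indexed by timestamp, similar to what
# RetrieveHass returns; reshape it into one row per day (48 half-hour slots).
rng = pd.date_range("2024-01-01", periods=48 * 14, freq="30min")
df_final = pd.DataFrame({"sensor.power_load": np.random.rand(len(rng))}, index=rng)
daily_profiles = df_final["sensor.power_load"].to_numpy().reshape(-1, 48)

kmeans = KMeans(n_clusters=3, n_init=10, random_state=0).fit(daily_profiles)
print(kmeans.labels_)  # one cluster label per day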
diff --git a/scripts/load_forecast_sklearn.py b/scripts/load_forecast_sklearn.py
index 4ba84d3a..df652703 100644
--- a/scripts/load_forecast_sklearn.py
+++ b/scripts/load_forecast_sklearn.py
@@ -8,8 +8,8 @@
 pio.renderers.default = 'browser'
 pd.options.plotting.backend = "plotly"

-from emhass.retrieve_hass import retrieve_hass
-from emhass.forecast import forecast
+from emhass.retrieve_hass import RetrieveHass
+from emhass.forecast import Forecast
 from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger

 from sklearn.linear_model import LinearRegression
@@ -63,7 +63,7 @@ def neg_r2_score(y_true, y_pred):
 else:
     logger.info("Using EMHASS methods to retrieve the new forecast model train data")
     retrieve_hass_conf, _, _ = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=True)
-    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
+    rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
                       retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
                       params, root, logger, get_data_from_file=False)
@@ -240,14 +240,14 @@ def neg_r2_score(y_true, y_pred):
     # Let's perform a naive load forecast for comparison
     retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=True)
-    fcst = forecast(retrieve_hass_conf, optim_conf, plant_conf,
+    fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                     params, root, logger)
     P_load_forecast = fcst.get_load_forecast(method='naive')
     # Then retrieve some data and perform a prediction mocking a production env
-    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-                       retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
-                       params, root, logger, get_data_from_file=False)
+    rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
+                      retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+                      params, root, logger, get_data_from_file=False)
     days_list = get_days_list(days_needed)
     var_model = retrieve_hass_conf['var_load']
diff --git a/scripts/optim_results_analysis.py b/scripts/optim_results_analysis.py
index 91147679..2788829b 100644
--- a/scripts/optim_results_analysis.py
+++ b/scripts/optim_results_analysis.py
@@ -9,9 +9,9 @@
 pio.renderers.default = 'browser'
 pd.options.plotting.backend = "plotly"

-from emhass.retrieve_hass import retrieve_hass
-from emhass.optimization import optimization
-from emhass.forecast import forecast
+from emhass.retrieve_hass import RetrieveHass
+from emhass.optimization import Optimization
+from emhass.forecast import Forecast
 from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger

 # the root folder
@@ -21,14 +21,14 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
                                params, get_data_from_file):
-    fcst = forecast(retrieve_hass_conf, optim_conf, plant_conf,
+    fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                     params, root, logger, get_data_from_file=get_data_from_file)
     df_weather = fcst.get_weather_forecast(method=optim_conf['weather_forecast_method'])
     P_PV_forecast = fcst.get_power_from_weather(df_weather)
     P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method'])
     df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1)
     df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast']
-    opt = optimization(retrieve_hass_conf, optim_conf, plant_conf,
+    opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf,
                        fcst.var_load_cost, fcst.var_prod_price,
                        'profit', root, logger)
     return fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt
@@ -41,9 +41,9 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
     retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=False)
     retrieve_hass_conf, optim_conf, plant_conf = \
         retrieve_hass_conf, optim_conf, plant_conf
-    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-                       retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
-                       params, root, logger)
+    rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
+                      retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+                      params, root, logger)
     if get_data_from_file:
        with open(pathlib.Path(root+'/data/test_df_final.pkl'), 'rb') as inp:
            rh.df_final, days_list, var_list = pickle.load(inp)
diff --git a/scripts/special_config_analysis.py b/scripts/special_config_analysis.py
index b161bf46..ae7c01ea 100644
--- a/scripts/special_config_analysis.py
+++ b/scripts/special_config_analysis.py
@@ -19,9 +19,9 @@
 pio.renderers.default = 'browser'
 pd.options.plotting.backend = "plotly"

-from emhass.retrieve_hass import retrieve_hass
-from emhass.optimization import optimization
-from emhass.forecast import forecast
+from emhass.retrieve_hass import RetrieveHass
+from emhass.optimization import Optimization
+from emhass.forecast import Forecast
 from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger

 # the root folder
@@ -31,14 +31,14 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
                                params, get_data_from_file):
-    fcst = forecast(retrieve_hass_conf, optim_conf, plant_conf,
+    fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                     params, root, logger, get_data_from_file=get_data_from_file)
     df_weather = fcst.get_weather_forecast(method='solar.forecast')
     P_PV_forecast = fcst.get_power_from_weather(df_weather)
     P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method'])
     df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1)
     df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast']
-    opt = optimization(retrieve_hass_conf, optim_conf, plant_conf,
+    opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf,
                        fcst.var_load_cost, fcst.var_prod_price,
                        'cost', root, logger)
     return fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt
@@ -152,9 +152,9 @@ def build_params(params, options):
     else:
        retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(config_path, use_secrets=True,
                                                                    params = json.dumps(params))
-    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-                       retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
-                       params, root, logger)
+    rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
+                      retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+                      params, root, logger)
     days_list = get_days_list(retrieve_hass_conf['days_to_retrieve'])
     var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
     rh.get_data(days_list, var_list,
diff --git a/scripts/use_cases_analysis.py b/scripts/use_cases_analysis.py
index d24d01da..0f75f0c0 100644
--- a/scripts/use_cases_analysis.py
+++ b/scripts/use_cases_analysis.py
@@ -16,9 +16,9 @@
 pio.renderers.default = 'browser'
 pd.options.plotting.backend = "plotly"

-from emhass.retrieve_hass import retrieve_hass
-from emhass.optimization import optimization
-from emhass.forecast import forecast
+from emhass.retrieve_hass import RetrieveHass
+from emhass.optimization import Optimization
+from emhass.forecast import Forecast
 from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger

 # the root folder
@@ -28,14 +28,14 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
                                params, get_data_from_file):
-    fcst = forecast(retrieve_hass_conf, optim_conf, plant_conf,
+    fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                     params, root, logger, get_data_from_file=get_data_from_file)
     df_weather = fcst.get_weather_forecast(method='solar.forecast')
     P_PV_forecast = fcst.get_power_from_weather(df_weather)
     P_load_forecast = fcst.get_load_forecast(method=optim_conf['load_forecast_method'])
     df_input_data_dayahead = pd.concat([P_PV_forecast, P_load_forecast], axis=1)
     df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast']
-    opt = optimization(retrieve_hass_conf, optim_conf, plant_conf,
+    opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf,
                        fcst.var_load_cost, fcst.var_prod_price,
                        'profit', root, logger)
     return fcst, P_PV_forecast, P_load_forecast, df_input_data_dayahead, opt
@@ -45,9 +45,9 @@ def get_forecast_optim_objects(retrieve_hass_conf, optim_conf, plant_conf,
     params = None
     save_figures = False
     retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=True)
-    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
-                       retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
-                       params, root, logger)
+    rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
+                      retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
+                      params, root, logger)
     days_list = get_days_list(retrieve_hass_conf['days_to_retrieve'])
     var_list = [retrieve_hass_conf['var_load'], retrieve_hass_conf['var_PV']]
     rh.get_data(days_list, var_list,
diff --git a/setup.py b/setup.py
index cf754acf..bf7c643d 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
 setup(
     name='emhass', # Required
-    version='0.6.2', # Required
+    version='0.7.0', # Required
     description='An Energy Management System for Home Assistant', # Optional
     long_description=long_description, # Optional
     long_description_content_type='text/markdown', # Optional (see note above)
@@ -31,7 +31,7 @@
        'Intended Audience :: Developers',
        'Topic :: Software Development :: Build Tools',
        'License :: OSI Approved :: MIT License',
-        'Programming Language :: Python :: 3.10',
+        'Programming Language :: Python :: 3.11',
        "Operating System :: OS Independent",
     ],
     keywords='energy, management, optimization, hass', # Optional
@@ -43,15 +43,19 @@
        'numpy<=1.26',
        'scipy<=1.11.3',
        'pandas<=2.0.3',
-        'pvlib>=0.10.1',
+        'pvlib>=0.10.2',
        'protobuf>=3.0.0',
        'pytz>=2021.1',
        'requests>=2.25.1',
        'beautifulsoup4>=4.9.3',
+        'h5py==3.10.0',
        'pulp>=2.4',
        'pyyaml>=5.4.1',
        'tables<=3.9.1',
        'skforecast==0.11.0',
+        'flask>=2.0.3',
+        'waitress>=2.1.1',
+        'plotly>=5.6.0'
     ], # Optional
     entry_points={ # Optional
        'console_scripts': [
diff --git a/src/emhass/command_line.py b/src/emhass/command_line.py
index 26e638c2..40b84ed6 100644
--- a/src/emhass/command_line.py
+++ b/src/emhass/command_line.py
@@ -16,10 +16,10 @@
 from distutils.util import strtobool
 from importlib.metadata import version

-from emhass.retrieve_hass import retrieve_hass
-from emhass.forecast import forecast
-from emhass.machine_learning_forecaster import mlforecaster
-from emhass.optimization import optimization
+from emhass.retrieve_hass import RetrieveHass
+from emhass.forecast import Forecast
+from emhass.machine_learning_forecaster import MLForecaster
+from emhass.optimization import Optimization
 from emhass import utils

@@ -58,12 +58,12 @@ def set_input_data_dict(config_path: pathlib.Path, base_path: str, costfun: str,
                                                          runtimeparams, params, retrieve_hass_conf,
                                                          optim_conf, plant_conf, set_type, logger)
     # Define main objects
-    rh = retrieve_hass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
+    rh = RetrieveHass(retrieve_hass_conf['hass_url'], retrieve_hass_conf['long_lived_token'],
                      retrieve_hass_conf['freq'], retrieve_hass_conf['time_zone'],
                      params, base_path, logger, get_data_from_file=get_data_from_file)
-    fcst = forecast(retrieve_hass_conf, optim_conf, plant_conf,
+    fcst = Forecast(retrieve_hass_conf, optim_conf, plant_conf,
                     params, base_path, logger, get_data_from_file=get_data_from_file)
-    opt = optimization(retrieve_hass_conf, optim_conf, plant_conf,
+    opt = Optimization(retrieve_hass_conf, optim_conf, plant_conf,
                        fcst.var_load_cost, fcst.var_prod_price,
                        costfun, base_path, logger)
     # Perform setup based on type of action
@@ -288,7 +288,7 @@ def naive_mpc_optim(input_data_dict: dict, logger: logging.Logger,
     return opt_res_naive_mpc

 def forecast_model_fit(input_data_dict: dict, logger: logging.Logger,
-                       debug: Optional[bool] = False) -> Tuple[pd.DataFrame, pd.DataFrame, mlforecaster]:
+                       debug: Optional[bool] = False) -> Tuple[pd.DataFrame, pd.DataFrame, MLForecaster]:
     """Perform a forecast model fit from training data retrieved from Home Assistant.

     :param input_data_dict: A dictionnary with multiple data used by the action functions
@@ -309,7 +309,7 @@ def forecast_model_fit(input_data_dict: dict, logger: logging.Logger,
     perform_backtest = input_data_dict['params']['passed_data']['perform_backtest']
     root = input_data_dict['root']
     # The ML forecaster object
-    mlf = mlforecaster(data, model_type, var_model, sklearn_model, num_lags, root, logger)
+    mlf = MLForecaster(data, model_type, var_model, sklearn_model, num_lags, root, logger)
     # Fit the ML model
     df_pred, df_pred_backtest = mlf.fit(split_date_delta=split_date_delta,
                                         perform_backtest=perform_backtest)
@@ -322,7 +322,7 @@ def forecast_model_fit(input_data_dict: dict, logger: logging.Logger,

 def forecast_model_predict(input_data_dict: dict, logger: logging.Logger,
                            use_last_window: Optional[bool] = True, debug: Optional[bool] = False,
-                           mlf: Optional[mlforecaster] = None) -> pd.DataFrame:
+                           mlf: Optional[MLForecaster] = None) -> pd.DataFrame:
     r"""Perform a forecast model predict using a previously trained skforecast model.

     :param input_data_dict: A dictionnary with multiple data used by the action functions
@@ -388,8 +388,8 @@ def forecast_model_predict(input_data_dict: dict, logger: logging.Logger,
     return predictions

 def forecast_model_tune(input_data_dict: dict, logger: logging.Logger,
-                        debug: Optional[bool] = False, mlf: Optional[mlforecaster] = None
-                        ) -> Tuple[pd.DataFrame, mlforecaster]:
+                        debug: Optional[bool] = False, mlf: Optional[MLForecaster] = None
+                        ) -> Tuple[pd.DataFrame, MLForecaster]:
     """Tune a forecast model hyperparameters using bayesian optimization.

     :param input_data_dict: A dictionnary with multiple data used by the action functions
diff --git a/src/emhass/forecast.py b/src/emhass/forecast.py
index 83f88a76..77217a1b 100644
--- a/src/emhass/forecast.py
+++ b/src/emhass/forecast.py
@@ -19,12 +19,12 @@
 from pvlib.temperature import TEMPERATURE_MODEL_PARAMETERS
 from pvlib.irradiance import disc

-from emhass.retrieve_hass import retrieve_hass
-from emhass.machine_learning_forecaster import mlforecaster
+from emhass.retrieve_hass import RetrieveHass
+from emhass.machine_learning_forecaster import MLForecaster
 from emhass.utils import get_days_list


-class forecast(object):
+class Forecast(object):
     r"""
     Generate weather, load and costs forecasts needed as inputs to the optimization.
@@ -448,7 +448,7 @@ def get_power_from_weather(self, df_weather: pd.DataFrame,
            # Extracting results for AC power
            P_PV_forecast = mc.results.ac
            if set_mix_forecast:
-                P_PV_forecast = forecast.get_mix_forecast(
+                P_PV_forecast = Forecast.get_mix_forecast(
                    df_now, P_PV_forecast, self.params['passed_data']['alpha'],
                    self.params['passed_data']['beta'], self.var_PV)
            return P_PV_forecast
@@ -533,7 +533,7 @@ def get_forecast_out_from_csv(self, df_final: pd.DataFrame, forecast_dates_csv:

     def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: Optional[str] = 'naive',
                          csv_path: Optional[str] = "/data/data_load_forecast.csv", set_mix_forecast:Optional[bool] = False, df_now:Optional[pd.DataFrame] = pd.DataFrame(),
-                          use_last_window: Optional[bool] = True, mlf: Optional[mlforecaster] = None,
+                          use_last_window: Optional[bool] = True, mlf: Optional[MLForecaster] = None,
                          debug: Optional[bool] = False) -> pd.Series:
        r"""
        Get and generate the load forecast data.
@@ -576,7 +576,7 @@ def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: O
            var_interp = [self.var_load]
            time_zone_load_foreacast = None
        # We will need to retrieve a new set of load data according to the days_min_load_forecast parameter
-        rh = retrieve_hass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'],
+        rh = RetrieveHass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'],
                          self.freq, time_zone_load_foreacast, self.params, self.root, self.logger)
        if self.get_data_from_file:
            with open(pathlib.Path(self.root) / 'data' / 'test_df_final.pkl', 'rb') as inp:
@@ -653,7 +653,7 @@ def get_load_forecast(self, days_min_load_forecast: Optional[int] = 3, method: O
            self.logger.error("Passed method is not valid")
        P_Load_forecast = copy.deepcopy(forecast_out['yhat'])
        if set_mix_forecast:
-            P_Load_forecast = forecast.get_mix_forecast(
+            P_Load_forecast = Forecast.get_mix_forecast(
                df_now, P_Load_forecast, self.params['passed_data']['alpha'],
                self.params['passed_data']['beta'], self.var_load_new)
        return P_Load_forecast
diff --git a/src/emhass/machine_learning_forecaster.py b/src/emhass/machine_learning_forecaster.py
index 60916f1f..ba82db98 100644
--- a/src/emhass/machine_learning_forecaster.py
+++ b/src/emhass/machine_learning_forecaster.py
@@ -20,7 +20,7 @@
 import warnings
 warnings.filterwarnings("ignore", category=DeprecationWarning)

-class mlforecaster:
+class MLForecaster:
     r"""
     A forecaster class using machine learning models with auto-regressive approach and features\
     based on timestamp information (hour, day, week, etc).
@@ -105,7 +105,7 @@ def generate_exog(data_last_window, periods, var_name):
                                       freq=data_last_window.index.freq)
        exog = pd.DataFrame({var_name:[np.nan]*periods},
                            index=forecast_dates)
-        exog = mlforecaster.add_date_features(exog)
+        exog = MLForecaster.add_date_features(exog)
        return exog

     def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optional[bool] = False
@@ -124,7 +124,7 @@ def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optiona
        self.logger.info("Performing a forecast model fit for "+self.model_type)
        # Preparing the data: adding exogenous features
        self.data_exo = pd.DataFrame(index=self.data.index)
-        self.data_exo = mlforecaster.add_date_features(self.data_exo)
+        self.data_exo = MLForecaster.add_date_features(self.data_exo)
        self.data_exo[self.var_model] = self.data[self.var_model]
        self.data_exo = self.data_exo.interpolate(method='linear', axis=0, limit=None)
        # train/test split
@@ -174,7 +174,7 @@ def fit(self, split_date_delta: Optional[str] = '48h', perform_backtest: Optiona
                                               initial_train_size = None,
                                               fixed_train_size = False,
                                               steps = self.num_lags,
-                                               metric = mlforecaster.neg_r2_score,
+                                               metric = MLForecaster.neg_r2_score,
                                               refit = False,
                                               verbose = False
                                               )
@@ -202,12 +202,12 @@ def predict(self, data_last_window: Optional[pd.DataFrame] = None
        else:
            data_last_window = data_last_window.interpolate(method='linear', axis=0, limit=None)
        if self.is_tuned:
-            exog = mlforecaster.generate_exog(data_last_window, self.lags_opt, self.var_model)
+            exog = MLForecaster.generate_exog(data_last_window, self.lags_opt, self.var_model)
            predictions = self.forecaster.predict(steps=self.lags_opt,
                                                  last_window=data_last_window[self.var_model],
                                                  exog=exog.drop(self.var_model, axis=1))
        else:
-            exog = mlforecaster.generate_exog(data_last_window, self.num_lags, self.var_model)
+            exog = MLForecaster.generate_exog(data_last_window, self.num_lags, self.var_model)
            predictions = self.forecaster.predict(steps=self.num_lags,
                                                  last_window=data_last_window[self.var_model],
                                                  exog=exog.drop(self.var_model, axis=1))
@@ -276,7 +276,7 @@ def search_space(trial):
                           lags_grid = lags_grid,
                           search_space = search_space,
                           steps = num_lags,
-                           metric = mlforecaster.neg_r2_score,
+                           metric = MLForecaster.neg_r2_score,
                           refit = refit,
                           initial_train_size = len(self.data_exo.loc[:self.date_train]),
                           fixed_train_size = True,
diff --git a/src/emhass/optimization.py b/src/emhass/optimization.py
index 8e9da3ca..feb2da75 100644
--- a/src/emhass/optimization.py
+++ b/src/emhass/optimization.py
@@ -11,7 +11,7 @@
 from math import ceil


-class optimization:
+class Optimization:
     r"""
     Optimize the deferrable load and battery energy dispatch problem using \
     the linear programming optimization technique. All equipement equations, \
@@ -34,7 +34,7 @@ def __init__(self, retrieve_hass_conf: dict, optim_conf: dict, plant_conf: dict,
                 costfun: str, base_path: str, logger: logging.Logger,
                 opt_time_delta: Optional[int] = 24) -> None:
        r"""
-        Define constructor for optimization class.
+        Define constructor for Optimization class.

        :param retrieve_hass_conf: Configuration parameters used to retrieve data \
            from hass
@@ -286,7 +286,7 @@ def perform_optimization(self, data_opt: pd.DataFrame, P_PV: np.array, P_load: n
                })
            # Ensure deferrable loads consume energy between def_start_timestep & def_end_timestep
            self.logger.debug("Deferrable load {}: Proposed optimization window: {} --> {}".format(k, def_start_timestep[k], def_end_timestep[k]))
-            def_start, def_end, warning = optimization.validate_def_timewindow(def_start_timestep[k], def_end_timestep[k], ceil(def_total_hours[k]/self.timeStep), n)
+            def_start, def_end, warning = Optimization.validate_def_timewindow(def_start_timestep[k], def_end_timestep[k], ceil(def_total_hours[k]/self.timeStep), n)
            if warning is not None:
                self.logger.warning("Deferrable load {} : {}".format(k, warning))
            self.logger.debug("Deferrable load {}: Validated optimization window: {} --> {}".format(k, def_start, def_end))
diff --git a/src/emhass/retrieve_hass.py b/src/emhass/retrieve_hass.py
index df3fefdd..89da8274 100644
--- a/src/emhass/retrieve_hass.py
+++ b/src/emhass/retrieve_hass.py
@@ -13,7 +13,7 @@
 from emhass.utils import set_df_index_freq


-class retrieve_hass:
+class RetrieveHass:
     r"""
     Retrieve data from Home Assistant using the restful API.

@@ -34,7 +34,7 @@ def __init__(self, hass_url: str, long_lived_token: str, freq: pd.Timedelta,
                 time_zone: datetime.timezone, params: str, base_path: str, logger: logging.Logger,
                 get_data_from_file: Optional[bool] = False) -> None:
        """
-        Define constructor for retrieve_hass class.
+        Define constructor for RetrieveHass class.

        :param hass_url: The URL of the Home Assistant instance
        :type hass_url: str
@@ -287,25 +287,25 @@ def post_data(self, data_df: pd.DataFrame, idx: int, entity_id: str,
        else:
            state = np.round(data_df.loc[data_df.index[idx]],2)
        if type_var == 'power':
-            data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
                                                    friendly_name, "forecasts", state)
        elif type_var == 'deferrable':
-            data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
                                                    friendly_name, "deferrables_schedule", state)
        elif type_var == 'batt':
-            data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
                                                    friendly_name, "battery_scheduled_power", state)
        elif type_var == 'SOC':
-            data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
                                                    friendly_name, "battery_scheduled_soc", state)
        elif type_var == 'unit_load_cost':
-            data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
                                                    friendly_name, "unit_load_cost_forecasts", state)
        elif type_var == 'unit_prod_price':
-            data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
                                                    friendly_name, "unit_prod_price_forecasts", state)
        elif type_var == 'mlforecaster':
-            data = retrieve_hass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
+            data = RetrieveHass.get_attr_data_dict(data_df, idx, entity_id, unit_of_measurement,
                                                    friendly_name, "scheduled_forecast", state)
        elif type_var == 'optim_status':
            data = {
diff --git a/src/emhass/web_server.py b/src/emhass/web_server.py
index 596e8c37..1b5bbe5e 100644
--- a/src/emhass/web_server.py
+++ b/src/emhass/web_server.py
@@ -24,9 +24,12 @@ def get_injection_dict(df, plot_size = 1366):
     cols_p = [i for i in df.columns.to_list() if 'P_' in i]
     # Let's round the data in the DF
+    optim_status = df['optim_status'].unique().item()
+    df.drop('optim_status', axis=1, inplace=True)
     cols_else = [i for i in df.columns.to_list() if 'P_' not in i]
-    df.loc[:, cols_p] = df[cols_p].astype(int)
-    df.loc[:, cols_else] = df[cols_else].round(2)
+    df = df.apply(pd.to_numeric)
+    df[cols_p] = df[cols_p].astype(int)
+    df[cols_else] = df[cols_else].round(3)
     # Create plots
     n_colors = len(cols_p)
     colors = px.colors.sample_colorscale("jet", [n/(n_colors -1) for n in range(n_colors)])
@@ -54,7 +54,9 @@ def get_injection_dict(df, plot_size = 1366):
     # The tables
     table1 = df.reset_index().to_html(classes='mystyle', index=False)
     cost_cols = [i for i in df.columns if 'cost_' in i]
-    table2 = df[cost_cols].reset_index().sum(numeric_only=True).to_frame(name='Cost Totals').reset_index().to_html(classes='mystyle', index=False)
+    table2 = df[cost_cols].reset_index().sum(numeric_only=True)
+    table2['optim_status'] = optim_status
+    table2 = table2.to_frame(name='Value').reset_index(names='Variable').to_html(classes='mystyle', index=False)
     # The dict of plots
     injection_dict = {}
     injection_dict['title'] = '