Cleanup unused files #515

Open · wants to merge 1 commit into base: develop
8 changes: 4 additions & 4 deletions README.md
@@ -8,17 +8,17 @@ Documentation resides in the [GitHub wiki](https://github.com/NREL/alfalfa/wiki)

## Developer Documentation

We are currently working on increasing our developer documentation. See how to run the tests on the [GitHub wiki](https://github.com/NREL/alfalfa/wiki/Running-Tests). For releasing, see the wiki's [release instructions](https://github.com/NREL/alfalfa/wiki/Release-Instructions).
Learn how to set up a local development environment on the [Github wiki](https://github.com/NREL/alfalfa/wiki/Development#how-to-set-up-a-development-environment). See how to run the tests on the [wiki](https://github.com/NREL/alfalfa/wiki/Running-Tests). For releasing, see the wiki's [release instructions](https://github.com/NREL/alfalfa/wiki/Development#how-to-run-local-tests).

# Related Repositories

## Docker Images

There are several docker images that are provided for easy deployment using [Alfalfa through Helm](https://github.com/NREL/alfalfa-helm) or other docker services. The images include:

- [Alfalfa Web](https://hub.docker.com/repository/docker/nrel/alfalfa-web)
- [Alfalfa Worker](https://hub.docker.com/repository/docker/nrel/alfalfa-worker)
- [Alfalfa Grafana](https://hub.docker.com/repository/docker/nrel/alfalfa-grafana)
- [Alfalfa Web](https://github.com/NREL/alfalfa/pkgs/container/alfalfa%2Fweb)
- [Alfalfa Worker](https://github.com/NREL/alfalfa/pkgs/container/alfalfa%2Fworker)
- [Alfalfa Grafana](https://github.com/NREL/alfalfa/pkgs/container/alfalfa%2Fgrafana)

## Python Notebooks

5 changes: 0 additions & 5 deletions alfalfa_worker/.gitignore

This file was deleted.

7 changes: 3 additions & 4 deletions alfalfa_worker/Dockerfile
@@ -24,9 +24,8 @@ RUN poetry install --only main
ENV PYTHONPATH="/alfalfa:${PYTHONPATH}"

COPY ./alfalfa_worker /alfalfa/alfalfa_worker
COPY ./alfalfa_worker/scripts /opt/scripts

COPY ./deploy /alfalfa/deploy
COPY ./deploy/wait-for-it.sh /usr/local/wait-for-it.sh

# update the path to python, which is in the Poetry virtual environment.
# The path (mfknN3Ol) is generated by Poetry and should be updated if the
@@ -41,7 +40,7 @@ ENV PATH=$HOME/.cache/pypoetry/virtualenvs/alfalfa-mfknN3Ol-py3.8/bin:$PATH
WORKDIR $HOME/alfalfa_worker


CMD ["/alfalfa/deploy/start_worker.sh"]
CMD ["/opt/scripts/start_worker.sh"]

# **** Staged build for running in development mode ****
FROM base AS dev
@@ -60,4 +59,4 @@ COPY . /alfalfa

# Enable the ability to restart the service when
# the files change
CMD ["watchmedo", "auto-restart", "--directory=/alfalfa", "--pattern=*.py", "--recursive", "--", "/alfalfa/deploy/start_worker.sh"]
CMD ["watchmedo", "auto-restart", "--directory=/alfalfa", "--pattern=*.py", "--recursive", "--", "/opt/scripts/start_worker.sh"]
5 changes: 3 additions & 2 deletions alfalfa_worker/__main__.py
@@ -1,10 +1,11 @@
print("Starting Alfalfa Dispatcher")

import logging
import os
import sys
import traceback
from pathlib import Path

logging.info("Starting Alfalfa Dispatcher")

# Determine which worker to load based on the QUEUE.
# This may be temporary for now, not sure on how else
# to determine which worker gets launched
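
The comment above describes QUEUE-based dispatch: an environment variable decides which worker implementation gets launched. A rough sketch of that pattern follows, with module and class names assumed for illustration only (the real names are not visible in this diff):

```python
# Hypothetical sketch of QUEUE-based worker dispatch; the module and class
# names (worker_modelica, worker_openstudio, WorkerModelica, WorkerOpenStudio)
# are assumptions, not taken from the repository.
import os

queue_name = os.environ.get("QUEUE", "")

if "modelica" in queue_name:
    from alfalfa_worker.worker_modelica import WorkerModelica as Worker
else:
    from alfalfa_worker.worker_openstudio import WorkerOpenStudio as Worker

worker = Worker()
worker.run()  # assumed entry point
```
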
80 changes: 0 additions & 80 deletions alfalfa_worker/build/make.inc

This file was deleted.

@@ -322,7 +322,7 @@ def load_data_and_kpisjson(self):
# Append the case directory to see the config file
sys.path.append(case_dir)

from testcase import TestCase
from alfalfa_worker.jobs.modelica.lib.testcase import TestCase
case = TestCase()
man = Data_Manager(case)
data = man.get_data()
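
The change above swaps the bare `from testcase import TestCase`, which relied on the `sys.path` append, for a package-qualified import. A condensed sketch of the resulting load sequence, with the surrounding method simplified and the `Data_Manager` import assumed from the neighbouring hunk:

```python
# Condensed illustration of the load path after this change; not the full
# method, and the case_dir handling is simplified for brevity.
import sys


def load_data_and_kpis(case_dir: str):
    # The case directory still goes on sys.path so the case's own config
    # module can be found, but TestCase now resolves from the worker package
    # rather than from whichever testcase.py the path hack happens to expose.
    sys.path.append(case_dir)

    from alfalfa_worker.jobs.modelica.lib.data.data_manager import Data_Manager
    from alfalfa_worker.jobs.modelica.lib.testcase import TestCase

    case = TestCase()
    return Data_Manager(case).get_data()
```
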
@@ -30,7 +30,7 @@
from pyfmi import load_fmu
from scipy.integrate import trapz

from alfalfa_worker.lib.data.data_manager import Data_Manager
from alfalfa_worker.jobs.modelica.lib.data.data_manager import Data_Manager


class TestCase(object):
2 changes: 1 addition & 1 deletion alfalfa_worker/jobs/modelica/step_run.py
@@ -1,11 +1,11 @@
import json
from datetime import datetime, timedelta

from alfalfa_worker.jobs.modelica.lib.testcase import TestCase
from alfalfa_worker.jobs.step_run_base import StepRunBase
from alfalfa_worker.lib.enums import PointType
from alfalfa_worker.lib.job import message
from alfalfa_worker.lib.models import Point
from alfalfa_worker.lib.testcase import TestCase


class StepRun(StepRunBase):
15 changes: 7 additions & 8 deletions alfalfa_worker/jobs/openstudio/create_run.py
@@ -8,7 +8,6 @@
from alfalfa_worker.lib.enums import RunStatus, SimType
from alfalfa_worker.lib.job import Job, JobExceptionInvalidModel
from alfalfa_worker.lib.tagutils import make_ids_unique, replace_site_id
from alfalfa_worker.lib.utils import rel_symlink


class CreateRun(Job):
@@ -67,32 +66,32 @@ def exec(self):
self.logger.info("Setting up symlinks")
idf_src_path = submitted_workflow_path / 'run' / 'in.idf'
idf_dest_path = simulation_dir / 'sim.idf'
rel_symlink(idf_src_path, idf_dest_path)
os.symlink(idf_src_path, idf_dest_path)

haystack_src_path = submitted_workflow_path / 'reports' / 'haystack_report_mapping.json'
haystack_dest_path = simulation_dir / 'haystack_report_mapping.json'
rel_symlink(haystack_src_path, haystack_dest_path)
os.symlink(haystack_src_path, haystack_dest_path)

haystack_src_path = submitted_workflow_path / 'reports' / 'haystack_report_haystack.json'
haystack_dest_path = simulation_dir / 'haystack_report_haystack.json'
rel_symlink(haystack_src_path, haystack_dest_path)
os.symlink(haystack_src_path, haystack_dest_path)

variables_src_path = submitted_workflow_path / 'reports/export_bcvtb_report_variables.cfg'
variables_dest_path = simulation_dir / 'variables.cfg'
rel_symlink(variables_src_path, variables_dest_path)
os.symlink(variables_src_path, variables_dest_path)

# variables.cfg also needs to be located next to the idf to satisfy EnergyPlus conventions
idf_src_dir = idf_src_path.parents[0]
variables_ep_path = idf_src_dir / 'variables.cfg'
rel_symlink(variables_src_path, variables_ep_path)
os.symlink(variables_src_path, variables_ep_path)
self.variables.write_files(simulation_dir)

# hack. need to find a more general approach to preserve osw resources that might be needed at simulation time
for file in self.run.glob(submitted_workflow_path / 'python' / '*'):
idfdir = idf_src_path.parents[0]
filename = os.path.basename(file)
dst = idfdir / filename
rel_symlink(file, dst)
os.symlink(file, dst)

# find weather file (if) defined by osw and copy into simulation directory
with open(submitted_osw_path, 'r') as osw:
@@ -103,7 +102,7 @@ def exec(self):
if epw_name:
epw_src_path = self.run.glob(submitted_workflow_path / '**' / epw_name)[0]
epw_dst_path = simulation_dir / 'sim.epw'
rel_symlink(epw_src_path, epw_dst_path)
os.symlink(epw_src_path, epw_dst_path)

def validate(self) -> None:
assert (self.dir / 'simulation' / 'sim.idf').exists(), "Idf was not created"
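
These hunks drop the `rel_symlink` helper (previously imported from `alfalfa_worker.lib.utils`) in favor of `os.symlink`. A minimal sketch of the likely distinction, assuming the removed helper created links whose target was expressed relative to the link's directory; its actual implementation is not shown in this diff:

```python
# Assumed reconstruction of the removed helper, for comparison only.
import os
from pathlib import Path


def rel_symlink(src: Path, dst: Path) -> None:
    # Link target expressed relative to the destination's parent directory,
    # so the link keeps working if the tree is moved or mounted elsewhere.
    os.symlink(os.path.relpath(src, dst.parent), dst)


# The PR instead calls the standard library directly, which records the
# source path exactly as given (absolute here, since the paths are built
# from absolute directories):
#   os.symlink(idf_src_path, idf_dest_path)
```
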
11 changes: 0 additions & 11 deletions alfalfa_worker/jobs/openstudio/lib/translate_osm.rb

This file was deleted.
