Skip to content

Commit

Permalink
Merge pull request #179 from pepkit/dev
Browse files Browse the repository at this point in the history
v0.9.0 Release
  • Loading branch information
donaldcampbelljr authored Apr 19, 2024
2 parents 2251671 + 2b1f4ec commit e7d4237
Show file tree
Hide file tree
Showing 24 changed files with 886 additions and 349 deletions.
61 changes: 61 additions & 0 deletions .github/workflows/cli-coverage.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,61 @@
name: Pipestat Test Coverage

on:
  push:
    branches: [master, dev]
  pull_request:
    branches: [dev]

jobs:
  cli-coverage-report:
    strategy:
      matrix:
        python-version: ["3.10"]
        os: [ubuntu-latest]  # can't use macOS when using service containers or container jobs
    runs-on: ${{ matrix.os }}
    services:
      # Postgres service container used by the db-backend tests.
      postgres:
        image: postgres
        env:  # needs to match DB config in: ../../tests/data/config.yaml
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: pipestat-password
          POSTGRES_DB: pipestat-test
          POSTGRES_HOST: localhost
        ports:
          - 5432:5432
        # Wait until the database accepts connections before running the job steps.
        options: --health-cmd pg_isready --health-interval 10s --health-timeout 5s --health-retries 5

    steps:
      # checkout@v2 / setup-python@v1 run on retired Node runtimes; use current majors.
      - uses: actions/checkout@v4

      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}

      # Each requirements file is optional; install only those present.
      - name: Install dev dependencies
        run: if [ -f requirements/requirements-dev.txt ]; then pip install -r requirements/requirements-dev.txt; fi

      - name: Install test dependencies
        run: if [ -f requirements/requirements-test.txt ]; then pip install -r requirements/requirements-test.txt; fi

      - name: Install backend dependencies
        run: if [ -f requirements/requirements-db-backend.txt ]; then pip install -r requirements/requirements-db-backend.txt; fi

      - name: Install pipestat
        run: python -m pip install .

      - name: Run tests
        run: coverage run -m pytest

      - name: build coverage
        run: coverage html -i

      # Publish the HTML coverage report and set a commit status via smokeshow.
      - run: smokeshow upload htmlcov
        env:
          SMOKESHOW_GITHUB_STATUS_DESCRIPTION: Coverage {coverage-percentage}
          #SMOKESHOW_GITHUB_COVERAGE_THRESHOLD: 50
          SMOKESHOW_GITHUB_CONTEXT: coverage
          SMOKESHOW_GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          SMOKESHOW_GITHUB_PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
          SMOKESHOW_AUTH_KEY: ${{ secrets.SMOKESHOW_AUTH_KEY }}
13 changes: 8 additions & 5 deletions .github/workflows/run-pytest.yml
Original file line number Diff line number Diff line change
Expand Up @@ -39,14 +39,17 @@ jobs:
- name: Install test dependencies
run: if [ -f requirements/requirements-test.txt ]; then pip install -r requirements/requirements-test.txt; fi

- name: Install backend dependencies
run: if [ -f requirements/requirements-db-backend.txt ]; then pip install -r requirements/requirements-db-backend.txt; fi

- name: Install pipestat
run: python -m pip install .

- name: Run pytest tests
run: pytest tests -x -vv --cov=./ --cov-report=xml

- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
with:
file: ./coverage.xml
name: py-${{ matrix.python-version }}-${{ matrix.os }}
# - name: Upload coverage to Codecov
# uses: codecov/codecov-action@v1
# with:
# file: ./coverage.xml
# name: py-${{ matrix.python-version }}-${{ matrix.os }}
15 changes: 15 additions & 0 deletions docs/changelog.md
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,21 @@

This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html) and [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) format.

## [0.9.0] - 2024-04-19
### Fixed
- Bug with rm_record for filebackend
- Bug when using record_identifier via env variable and the CLI
### Added
- Added results history and history retrieval for both file and db backends via `retrieve_history` [#177](https://github.com/pepkit/pipestat/issues/177).
- Added `remove_record` to Pipestat manager object (it is no longer only on backend classes)
- Added `meta` key to each record for the file backend
- The db backend will now create an additional SQL history table
- Reporting history is toggleable
### Changed
- Removing the last result no longer removes the entire record.
- `pipestat_created_time` and `pipestat_modified_time` now live under the `meta` key.
- `history` lives under the `meta` key for the filebackend.

## [0.8.2] - 2024-02-22
### Changed
- Changed yacman requirement and using FutureYamlConfigManager.
Expand Down
2 changes: 1 addition & 1 deletion pipestat/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "0.8.2"
__version__ = "0.9.0"
26 changes: 18 additions & 8 deletions pipestat/argparser.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,7 @@
INSPECT_CMD = "inspect"
REMOVE_CMD = "remove"
RETRIEVE_CMD = "retrieve"
HISTORY_CMD = "history"
STATUS_CMD = "status"
INIT_CMD = "init"
SUMMARIZE_CMD = "summarize"
Expand All @@ -26,6 +27,7 @@
SUMMARIZE_CMD: "Generates HTML Report",
LINK_CMD: "Create symlinks of reported files",
SERVE_CMD: "Initializes pipestatreader API",
HISTORY_CMD: "Retrieve history of reported results for one record identifier",
}

STATUS_GET_CMD = "get"
Expand Down Expand Up @@ -169,7 +171,7 @@ def add_subparser(
)

# remove, report and inspect
for cmd in [REMOVE_CMD, REPORT_CMD, INSPECT_CMD, RETRIEVE_CMD]:
for cmd in [REMOVE_CMD, REPORT_CMD, INSPECT_CMD, RETRIEVE_CMD, HISTORY_CMD]:
sps[cmd].add_argument(
"-f",
"--results-file",
Expand Down Expand Up @@ -239,13 +241,21 @@ def add_subparser(
help=f"ID of the record to report the result for. {_env_txt('record_identifier')}",
)

sps[RETRIEVE_CMD].add_argument(
"-r",
"--record-identifier",
type=str,
metavar="R",
help=f"ID of the record to report the result for. {_env_txt('record_identifier')}",
)
for cmd in [RETRIEVE_CMD, HISTORY_CMD]:
sps[cmd].add_argument(
"-i",
"--result-identifier",
type=str,
metavar="I",
help="ID of the result to report; needs to be defined in the schema",
)
sps[cmd].add_argument(
"-r",
"--record-identifier",
type=str,
metavar="R",
help=f"ID of the record to report the result for. {_env_txt('record_identifier')}",
)

# report
sps[REPORT_CMD].add_argument(
Expand Down
3 changes: 2 additions & 1 deletion pipestat/backends/abstract.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ class PipestatBackend(ABC):
"""Abstract class representing a pipestat backend"""

def __init__(self, pipeline_type):
_LOGGER.warning("Initialize PipestatBackend")
_LOGGER.debug("Initialize PipestatBackend")
self.pipeline_type = pipeline_type

def assert_results_defined(self, results: List[str], pipeline_type: str) -> None:
Expand Down Expand Up @@ -153,6 +153,7 @@ def report(
record_identifier: str,
force_overwrite: bool = False,
result_formatter: Optional[staticmethod] = None,
history_enabled: bool = True,
) -> str:
_LOGGER.warning("Not implemented yet for this backend")

Expand Down
43 changes: 43 additions & 0 deletions pipestat/backends/db_backend/db_parsed_schema.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,6 +196,49 @@ def _get_data_type(type_name):
def file_like_table_name(self):
return self._table_name("files")

def build_history_model(self, pipeline_type):
    """Build the ORM model used to record result history for one pipeline level.

    :param str pipeline_type: which pipeline level to build for; must be
        either "project" or "sample"
    :return: a ``(model, table_name)`` tuple for the ``*_history`` table, or
        ``None`` when the schema defines neither sample- nor project-level data
    :raises PipestatError: if ``pipeline_type`` is not a recognized level
    """
    # Resolve the level-specific schema data and the parent table name.
    if pipeline_type == "project":
        parent_table = self.project_table_name
        level_data = self.project_level_data
    elif pipeline_type == "sample":
        parent_table = self.sample_table_name
        level_data = self.sample_level_data
    else:
        raise PipestatError(
            f"Building model requires pipeline type. Provided type: '{pipeline_type}' "
        )

    # Nothing to model when the schema declares no data at either level.
    if not self.sample_level_data and not self.project_level_data:
        return None

    history_table_name = parent_table + "_history"

    # Start from the level's result fields, then layer on the bookkeeping
    # columns shared with the main table (status, ids, timestamps, ...).
    field_defs = self._make_field_definitions(level_data, require_type=True)
    for add_field in (
        self._add_status_field,
        self._add_record_identifier_field,
        self._add_id_field,
        self._add_pipeline_name_field,
        self._add_created_time_field,
        self._add_modified_time_field,
    ):
        field_defs = add_field(field_defs)

    # Link each history row back to the row it is a snapshot of.
    field_defs["source_record_id"] = (
        int,
        Field(default=None, foreign_key=parent_table + ".id"),
    )

    return _create_model(history_table_name, **field_defs), history_table_name

def build_model(self, pipeline_type):
if pipeline_type == "project":
data = self.project_level_data
Expand Down
Loading

0 comments on commit e7d4237

Please sign in to comment.