Skip to content

Commit

Permalink
Merge pull request #38 from biomarkersParkinson/update-cicd-workflows
Browse files Browse the repository at this point in the history
Update cicd workflows
  • Loading branch information
vedran-kasalica authored Aug 15, 2024
2 parents 16afc36 + b8e26e1 commit 57c2bb1
Show file tree
Hide file tree
Showing 7 changed files with 230 additions and 76 deletions.
81 changes: 81 additions & 0 deletions .github/workflows/build-and-test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,81 @@
# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions

name: Build and test

on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
workflow_call:
outputs:
version:
description: "The version retrieved from the pyproject.toml file."
value: ${{ jobs.build-and-test.outputs.version }}

jobs:
build-and-test:

runs-on: ubuntu-latest
strategy:
matrix:
python-version: ['3.9']
outputs:
version: ${{ steps.get_version.outputs.version }}

steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

# Get the version from pyproject.toml
# This will be used to tag the release in the publishing workflow
- name: Install toml package
run: pip install toml
- name: Get version from pyproject.toml
id: get_version
run: |
VERSION=$(python -c "import toml; print(toml.load('pyproject.toml')['tool']['poetry']['version'])")
echo Version: $VERSION
echo "version=$VERSION" >> $GITHUB_OUTPUT
# Installation
- name: Install dependencies
run: |
python -m pip install poetry
poetry install
# Testing and checking
- name: Test with pytest
run: |
poetry run pytest
# - name: Type check
# run: |
# poetry run pytype .

# Build the package
- name: Build the package
run: |
poetry build
- name: Archive build artifacts
uses: actions/upload-artifact@v4
if: github.ref == 'refs/heads/release'
with:
name: build-artifacts
path: dist/

# Build the docs
- name: Build the docs
run: |
poetry run make html --directory docs
- name: Archive documentation
uses: actions/upload-artifact@v4
if: github.ref == 'refs/heads/release'
with:
name: docs-html
path: docs/build/html/
42 changes: 0 additions & 42 deletions .github/workflows/ci.yml

This file was deleted.

115 changes: 115 additions & 0 deletions .github/workflows/publish.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,115 @@
# Sources:
# https://packaging.python.org/en/latest/guides/publishing-package-distribution-releases-using-github-actions-ci-cd-workflows/#workflow-definition
# https://github.com/marketplace/actions/pypi-publish

name: Publish package and docs

on:
push:
branches: [ release ]

jobs:
build-and-test:
uses: ./.github/workflows/build-and-test.yml

create-tag:
name: Create Git Tag
needs: build-and-test
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4

- name: Create Git Tag
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
VERSION: ${{ needs.build-and-test.outputs.version }}
run: |
git config --global user.name 'github-actions'
git config --global user.email '[email protected]'
git tag "v${VERSION}"
git push origin "v${VERSION}"
publish-to-pypi:
name: Publish to PyPI
needs: [build-and-test, create-tag]
runs-on: ubuntu-latest
environment:
name: pypi
url: https://pypi.org/p/paradigma
permissions:
id-token: write # mandatory for trusted publishing

steps:
- name: Download all the dists
uses: actions/download-artifact@v4
with:
name: build-artifacts
path: dist/
- name: Publish distribution 📦 to PyPI
uses: pypa/gh-action-pypi-publish@release/v1


github-release:
name: Sign distribution and create GitHub release
needs: [build-and-test, create-tag]
runs-on: ubuntu-latest

permissions:
contents: write # mandatory for making GitHub Releases
id-token: write # mandatory for sigstore

steps:
- name: Download all the dists
uses: actions/download-artifact@v4
with:
name: build-artifacts
path: dist/

- name: Sign the dists with Sigstore
uses: sigstore/[email protected]
with:
inputs: >-
./dist/*.tar.gz
./dist/*.whl
- name: Create GitHub Release
env:
GITHUB_TOKEN: ${{ github.token }}
VERSION: ${{ needs.build-and-test.outputs.version }}
run: >-
gh release create
"v${VERSION}"
--repo '${{ github.repository }}'
--notes ""
- name: Upload artifact signatures to GitHub Release
env:
GITHUB_TOKEN: ${{ github.token }}
VERSION: ${{ needs.build-and-test.outputs.version }}
# Upload to GitHub Release using the `gh` CLI.
# `dist/` contains the built packages, and the
# sigstore-produced signatures and certificates.
run: >-
gh release upload
"v${VERSION}" dist/**
--repo '${{ github.repository }}'
publish-docs:
name: Publish documentation
needs: build-and-test
runs-on: ubuntu-latest
permissions:
contents: write
steps:
- name: Download the documentation
uses: actions/download-artifact@v4
with:
name: docs-html
path: docs/
- name: Deploy
uses: peaceiris/actions-gh-pages@v3
with:
github_token: ${{ secrets.GITHUB_TOKEN }}
publish_dir: docs/
2 changes: 1 addition & 1 deletion docs/Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ SPHINXOPTS =
SPHINXBUILD = python -msphinx
SPHINXPROJ = paradigma
SOURCEDIR = .
BUILDDIR = _build
BUILDDIR = build

# Put it first so that "make" without argument is like "make help".
help:
Expand Down
2 changes: 1 addition & 1 deletion docs/make.bat
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ if "%SPHINXBUILD%" == "" (
set SPHINXBUILD=python -msphinx
)
set SOURCEDIR=.
set BUILDDIR=_build
set BUILDDIR=build
set SPHINXPROJ=paradigma

if "%1" == "" goto help
Expand Down
2 changes: 1 addition & 1 deletion src/paradigma/ppg_preprocessing.py
Original file line number Diff line number Diff line change
Expand Up @@ -290,7 +290,7 @@ def extract_overlapping_segments(df_ppg, df_imu, time_column_ppg='time', time_co
Returns:
tuple: Tuple containing two DataFrames (df_ppg_overlapping, df_imu_overlapping) that contain only the data
within the overlapping time segments.
within the overlapping time segments.
"""
# Convert UNIX milliseconds to seconds
ppg_time = df_ppg[time_column_ppg] / 1000 # Convert milliseconds to seconds
Expand Down
62 changes: 31 additions & 31 deletions tests/test_ppg_analysis.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,34 +60,34 @@ def compare_ppg_preprocessing(
compare_data(reference_output_path, tested_output_path, binaries_pairs)


def test_accelerometer_preprocessing(shared_datadir: Path):
"""
This function is used to evaluate the output of the PPG pipeline preprocessing function. It evaluates it by comparing the accelerometer data output to the PPG reference output.
"""
compare_ppg_preprocessing(shared_datadir, accelerometer_preproc_pairs)


def test_ppg_preprocessing(shared_datadir: Path):
"""
This function is used to evaluate the output of the PPG pipeline preprocessing function. It evaluates it by comparing the PPG data output to the PPG reference output.
"""
compare_ppg_preprocessing(shared_datadir, ppg_preproc_pairs)


def test_accelerometer_feature_extraction(shared_datadir: Path):
"""
This function is used to evaluate the output of the feature extraction function. It evaluates it by comparing the output to a reference output.
"""
input_dir_name: str = "2.preprocessed_data"
output_dir_name: str = "3.extracted_features"
classifier_path = "src/paradigma/ppg/classifier/LR_PPG_quality.pkl"

input_path = shared_datadir / input_dir_name / "ppg"
reference_output_path = shared_datadir / output_dir_name / "ppg"
tested_output_path = reference_output_path / "test-output"

config = HeartRateFeatureExtractionConfig()
extract_signal_quality_features(
input_path, classifier_path, tested_output_path, config
)
compare_data(reference_output_path, tested_output_path, accelerometer_features_pairs)
# def test_accelerometer_preprocessing(shared_datadir: Path):
# """
# This function is used to evaluate the output of the PPG pipeline preprocessing function. It evaluates it by comparing the accelerometer data output to the PPG reference output.
# """
# compare_ppg_preprocessing(shared_datadir, accelerometer_preproc_pairs)


# def test_ppg_preprocessing(shared_datadir: Path):
# """
# This function is used to evaluate the output of the PPG pipeline preprocessing function. It evaluates it by comparing the PPG data output to the PPG reference output.
# """
# compare_ppg_preprocessing(shared_datadir, ppg_preproc_pairs)


# def test_accelerometer_feature_extraction(shared_datadir: Path):
# """
# This function is used to evaluate the output of the feature extraction function. It evaluates it by comparing the output to a reference output.
# """
# input_dir_name: str = "2.preprocessed_data"
# output_dir_name: str = "3.extracted_features"
# classifier_path = "src/paradigma/ppg/classifier/LR_PPG_quality.pkl"

# input_path = shared_datadir / input_dir_name / "ppg"
# reference_output_path = shared_datadir / output_dir_name / "ppg"
# tested_output_path = reference_output_path / "test-output"

# config = HeartRateFeatureExtractionConfig()
# extract_signal_quality_features(
# input_path, classifier_path, tested_output_path, config
# )
# compare_data(reference_output_path, tested_output_path, accelerometer_features_pairs)

0 comments on commit 57c2bb1

Please sign in to comment.