
add script for evaluation using reward score #782


Workflow file for this run

name: CPU tests

on:
  pull_request:
    branches: [ "main" ]
  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

permissions:
  contents: read

concurrency:
  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
  cancel-in-progress: true

jobs:
  unit-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Set up Python 3.10
        uses: actions/setup-python@v3
        with:
          python-version: "3.10"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -e .
          pip install -r requirements/common-tests.txt
      - name: Run all tests
        env:
          NVIDIA_API_KEY: ${{ secrets.NVIDIA_API_KEY }}
        run: |
          # start the code-execution sandbox and wait for it to come up
          docker run --rm --name=local-sandbox igitman/nemo-skills-sandbox:0.4.2 &
          sleep 120
          # point the tests at the sandbox container's IP
          export NEMO_SKILLS_SANDBOX_HOST=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' local-sandbox)
          set -o pipefail  # make sure the next line returns a non-zero exit code if tests fail
          python -m nemo_skills.dataset.prepare
          python -m pytest tests/ -m "not gpu" --junitxml=pytest.xml --cov-report=term-missing:skip-covered --cov=nemo_skills --cov=pipeline --durations=30 -rs -s -vvv
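
For reference, the test step above can also be run outside of CI. The sketch below is a minimal local version under a few assumptions: Docker is installed, the repository is checked out with dependencies already installed, and a valid NVIDIA_API_KEY is available (the placeholder value below is hypothetical). The image tag, variable names, and commands are taken directly from the workflow.

# Minimal local sketch of the "Run all tests" step (assumptions noted above).
docker run --rm --name=local-sandbox igitman/nemo-skills-sandbox:0.4.2 &
sleep 120  # crude wait for the sandbox container to finish starting
# Resolve the sandbox container's IP so the tests can reach it.
export NEMO_SKILLS_SANDBOX_HOST=$(docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' local-sandbox)
export NVIDIA_API_KEY=your-key-here  # placeholder, not a real key
python -m nemo_skills.dataset.prepare
python -m pytest tests/ -m "not gpu" -rs -s -vvv

Dropping the coverage and JUnit flags keeps the local run simpler; running docker kill local-sandbox afterwards stops the sandbox, and --rm removes the container automatically.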