diff --git a/.github/actions/test/action.yml b/.github/actions/test/action.yml new file mode 100644 index 0000000..0ca2a08 --- /dev/null +++ b/.github/actions/test/action.yml @@ -0,0 +1,135 @@ +name: 'Test' +description: 'A GitHub Action that tests this action' + +inputs: + os: + description: operating system, e.g. ubuntu-22.04 + required: true + python-version: + description: Python version, e.g. 3.11 + required: true + +runs: + using: 'composite' + steps: + - name: Setup Ubuntu + if: startsWith(inputs.os, 'ubuntu') + run: | + sudo apt-get update + sudo apt-get install language-pack-en language-pack-de + shell: bash + + - name: Setup Python + if: inputs.python-version != 'installed' + uses: actions/setup-python@v4 + with: + python-version: ${{ inputs.python-version }} + + - name: Checkout + uses: actions/checkout@v3 + + - name: Detect OS + id: os + env: + OS: ${{ inputs.os }} + run: | + case "$OS" in + ubuntu*) + echo "pip-cache=~/.cache/pip" >> $GITHUB_OUTPUT + ;; + macos*) + echo "pip-cache=~/Library/Caches/pip" >> $GITHUB_OUTPUT + ;; + windows*) + echo "pip-cache=~\\AppData\\Local\\pip\\Cache" >> $GITHUB_OUTPUT + ;; + esac + echo "date=$(date +%Y%m%d 2> /dev/null || true)" >> $GITHUB_OUTPUT + shell: bash + + - name: Cache PIP Packages + uses: actions/cache@v3 + id: cache + with: + path: ${{ steps.os.outputs.pip-cache }} + key: ${{ inputs.os }}-pip-test-${{ inputs.python-version }}-${{ hashFiles('**/requirements.txt', '**/constraints.txt') }}-${{ steps.os.outputs.date }} + restore-keys: | + ${{ inputs.os }}-pip-test-${{ inputs.python-version }}-${{ hashFiles('**/requirements.txt', '**/constraints.txt') }}- + ${{ inputs.os }}-pip-test-${{ inputs.python-version }}- + ${{ inputs.os }}-pip-test- + + - name: Install Python dependencies + run: | + python3 -V + python3 -m pip freeze | sort + python3 -m pip cache info || true + python3 -m pip cache list || true + python3 -m pip install --upgrade --force pip wheel + python3 -m pip install --force -r python/requirements.txt + python3 -m pip install --force -r python/test/requirements.txt -c python/test/constraints.txt + python3 -m pip freeze | sort + python3 -m pip cache info || true + python3 -m pip cache list || true + shell: bash + + - name: Update expectation files + id: changes + continue-on-error: true + run: | + python/test/files/update_expectations.sh + git status + + if ! git diff --exit-code || [[ $(git ls-files -o --exclude-standard | wc -l) -gt 0 ]] + then + # we only upload the changed files if we can find zip + if which zip + then + (git diff --name-only && git ls-files -o --exclude-standard) | xargs -d "\n" zip changed-expectations.zip + exit 1 + fi + fi + shell: bash + - name: Upload changed expectation files + if: steps.changes.outcome == 'failure' + uses: actions/upload-artifact@v3 + with: + name: Changed expectations + path: changed-expectations.zip + if-no-files-found: error + + - name: PyTest + env: + PYTHONPATH: .. + run: | + cd python/test + python3 -m pytest --capture=tee-sys --continue-on-collection-errors --junit-xml ../../test-results/pytest.xml + shell: bash + + - name: PyTest (EST) + env: + TZ: US/Eastern + LANG: "en_US.UTF-8" + PYTHONPATH: .. + run: | + cd python/test + python3 -m pytest --capture=tee-sys --continue-on-collection-errors --junit-xml ../../test-results/pytest-est.xml + shell: bash + + - name: PyTest (CET) + env: + TZ: Europe/Berlin + LANG: "de_DE.UTF-8" + PYTHONPATH: .. 
+ run: | + cd python/test + python3 -m pytest --capture=tee-sys --continue-on-collection-errors --junit-xml ../../test-results/pytest-cet.xml + shell: bash + + - name: Upload Test Results + if: always() + uses: actions/upload-artifact@v3 + with: + name: Test Results (python-${{ inputs.python-version }}, ${{ inputs.os }}) + path: | + test-results/*.xml + unit-test-results.json diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..35bd16d --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,10 @@ +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" + schedule: + interval: "monthly" + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "weekly" diff --git a/.github/upgrade-pip-packages.sh b/.github/upgrade-pip-packages.sh new file mode 100755 index 0000000..bfcfddf --- /dev/null +++ b/.github/upgrade-pip-packages.sh @@ -0,0 +1,13 @@ +#!/bin/bash +set -euo pipefail + +base="$(dirname "$0")" + +pip install --upgrade --force pip==22.0.0 +pip install --upgrade --upgrade-strategy eager -r "$base/../python/requirements-direct.txt" + +pip install pipdeptree +pipdeptree --packages="$(sed -e "s/;.*//" -e "s/=.*//g" "$base/../python/requirements-direct.txt" | paste -s -d ,)" --freeze > "$base/../python/requirements.txt" + +git diff "$base/../python/requirements.txt" + diff --git a/.github/workflows/ci-cd.yml b/.github/workflows/ci-cd.yml new file mode 100644 index 0000000..c8dd108 --- /dev/null +++ b/.github/workflows/ci-cd.yml @@ -0,0 +1,165 @@ +name: CI/CD + +on: + push: + branches: + - 'master*' + - 'devel-*' + tags: + - '*' + pull_request: + schedule: + - cron: '0 16 * * *' + workflow_dispatch: +permissions: {} + +jobs: + dependencies: + name: Test python/requirements.txt + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + - name: Check requirements.txt against requirements-direct.txt + run: | + (diff -w python/requirements-direct.txt python/requirements.txt || true) | (! grep -e "^<") + shell: bash + - name: Check for dependency updates + continue-on-error: true + run: + .github/upgrade-pip-packages.sh + shell: bash + + test-mac: + name: "Test macOS" + uses: "./.github/workflows/test-os.yml" + with: + os: '["macos-11", "macos-12", "macos-13"]' + + test-lnx: + name: "Test Ubuntu" + uses: "./.github/workflows/test-os.yml" + with: + os: '["ubuntu-20.04", "ubuntu-22.04"]' + + test-win: + name: "Test Windows" + uses: "./.github/workflows/test-os.yml" + with: + os: '["windows-2019", "windows-2022"]' + + publish: + name: "Publish" + needs: [test-mac, test-lnx, test-win] + # we run the action from this branch whenever we can (when it runs in our repo's context) + if: > + ! 
cancelled() && + github.event.sender.login != 'dependabot[bot]' && + ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository ) + uses: "./.github/workflows/publish.yml" + permissions: + checks: write + pull-requests: write + security-events: write + + config-deploy: + name: Configure Deployment + needs: [test-mac, test-lnx, test-win] + # do not build or deploy on forked repositories + if: github.repository_owner == 'step-security' + runs-on: ubuntu-latest + outputs: + image: ${{ steps.action.outputs.image }} + image-exists: ${{ steps.image.outputs.exists }} + image-version: ${{ steps.action.outputs.version }} + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Extract action image and version + # we deploy from a specific commit on main (the one that mentions a new version the first time) + # so we need to tell docker/metadata-action to extract docker tags from that version + id: action + run: | + image=$(grep -A 10 "^runs:" action.yml | grep -E "^\s+image:\s" | sed -E -e "s/^\s+image:\s*'//" -e "s/docker:\/\///" -e "s/'\s*$//") + version=$(cut -d : -f 2 <<< "$image") + echo "image=$image" >>$GITHUB_OUTPUT + echo "version=$version" >>$GITHUB_OUTPUT + shell: bash + + - name: Check action image existence + id: image + env: + DOCKER_CLI_EXPERIMENTAL: enabled + run: | + if docker manifest inspect '${{ steps.action.outputs.image }}' + then + echo "exists=true" >>$GITHUB_OUTPUT + fi + shell: bash + + deploy: + name: Deploy to GitHub + needs: [publish, config-deploy] + + # do not build or deploy on forked repositories + if: github.repository_owner == 'step-security' + runs-on: ubuntu-latest + permissions: + packages: write + steps: + - name: Docker meta + id: docker-meta + uses: docker/metadata-action@v4 + with: + images: ghcr.io/step-security/publish-unit-test-result-action + flavor: | + latest=false + prefix=v + tags: | + type=sha + type=ref,event=tag + type=semver,pattern={{major}},value=${{ needs.config-deploy.outputs.image-version }} + type=semver,pattern={{major}}.{{minor}},value=${{ needs.config-deploy.outputs.image-version }} + type=semver,pattern={{version}},value=${{ needs.config-deploy.outputs.image-version }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v2 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Login to GitHub Container Registry + uses: docker/login-action@v2 + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Build and push Docker image + uses: docker/build-push-action@v4 + with: + tags: ${{ steps.docker-meta.outputs.tags }} + labels: ${{ steps.docker-meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + pull: true + # deploy image actions from commits pushed to master and + # deploy Dockerfile actions from pushed version tags (no major versions) + push: | + ${{ + github.event_name == 'push' && ( + needs.config-deploy.outputs.image != 'Dockerfile' && startsWith(github.ref, 'refs/heads/master') && needs.config-deploy.outputs.image-exists != 'true' || + needs.config-deploy.outputs.image == 'Dockerfile' && startsWith(github.ref, 'refs/tags/v') && contains(github.ref, '.') + ) + }} + + event_file: + name: "Event File" + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v3 + with: + name: Event File + path: ${{ github.event_path }} diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml new file mode 100644 index 0000000..3be9041 --- /dev/null +++ 
b/.github/workflows/codeql.yml @@ -0,0 +1,63 @@ +name: "CodeQL" + +on: + push: + branches: + - master + - 'devel-*' + pull_request: + # The branches below must be a subset of the branches above + branches: + - master + - 'devel-*' + schedule: + - cron: '30 15 * * 3' + +jobs: + analyze: + name: Analyze + runs-on: ubuntu-latest + permissions: + actions: read + contents: read + security-events: write + + strategy: + fail-fast: false + matrix: + language: [ 'python' ] + # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] + # Learn more about CodeQL language support at https://git.io/codeql-language-support + + steps: + - name: Checkout repository + uses: actions/checkout@v3 + + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v2 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main + + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + - name: Autobuild + uses: github/codeql-action/autobuild@v2 + + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl + + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language + + #- run: | + # make bootstrap + # make release + + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v2 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml new file mode 100644 index 0000000..0b2b425 --- /dev/null +++ b/.github/workflows/publish.yml @@ -0,0 +1,375 @@ +name: Publish + +on: + workflow_call: + +jobs: + publish-dockerfile: + name: Publish Test Results (Dockerfile) + runs-on: ubuntu-latest + permissions: + checks: write + pull-requests: write + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Download Artifacts + uses: actions/download-artifact@v3 + with: + path: artifacts + + - name: Prepare publish action from this branch + run: | + sed --in-place "s/image: .*/image: 'Dockerfile'/" action.yml + shell: bash + + - name: Publish Test Results + id: test-results + uses: ./ + with: + check_name: Test Results (Dockerfile) + files: "artifacts/**/*.xml" + json_file: "tests.json" + json_suite_details: true + json_test_case_results: true + report_suite_logs: "any" + log_level: DEBUG + + - name: JSON output + uses: ./misc/action/json-output + with: + json: '${{ steps.test-results.outputs.json }}' + json_file: 'tests.json' + + publish-docker-image: + name: Publish Test Results (Docker Image ${{ matrix.arch }}) + runs-on: ubuntu-latest + permissions: + checks: write + pull-requests: write + security-events: write + strategy: + fail-fast: false + matrix: + arch: [amd64, arm64] + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + with: + image: tonistiigi/binfmt:latest + platforms: ${{ matrix.arch }} + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v2 + + - name: Build Docker image + id: build + uses: docker/build-push-action@v4 + with: + load: true + push: false + platforms: linux/${{ 
matrix.arch }} + tags: step-security/publish-unit-test-result-action:latest + outputs: type=docker + + - name: Download Artifacts + uses: actions/download-artifact@v3 + with: + path: artifacts + + - name: Publish Test Results + id: test-results + if: always() + env: + INPUT_GITHUB_TOKEN: ${{ github.token }} + INPUT_CHECK_NAME: Test Results (Docker Image ${{ matrix.arch }}) + INPUT_FILES: "artifacts/**/*.xml" + INPUT_JSON_FILE: "tests.json" + INPUT_JSON_SUITE_DETAILS: true + INPUT_JSON_TEST_CASE_RESULTS: true + INPUT_REPORT_SUITE_LOGS: "any" + run: | + docker run --platform linux/${{ matrix.arch }} \ + --workdir $GITHUB_WORKSPACE \ + --rm \ + -e "INPUT_CHECK_NAME" \ + -e "INPUT_JSON_FILE" \ + -e "INPUT_JSON_SUITE_DETAILS" \ + -e "INPUT_JSON_TEST_CASE_RESULTS" \ + -e "INPUT_LOG_LEVEL" \ + -e "INPUT_ROOT_LOG_LEVEL" \ + -e "INPUT_GITHUB_TOKEN" \ + -e "INPUT_GITHUB_TOKEN_ACTOR" \ + -e "INPUT_GITHUB_RETRIES" \ + -e "INPUT_COMMIT" \ + -e "INPUT_COMMENT_TITLE" \ + -e "INPUT_COMMENT_MODE" \ + -e "INPUT_FAIL_ON" \ + -e "INPUT_ACTION_FAIL" \ + -e "INPUT_ACTION_FAIL_ON_INCONCLUSIVE" \ + -e "INPUT_FILES" \ + -e "INPUT_JUNIT_FILES" \ + -e "INPUT_NUNIT_FILES" \ + -e "INPUT_XUNIT_FILES" \ + -e "INPUT_TRX_FILES" \ + -e "INPUT_TIME_UNIT" \ + -e "INPUT_TEST_FILE_PREFIX" \ + -e "INPUT_REPORT_INDIVIDUAL_RUNS" \ + -e "INPUT_REPORT_SUITE_LOGS" \ + -e "INPUT_DEDUPLICATE_CLASSES_BY_FILE_NAME" \ + -e "INPUT_LARGE_FILES" \ + -e "INPUT_IGNORE_RUNS" \ + -e "INPUT_JOB_SUMMARY" \ + -e "INPUT_COMPARE_TO_EARLIER_COMMIT" \ + -e "INPUT_PULL_REQUEST_BUILD" \ + -e "INPUT_EVENT_FILE" \ + -e "INPUT_EVENT_NAME" \ + -e "INPUT_TEST_CHANGES_LIMIT" \ + -e "INPUT_CHECK_RUN_ANNOTATIONS" \ + -e "INPUT_CHECK_RUN_ANNOTATIONS_BRANCH" \ + -e "INPUT_SECONDS_BETWEEN_GITHUB_READS" \ + -e "INPUT_SECONDS_BETWEEN_GITHUB_WRITES" \ + -e "INPUT_SECONDARY_RATE_LIMIT_WAIT_SECONDS" \ + -e "INPUT_JSON_THOUSANDS_SEPARATOR" \ + -e "INPUT_SEARCH_PULL_REQUESTS" \ + -e "HOME" \ + -e "GITHUB_JOB" \ + -e "GITHUB_REF" \ + -e "GITHUB_SHA" \ + -e "GITHUB_REPOSITORY" \ + -e "GITHUB_REPOSITORY_OWNER" \ + -e "GITHUB_RUN_ID" \ + -e "GITHUB_RUN_NUMBER" \ + -e "GITHUB_RETENTION_DAYS" \ + -e "GITHUB_RUN_ATTEMPT" \ + -e "GITHUB_ACTOR" \ + -e "GITHUB_TRIGGERING_ACTOR" \ + -e "GITHUB_WORKFLOW" \ + -e "GITHUB_HEAD_REF" \ + -e "GITHUB_BASE_REF" \ + -e "GITHUB_EVENT_NAME" \ + -e "GITHUB_SERVER_URL" \ + -e "GITHUB_API_URL" \ + -e "GITHUB_GRAPHQL_URL" \ + -e "GITHUB_REF_NAME" \ + -e "GITHUB_REF_PROTECTED" \ + -e "GITHUB_REF_TYPE" \ + -e "GITHUB_WORKSPACE" \ + -e "GITHUB_ACTION" \ + -e "GITHUB_EVENT_PATH" \ + -e "GITHUB_ACTION_REPOSITORY" \ + -e "GITHUB_ACTION_REF" \ + -e "GITHUB_PATH" \ + -e "GITHUB_ENV" \ + -e "GITHUB_STEP_SUMMARY" \ + -e "GITHUB_STATE" \ + -e "GITHUB_OUTPUT" \ + -e "RUNNER_OS" \ + -e "RUNNER_ARCH" \ + -e "RUNNER_NAME" \ + -e "RUNNER_TOOL_CACHE" \ + -e "RUNNER_TEMP" \ + -e "RUNNER_WORKSPACE" \ + -e "ACTIONS_RUNTIME_URL" \ + -e "ACTIONS_RUNTIME_TOKEN" \ + -e "ACTIONS_CACHE_URL" \ + -e GITHUB_ACTIONS=true \ + -e CI=true \ + -v "$RUNNER_TEMP":"$RUNNER_TEMP" \ + -v "/var/run/docker.sock":"/var/run/docker.sock" \ + -v "/home/runner/work/_temp/_github_home":"/github/home" \ + -v "/home/runner/work/_temp/_github_workflow":"/github/workflow" \ + -v "/home/runner/work/_temp/_runner_file_commands":"/github/file_commands" \ + -v "/home/runner/work/publish-unit-test-result-action/publish-unit-test-result-action":"$GITHUB_WORKSPACE" \ + step-security/publish-unit-test-result-action:latest + shell: bash + + - name: JSON output + uses: ./misc/action/json-output + with: + 
json: '${{ steps.test-results.outputs.json }}' + json_file: 'tests.json' + + - name: Scan for vulnerabilities + id: scan + uses: crazy-max/ghaction-container-scan@v2 + with: + image: step-security/publish-unit-test-result-action:latest + dockerfile: ./Dockerfile + annotations: true + - name: Upload SARIF artifact + uses: actions/upload-artifact@v3 + with: + name: SARIF + path: ${{ steps.scan.outputs.sarif }} + - name: Upload SARIF file + if: always() && steps.scan.outputs.sarif != '' + uses: github/codeql-action/upload-sarif@v2 + with: + sarif_file: ${{ steps.scan.outputs.sarif }} + + publish-composite: + name: Publish Test Results (${{ matrix.os-label }} python ${{ matrix.python }}) + runs-on: ${{ matrix.os }} + permissions: + checks: write + pull-requests: write + + strategy: + fail-fast: false + max-parallel: 3 + matrix: + # https://docs.github.com/en/actions/using-github-hosted-runners/about-github-hosted-runners#supported-runners-and-hardware-resources + # test *-latest and newer (because newer eventually become 'latest' and should be tested to work before that) + include: + - os: macos-latest + os-label: macOS + python: "3.8" + - os: macos-latest + os-label: macOS + python: "installed" + - os: macos-11 + os-label: macOS 11 + python: "installed" + + - os: ubuntu-latest + os-label: Linux + python: "3.8" + - os: ubuntu-latest + os-label: Linux + python: "installed" + - os: ubuntu-20.04 + os-label: Linux 20.04 + python: "installed" + + - os: windows-latest + os-label: Windows + python: "installed" + - os: windows-2019 + os-label: Windows 2019 + python: "installed" + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Setup Python + if: matrix.python != 'installed' + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python }} + + - name: Download Artifacts + uses: actions/download-artifact@v3 + with: + path: artifacts + + - name: Publish Test Results + id: test-results + uses: ./composite + with: + check_name: Test Results (${{ matrix.os-label }} python ${{ matrix.python }}) + files: | + artifacts/**/*.xml + artifacts\**\*.xml + json_file: "tests.json" + json_suite_details: true + json_test_case_results: true + report_suite_logs: "any" + + - name: JSON output + uses: ./misc/action/json-output + with: + json: '${{ steps.test-results.outputs.json }}' + json_file: 'tests.json' + + publish-test-files: + name: Publish Test Files + runs-on: ubuntu-latest + permissions: + checks: write + pull-requests: write + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Copy test result files + run: cp -rv python/test/files test-files + shell: bash + + - name: Prepare publish action from this branch + run: | + sed --in-place "s/image: .*/image: 'Dockerfile'/" action.yml + shell: bash + + - name: Publish Test Results + id: test-results + uses: ./ + with: + check_name: Test Results (Test Files) + fail_on: nothing + files: | + test-files/**/*.xml + test-files/**/*.trx + test-files/**/*.json + junit_files: "test-files/junit-xml/**/*.xml" + nunit_files: "test-files/nunit/**/*.xml" + xunit_files: "test-files/xunit/**/*.xml" + trx_files: "test-files/trx/**/*.trx" + json_file: "tests.json" + json_suite_details: true + json_test_case_results: true + report_suite_logs: "any" + log_level: DEBUG + + - name: JSON output + uses: ./misc/action/json-output + with: + json: '${{ steps.test-results.outputs.json }}' + json_file: 'tests.json' + + publish-test-file: + name: Publish Test File + runs-on: ubuntu-latest + permissions: + checks: write + pull-requests: write + + 
steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Copy test junit xml files + run: cp -rv python/test/files/junit-xml test-files + shell: bash + + - name: Prepare publish action from this branch + run: | + sed --in-place "s/image: .*/image: 'Dockerfile'/" action.yml + shell: bash + + - name: Publish Test Results + id: test-results + uses: ./ + with: + check_name: Test Results (Test File) + fail_on: nothing + files: "test-files/pytest/junit.gloo.standalone.xml" + json_file: "tests.json" + json_suite_details: true + json_test_case_results: true + report_suite_logs: "any" + log_level: DEBUG + + - name: JSON output + uses: ./misc/action/json-output + with: + json: '${{ steps.test-results.outputs.json }}' + json_file: 'tests.json' diff --git a/.github/workflows/test-os.yml b/.github/workflows/test-os.yml new file mode 100644 index 0000000..9f90a0c --- /dev/null +++ b/.github/workflows/test-os.yml @@ -0,0 +1,31 @@ +name: Test OS + +on: + workflow_call: + inputs: + os: + required: true + type: string +jobs: + test: + name: Test (python-${{ matrix.python-version }}, ${{ matrix.os }}) + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: ${{ fromJson(inputs.os) }} + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12.0-rc.3", "installed"] + + include: + - os: ${{ fromJson(inputs.os)[0] }} + python-version: "3.7" + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Test + uses: ./.github/actions/test + with: + os: ${{ matrix.os }} + python-version: ${{ matrix.python-version }} diff --git a/.github/workflows/test-results.yml b/.github/workflows/test-results.yml new file mode 100644 index 0000000..4f8d219 --- /dev/null +++ b/.github/workflows/test-results.yml @@ -0,0 +1,71 @@ +name: Test Results (reference) + +on: + workflow_run: + workflows: ["CI/CD"] + types: + - completed +permissions: {} + +jobs: + test-results: + name: Test Results (reference) + if: github.event.workflow_run.conclusion != 'skipped' + runs-on: ubuntu-latest + permissions: + checks: write + pull-requests: write + + steps: + - name: Download and Extract Artifacts + uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 + with: + run_id: ${{ github.event.workflow_run.id }} + path: artifacts + + - name: Publish Test Results + id: test-results + uses: step-security/publish-unit-test-result-action/composite@main + with: + commit: ${{ github.event.workflow_run.head_sha }} + check_name: Test Results (reference) + event_file: artifacts/Event File/event.json + event_name: ${{ github.event.workflow_run.event }} + check_run_annotations_branch: "master, master-1.x, devel-1.0, devel-2.0" + files: "artifacts/**/*.xml" + log_level: DEBUG + + - name: Set badge color + if: github.event.workflow_run.event != 'schedule' + run: | + case ${{ fromJSON( steps.test-results.outputs.json ).conclusion }} in + success) + echo "BADGE_COLOR=31c653" >> $GITHUB_ENV + ;; + failure) + echo "BADGE_COLOR=800000" >> $GITHUB_ENV + ;; + neutral) + echo "BADGE_COLOR=696969" >> $GITHUB_ENV + ;; + esac + shell: bash + + - name: Create badge + if: github.event.workflow_run.event != 'schedule' + uses: emibcn/badge-action@4209421db54c8764d8932070ffd0f81715a629bf + with: + label: Tests + status: '${{ fromJSON( steps.test-results.outputs.json ).formatted.stats.tests }} tests, ${{ fromJSON( steps.test-results.outputs.json ).formatted.stats.runs }} runs: ${{ fromJSON( steps.test-results.outputs.json ).conclusion }}' + color: ${{ env.BADGE_COLOR }} + path: tests.svg + + - name: Upload badge to Gist + # 
Upload only for master branch and not for scheduled event + if: > + github.event_name == 'workflow_run' && github.event.workflow_run.head_branch == 'master' && github.event.workflow_run.event != 'schedule' + uses: andymckay/append-gist-action@1fbfbbce708a39bd45846f0955ed5521f2099c6d + with: + token: ${{ secrets.GIST_TOKEN }} + gistURL: https://gist.githubusercontent.com/step-security/612cb538c14731f1a8fefe504f519395 + file: tests.svg diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..e70ac1d --- /dev/null +++ b/Dockerfile @@ -0,0 +1,20 @@ +FROM python:3.8-alpine + +LABEL repository="https://github.com/step-security/publish-unit-test-result-action" +LABEL homepage="https://github.com/step-security/publish-unit-test-result-action" + +LABEL com.github.actions.name="Publish Test Results" +LABEL com.github.actions.description="A GitHub Action to publish test results." + +RUN apk add --no-cache --upgrade expat libuuid + +COPY python/requirements.txt /action/ +RUN apk add --no-cache build-base libffi-dev; \ + pip install --upgrade --force --no-cache-dir pip && \ + pip install --upgrade --force --no-cache-dir -r /action/requirements.txt; \ + apk del build-base libffi-dev + +COPY python/publish /action/publish +COPY python/publish_test_results.py /action/ + +ENTRYPOINT ["python", "/action/publish_test_results.py"] diff --git a/README.md b/README.md new file mode 100644 index 0000000..1ea41e8 --- /dev/null +++ b/README.md @@ -0,0 +1,806 @@ +# GitHub Action to Publish Test Results + +![Arm badge](misc/badge-arm.svg) +![Ubuntu badge](misc/badge-ubuntu.svg) +![macOS badge](misc/badge-macos.svg) +![Windows badge](misc/badge-windows.svg) +![XML badge](misc/badge-xml.svg) +![TRX badge](misc/badge-trx.svg) +![JS badge](misc/badge-js.svg) + +This [GitHub Action](https://github.com/actions) analyses test result files and +publishes the results on GitHub. It supports [JSON (Dart, Mocha), TRX (MSTest, VS) and XML (JUnit, NUnit, XUnit) file formats](#generating-test-result-files), +and runs on Linux, macOS and Windows. + +You can use this action with ![Ubuntu Linux](misc/badge-ubuntu.svg) runners (e.g. `runs-on: ubuntu-latest`) +or ![ARM Linux](misc/badge-arm.svg) self-hosted runners: + +```yaml +- name: Publish Test Results + uses: step-security/publish-unit-test-result-action@v1 + if: always() + with: + files: | + test-results/**/*.xml + test-results/**/*.trx + test-results/**/*.json +``` + +See the [notes on running this action with absolute paths](#running-with-absolute-paths) if you cannot use relative test result file paths. + +Use this for ![macOS](misc/badge-macos.svg) (e.g. `runs-on: macos-latest`) +and ![Windows](misc/badge-windows.svg) (e.g. `runs-on: windows-latest`) runners: + +```yaml +- name: Publish Test Results + uses: step-security/publish-unit-test-result-action/composite@v1 + if: always() + with: + files: | + test-results/**/*.xml + test-results/**/*.trx + test-results/**/*.json +``` + +See the [notes on running this action as a composite action](#running-as-a-composite-action) if you run it on Windows or macOS. + +If you see the `"Resource not accessible by integration"` error, you have to grant additional [permissions](#permissions), or +[setup the support for pull requests from fork repositories and branches created by Dependabot](#support-fork-repositories-and-dependabot-branches). + +The `if: always()` clause guarantees that this action always runs, even if earlier steps (e.g., the test step) in your workflow fail. 
+
+When run multiple times in one workflow, the [option](#configuration) `check_name` has to be set to a unique value for each instance.
+Otherwise, the multiple runs overwrite each other's results.
+
+***Note:** By default, this action does not fail if tests failed. This can be [configured](#configuration) via `action_fail`.
+The action that executed the tests should fail on test failure. The published results however indicate failure if tests fail or errors occur,
+which can be [configured](#configuration) via `fail_on`.*
+
+## Permissions
+
+Minimal [workflow job permissions](https://docs.github.com/en/actions/using-jobs/assigning-permissions-to-jobs#example-setting-permissions-for-a-specific-job)
+required by this action in **public** GitHub repositories are:
+
+```yaml
+permissions:
+  checks: write
+  pull-requests: write
+```
+
+The following permissions are required in **private** GitHub repos:
+
+```yaml
+permissions:
+  contents: read
+  issues: read
+  checks: write
+  pull-requests: write
+```
+
+With `comment_mode: off`, the `pull-requests: write` permission is not needed.
+
+## Generating test result files
+
+Supported test result files can be generated by many test environments. Here is a small overview, by far not complete.
+Check your favorite development and test environment for its JSON, TRX file or JUnit, NUnit, XUnit XML file support.
+
+|Test Environment |Language| JUnit XML | NUnit XML | XUnit XML | TRX file | JSON file |
+|-----------------|:------:|:---------:|:---------:|:---------:|:---:|:---:|
+|[Dart](https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md)|Dart, Flutter| | | | | :heavy_check_mark: |
+|[Jest](https://jestjs.io/docs/configuration#default-reporter)|JavaScript|:heavy_check_mark:| | | | |
+|[Maven](https://maven.apache.org/surefire/maven-surefire-plugin/examples/junit.html)|Java, Scala, Kotlin|:heavy_check_mark:| | | | |
+|[Mocha](https://mochajs.org/#xunit)|JavaScript|:heavy_check_mark:| |[not xunit](https://github.com/mochajs/mocha/issues/4758)| | :heavy_check_mark: |
+|[MStest / dotnet](https://github.com/Microsoft/vstest-docs/blob/main/docs/report.md#syntax-of-default-loggers)|.Net|[:heavy_check_mark:](https://github.com/spekt/junit.testlogger#usage)|[:heavy_check_mark:](https://github.com/spekt/nunit.testlogger#usage)|[:heavy_check_mark:](https://github.com/spekt/xunit.testlogger#usage)|[:heavy_check_mark:](https://github.com/Microsoft/vstest-docs/blob/main/docs/report.md#syntax-of-default-loggers)| |
+|[pytest](https://docs.pytest.org/en/latest/how-to/output.html#creating-junitxml-format-files)|Python|:heavy_check_mark:| | | | |
+|[sbt](https://www.scala-sbt.org/release/docs/Testing.html#Test+Reports)|Scala|:heavy_check_mark:| | | | |
+|Your favorite environment|Your favorite language|probably :heavy_check_mark:| | | | |
+
+## What is new in version 2
+
+These changes have to be considered when moving from version 1 to version 2: + +### Default value for `check_name` changed +Unless `check_name` is set in your config, the check name used to publish test results changes from `"Unit Test Results"` to `"Test Results"`. + +**Impact:** +The check with the old name will not be updated once moved to version 2. + +**Workaround to get version 1 behaviour:** +Add `check_name: "Unit Test Results"` to your config. + +### Default value for `comment_title` changed +Unless `comment_title` or `check_name` are set in your config, the title used to comment on open pull requests changes from `"Unit Test Results"` to `"Test Results"`. + +**Impact:** +Existing comments with the old title will not be updated once moved to version 2, but a new comment is created. + +**Workaround to get version 1 behaviour:** +See workaround for `check_name`. + +### Modes `create new` and `update last` removed for option `comment_mode` +The action always updates an earlier pull request comment, which is the exact behaviour of mode `update last`. +The [configuration](#configuration) options `create new` and `update last` are therefore removed. + +**Impact:** +An existing pull request comment is always updated. + +**Workaround to get version 1 behaviour:** +Not supported. + +### Option `hiding_comments` removed +The action always updates an earlier pull request comment, so hiding comments is not required anymore. + +### Option `comment_on_pr` removed +Option `comment_on_pr` has been removed. + +**Workaround to get version 1 behaviour:** +Set `comment_mode` to `always` (the default) or `off`. + +
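+
+Putting these workarounds together, a configuration that keeps the version 1 naming could look like the following sketch (the `files` pattern is a placeholder for your own test result paths):
+
+```yaml
+- name: Publish Test Results
+  uses: step-security/publish-unit-test-result-action@v1
+  if: always()
+  with:
+    # restores the version 1 check name, which also restores the version 1 comment title
+    check_name: "Unit Test Results"
+    files: "test-results/**/*.xml"
+```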
+ + +## Publishing test results + +Test results are published on GitHub at various ([configurable](#configuration)) places: + +- as [a comment](#pull-request-comment) in related pull requests +- as [a check](#commit-and-pull-request-checks) in the checks section of a commit and related pull requests +- as [annotations](#commit-and-pull-request-annotations) in the checks section and changed files section of a commit and related pull requests +- as [a job summary](#github-actions-job-summary) of the GitHub Actions workflow +- as [a check summary](#github-actions-check-summary-of-a-commit) in the GitHub Actions section of the commit + +### Pull request comment + +A comment is posted on pull requests related to the commit. + +![pull request comment example](misc/github-pull-request-comment.png) + +In presence of failures or errors, the comment links to the respective [check summary](#github-actions-check-summary-of-a-commit) with failure details. + +Subsequent runs of the action will update this comment. You can access earlier results in the comment edit history: + +![pull request comment history example](misc/github-pull-request-comment-update-history.png) + +The result distinguishes between tests and runs. In some situations, tests run multiple times, +e.g. in different environments. Displaying the number of runs allows spotting unexpected +changes in the number of runs as well. + +When tests run only a single time, no run information is displayed. Results are then shown differently then: + +![pull request comment example without runs](misc/github-pull-request-comment-without-runs.png) + +The change statistics (e.g. 5 tests ±0) might sometimes hide test removal. +Those are highlighted in pull request comments to easily spot unintended test removal: + +![pull request comment example with test changes](misc/github-pull-request-comment-with-test-changes.png) + +***Note:** This requires `check_run_annotations` to be set to `all tests, skipped tests`.* + +### Commit and pull request checks + +The checks section of a commit and related pull requests list a short summary (here `1 fail, 1 skipped, …`), +and a link to the [check summary](#github-actions-check-summary-of-a-commit) in the GitHub Actions section (here `Details`): + +Commit checks: + +![commit checks example](misc/github-checks-commit.png) + +Pull request checks: + +![pull request checks example](misc/github-pull-request-checks.png) + +### Commit and pull request annotations + +Each failing test produces an annotation with failure details in the checks section of a commit: + +![annotations example check](misc/github-checks-annotation.png) + +and the changed files section of related pull requests: + +![annotations example changed files](misc/github-pull-request-changes-annotation.png) + +***Note:** Annotations for test files are only supported when test file paths in test result files are relative to the repository root. +Use option `test_file_prefix` to add a prefix to, or remove a prefix from these file paths. See [Configuration](#configuration) section for details.* + +***Note:** Only the first failure of a test is shown. If you want to see all failures, set `report_individual_runs: "true"`.* + +### GitHub Actions job summary + +The results are added to the job summary page of the workflow that runs this action: + +![job summary example](misc/github-job-summary-full.png) + +In presence of failures or errors, the job summary links to the respective [check summary](#github-actions-check-summary-of-a-commit) with failure details. 
+
+***Note:** Job summary requires [GitHub Actions runner v2.288.0](https://github.com/actions/runner/releases/tag/v2.288.0) or above.*
+
+### GitHub Actions check summary of a commit
+
+Test results are published in the GitHub Actions check summary of the respective commit:
+
+![checks comment example](misc/github-checks-comment.png)
+
+## The symbols
+[comment]: <> (This heading is linked to from method get_link_and_tooltip_label_md)
+
+The symbols have the following meaning:
+
+|Symbol|Meaning|
+|:----:|-------|
+|:heavy_check_mark:|A successful test or run|
+|:zzz:|A skipped test or run|
+|:x:|A failed test or run|
+|:fire:|An erroneous test or run|
+|:stopwatch:|The duration of all tests or runs|
+
+***Note:*** For simplicity, "disabled" tests count towards "skipped" tests.
+
+## Configuration
+
+Files can be selected via the `files` option. It supports [glob wildcards](https://docs.python.org/3/library/glob.html#glob.glob)
+like `*`, `**`, `?`, and `[]` character ranges. The `**` wildcard matches all files and directories recursively: `./`, `./*/`, `./*/*/`, etc.
+
+You can provide multiple file patterns, one pattern per line. Patterns starting with `!` exclude the matching files.
+There has to be at least one pattern starting without a `!`:
+
+```yaml
+with:
+  files: |
+    *.xml
+    !config.xml
+```
+
+The list of most notable options:
+
+|Option|Default Value|Description|
+|:-----|:-----:|:----------|
+|`files`|_no default_|File patterns of test result files. Relative paths are known to work best, while the composite action [also works with absolute paths](#running-with-absolute-paths). Supports `*`, `**`, `?`, and `[]` character ranges. Use multiline string for multiple patterns. Patterns starting with `!` exclude the matching files. There has to be at least one pattern starting without a `!`.|
+|`check_name`|`"Test Results"`|An alternative name for the check result. Required to be unique for each instance in one workflow.|
+|`comment_title`|same as `check_name`|An alternative name for the pull request comment.|
+|`comment_mode`|`always`|The action posts comments to pull requests that are associated with the commit. Set to:<br/>`always` - always comment<br/>`changes` - comment when changes w.r.t. the target branch exist<br/>`changes in failures` - when changes in the number of failures and errors exist<br/>`changes in errors` - when changes in the number of (only) errors exist<br/>`failures` - when failures or errors exist<br/>`errors` - when (only) errors exist<br/>`off` - to not create pull request comments.|
+|`large_files`|`false` unless `ignore_runs` is `true`|Support for large files is enabled when set to `true`. Defaults to `false`, unless `ignore_runs` is `true`.|
+|`ignore_runs`|`false`|Does not collect test run information from the test result files, which is useful for very large files. This disables any check run annotations.|
+
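+
+For instance, a job that processes very large test result files might combine some of these options as in the following sketch (the check name and file pattern are illustrative placeholders):
+
+```yaml
+- name: Publish Test Results
+  uses: step-security/publish-unit-test-result-action@v1
+  if: always()
+  with:
+    check_name: "Test Results (large files)"
+    files: "test-results/**/*.xml"
+    large_files: true   # enable support for very large result files
+    ignore_runs: true   # skip test run details and check run annotations to reduce processing
+```
+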
+**Options related to Git and GitHub**
+
+|Option|Default Value|Description|
+|:-----|:-----:|:----------|
+|`commit`|`${{env.GITHUB_SHA}}`|An alternative commit SHA to which test results are published. The `push` and `pull_request` events are handled, but for other [workflow events](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows#push) `GITHUB_SHA` may refer to different kinds of commits. See [GitHub Workflow documentation](https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows) for details.|
+|`github_token`|`${{github.token}}`|An alternative GitHub token, other than the default provided by GitHub Actions runner.|
+|`github_token_actor`|`github-actions`|The name of the GitHub app that owns the GitHub API Access Token (see `github_token`). Used to identify pull request comments created by this action during earlier runs. Has to be set when `github_token` is set to a GitHub app installation token (other than GitHub actions). Otherwise, existing comments will not be updated, but new comments are created. Note: this does not change the bot name of the pull request comments.|
+|`github_retries`|`10`|Requests to the GitHub API are retried this number of times. The value must be a positive integer or zero.|
+|`seconds_between_github_reads`|`0.25`|Sets the number of seconds the action waits between concurrent read requests to the GitHub API.|
+|`seconds_between_github_writes`|`2.0`|Sets the number of seconds the action waits between concurrent write requests to the GitHub API.|
+|`secondary_rate_limit_wait_seconds`|`60.0`|Sets the number of seconds to wait before retrying secondary rate limit errors. If not set, the default defined in the PyGithub library is used (currently 60 seconds).|
+|`pull_request_build`|`"merge"`|As part of pull requests, GitHub builds a merge commit, which combines the commit and the target branch. If tests ran on the actual pushed commit, then set this to `"commit"`.|
+|`event_file`|`${{env.GITHUB_EVENT_PATH}}`|An alternative event file to use. Useful to replace a `workflow_run` event file with the actual source event file.|
+|`event_name`|`${{env.GITHUB_EVENT_NAME}}`|An alternative event name to use. Useful to replace a `workflow_run` event name with the actual source event name: `${{ github.event.workflow_run.event }}`.|
+|`search_pull_requests`|`false`|Prior to v2.6.0, the action used the `/search/issues` REST API to find pull requests related to a commit. If you need to restore that behaviour, set this to `"true"`. Defaults to `false`.|
+
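+
+As an illustration, publishing with a GitHub App installation token might look roughly like the following sketch; the token-generating step `app-token` and the app name `my-ci-app` are hypothetical placeholders:
+
+```yaml
+- name: Publish Test Results
+  uses: step-security/publish-unit-test-result-action@v1
+  if: always()
+  with:
+    github_token: ${{ steps.app-token.outputs.token }}  # installation token from a preceding (hypothetical) step
+    github_token_actor: my-ci-app                       # app name, so comments from earlier runs are found and updated
+    files: "test-results/**/*.xml"
+```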
+ +
+**Options related to reporting test results**
+
+|Option|Default Value|Description|
+|:-----|:-----:|:----------|
+|`time_unit`|`seconds`|Time values in the test result files have this unit. Supports `seconds` and `milliseconds`.|
+|`test_file_prefix`|`none`|Paths in the test result files should be relative to the git repository for annotations to work best. This prefix is added to (if starting with "+"), or removed from (if starting with "-") test file paths. Examples: "+src/" or "-/opt/actions-runner".|
+|`job_summary`|`true`|When set to `true`, the results are published as part of the [job summary page](https://github.blog/2022-05-09-supercharging-github-actions-with-job-summaries/) of the workflow run.|
+|`compare_to_earlier_commit`|`true`|Test results are compared to results of earlier commits to show changes:<br/>`false` - disable comparison, `true` - compare across commits.|
+|`test_changes_limit`|`10`|Limits the number of removed or skipped tests reported on pull request comments. This report can be disabled with a value of `0`.|
+|`report_individual_runs`|`false`|Individual runs of the same test may see different failures. Reports all individual failures when set `true`, and the first failure only otherwise.|
+|`report_suite_logs`|`none`|In addition to reporting regular test logs, also report test suite logs. These are logs provided on suite level, not individual test level. Set to `info` for normal output, `error` for error output, `any` for both, or `none` for no suite logs at all. Defaults to `none`.|
+|`deduplicate_classes_by_file_name`|`false`|De-duplicates classes with the same name by their file name when set `true`, combines test results for those classes otherwise.|
+|`check_run_annotations`|`all tests, skipped tests`|Adds additional information to the check run. This is a comma-separated list of any of the following values:<br/>`all tests` - list all found tests,<br/>`skipped tests` - list all skipped tests.<br/>Set to `none` to add no extra annotations at all.|
+|`check_run_annotations_branch`|`event.repository.default_branch` or `"main, master"`|Adds check run annotations only on given branches. If not given, this defaults to the default branch of your repository, e.g. `main` or `master`. Comma separated list of branch names allowed, asterisk `"*"` matches all branches. Example: `main, master, branch_one`.|
+|`json_file`|no file|Results are written to this JSON file.|
+|`json_thousands_separator`|`" "`|Formatted numbers in JSON use this character to separate groups of thousands. Common values are "," or ".". Defaults to punctuation space (\u2008).|
+|`json_suite_details`|`false`|Write out all suite details to the JSON file. Setting this to `true` can greatly increase the size of the output. Defaults to `false`.|
+|`json_test_case_results`|`false`|Write out all individual test case results to the JSON file. Setting this to `true` can greatly increase the size of the output. Defaults to `false`.|
+|`fail_on`|`"test failures"`|Configures the state of the created test result check run. With `"test failures"` it fails if any test fails or test errors occur. It never fails when set to `"nothing"`, and fails only on errors when set to `"errors"`.|
+|`action_fail`|`false`|When set `true`, the action itself fails when tests have failed (see `fail_on`).|
+|`action_fail_on_inconclusive`|`false`|When set `true`, the action itself fails when tests are inconclusive (no test results).|
+
+Pull request comments highlight removal of tests or tests that the pull request moves into skip state.
+Those removed or skipped tests are added as a list, which is limited in length by `test_changes_limit`,
+which defaults to `10`. Reporting these tests can be disabled entirely by setting this limit to `0`.
+This feature requires `check_run_annotations` to contain `all tests` in order to detect test addition
+and removal, and `skipped tests` to detect new skipped and un-skipped tests, as well as
+`check_run_annotations_branch` to contain your default branch.
+
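+
+As a sketch, the requirements described above might translate into a configuration like this (values are illustrative):
+
+```yaml
+- name: Publish Test Results
+  uses: step-security/publish-unit-test-result-action@v1
+  if: always()
+  with:
+    files: "test-results/**/*.xml"
+    check_run_annotations: "all tests, skipped tests"  # needed to detect added, removed and (un-)skipped tests
+    check_run_annotations_branch: "main"               # must include your default branch
+    test_changes_limit: 25                             # list at most 25 removed or skipped tests
+```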
+
+## JSON result
+
+The gathered test information is accessible as JSON, either via the [GitHub Actions steps outputs](https://docs.github.com/en/actions/learn-github-actions/contexts#steps-context) string or via a JSON file.
+
+Access JSON via step outputs + +The `json` output of the action can be accessed through the expression `steps..outputs.json`. + +```yaml +- name: Publish Test Results + uses: step-security/publish-unit-test-result-action@v1 + id: test-results + if: always() + with: + files: "test-results/**/*.xml" + +- name: Conclusion + run: echo "Conclusion is ${{ fromJSON( steps.test-results.outputs.json ).conclusion }}" +``` + +Here is an example JSON: +```json +{ + "title": "4 parse errors, 4 errors, 23 fail, 18 skipped, 227 pass in 39m 12s", + "summary": "  24 files  ±0      4 errors  21 suites  ±0   39m 12s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action \"duration of all tests\") ±0s\n272 tests ±0  227 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action \"passed tests\") ±0  18 [:zzz:](https://github.com/step-security/publish-unit-test-result-action \"skipped / disabled tests\") ±0  23 [:x:](https://github.com/step-security/publish-unit-test-result-action \"failed tests\") ±0  4 [:fire:](https://github.com/step-security/publish-unit-test-result-action \"test errors\") ±0 \n437 runs  ±0  354 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action \"passed tests\") ±0  53 [:zzz:](https://github.com/step-security/publish-unit-test-result-action \"skipped / disabled tests\") ±0  25 [:x:](https://github.com/step-security/publish-unit-test-result-action \"failed tests\") ±0  5 [:fire:](https://github.com/step-security/publish-unit-test-result-action \"test errors\") ±0 \n\nResults for commit 11c02e56. ± Comparison against earlier commit d8ce4b6c.\n", + "conclusion": "success", + "stats": { + "files": 24, + "errors": 4, + "suites": 21, + "duration": 2352, + "tests": 272, + "tests_succ": 227, + "tests_skip": 18, + "tests_fail": 23, + "tests_error": 4, + "runs": 437, + "runs_succ": 354, + "runs_skip": 53, + "runs_fail": 25, + "runs_error": 5, + "commit": "11c02e561e0eb51ee90f1c744c0ca7f306f1f5f9" + }, + "stats_with_delta": { + "files": { + "number": 24, + "delta": 0 + }, + …, + "commit": "11c02e561e0eb51ee90f1c744c0ca7f306f1f5f9", + "reference_type": "earlier", + "reference_commit": "d8ce4b6c62ebfafe1890c55bf7ea30058ebf77f2" + }, + "formatted": { + "stats": { + "duration": "2 352", + … + }, + "stats_with_delta": { + "duration": { + "number": "2 352", + "delta": "+12" + }, + … + } + }, + "annotations": 31 +} +``` + +The `formatted` key provides a copy of `stats` and `stats_with_delta`, where numbers are formatted to strings. +For example, `"duration": 2352` is formatted as `"duration": "2 352"`. The thousands separator can be configured +via `json_thousands_separator`. Formatted numbers are especially useful when those values are used where formatting +is not easily available, e.g. when [creating a badge from test results](#create-a-badge-from-test-results). + +
+ +
+Access JSON via file + +The optional `json_file` allows to [configure](#configuration) a file where extended JSON information are to be written. +Compared to `"Access JSON via step outputs"` above, `errors` and `annotations` contain more information +than just the number of errors and annotations, respectively. + +Additionally, `json_test_case_results` can be enabled to add the `cases` field to the JSON file, which provides +all test results of all tests. Enabling this may greatly increase the output size of the JSON file. + +```json +{ + …, + "stats": { + …, + "errors": [ + { + "file": "test-files/empty.xml", + "message": "File is empty.", + "line": null, + "column": null + } + ], + … + }, + …, + "annotations": [ + { + "path": "test/test.py", + "start_line": 819, + "end_line": 819, + "annotation_level": "warning", + "message": "test-files/junit.fail.xml", + "title": "1 out of 3 runs failed: test_events (test.Tests)", + "raw_details": "self = \n\n def test_events(self):\n > self.do_test_events(3)\n\n test.py:821:\n _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\n test.py:836: in do_test_events\n self.do_test_rsh(command, 143, events=events)\n test.py:852: in do_test_rsh\n self.assertEqual(expected_result, res)\n E AssertionError: 143 != 0\n " + } + ], + …, + "cases": [ + { + "class_name": "test.test_spark_keras.SparkKerasTests", + "test_name": "test_batch_generator_fn", + "states": { + "success": [ + { + "result_file": "test-files/junit-xml/pytest/junit.spark.integration.1.xml", + "test_file": "test/test_spark_keras.py", + "line": 454, + "class_name": "test.test_spark_keras.SparkKerasTests", + "test_name": "test_batch_generator_fn", + "result": "success", + "time": 0.006 + }, + { + "result_file": "test-files/junit-xml/pytest/junit.spark.integration.2.xml", + "test_file": "test/test_spark_keras.py", + "line": 454, + "class_name": "test.test_spark_keras.SparkKerasTests", + "test_name": "test_batch_generator_fn", + "result": "success", + "time": 0.006 + } + ] + } + }, + … + ], + … +} +``` + +
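+
+A minimal sketch of writing this file and preserving it as a workflow artifact could look like the following (the artifact name and paths are placeholders):
+
+```yaml
+- name: Publish Test Results
+  uses: step-security/publish-unit-test-result-action@v1
+  if: always()
+  with:
+    files: "test-results/**/*.xml"
+    json_file: "tests.json"        # write extended results to this file
+    json_suite_details: true       # include per-suite details (can grow large)
+    json_test_case_results: true   # include individual test case results (can grow large)
+
+- name: Upload JSON results
+  if: always()
+  uses: actions/upload-artifact@v3
+  with:
+    name: JSON results
+    path: tests.json
+```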
+
+See [Create a badge from test results](#create-a-badge-from-test-results) for an example of how to create a badge from this JSON.
+
+## Use with matrix strategy
+
+In a scenario where your tests run multiple times in different environments (e.g. a [strategy matrix](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstrategymatrix)),
+the action should run only once over all test results. For this, put the action into a separate job
+that depends on all your test environments. Those need to upload the test results as artifacts, which
+are then all downloaded by your publish job.
+
+Example workflow YAML + +```yaml +name: CI + +on: [push] +permissions: {} + +jobs: + build-and-test: + name: Build and Test (Python ${{ matrix.python-version }}) + runs-on: ubuntu-latest + + strategy: + fail-fast: false + matrix: + python-version: [3.6, 3.7, 3.8] + + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Setup Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: PyTest + run: python -m pytest test --junit-xml pytest.xml + + - name: Upload Test Results + if: always() + uses: actions/upload-artifact@v3 + with: + name: Test Results (Python ${{ matrix.python-version }}) + path: pytest.xml + + publish-test-results: + name: "Publish Tests Results" + needs: build-and-test + runs-on: ubuntu-latest + permissions: + checks: write + + # only needed unless run with comment_mode: off + pull-requests: write + + # only needed for private repository + contents: read + + # only needed for private repository + issues: read + if: always() + + steps: + - name: Download Artifacts + uses: actions/download-artifact@v3 + with: + path: artifacts + + - name: Publish Test Results + uses: step-security/publish-unit-test-result-action@v1 + with: + files: "artifacts/**/*.xml" +``` +
+
+Please consider [supporting fork repositories and dependabot branches](#support-fork-repositories-and-dependabot-branches)
+together with your matrix strategy.
+
+## Support fork repositories and dependabot branches
+[comment]: <> (This heading is linked to from main method in publish_unit_test_results.py)
+
+Getting test results of pull requests created by contributors from fork repositories or by
+[Dependabot](https://docs.github.com/en/github/administering-a-repository/keeping-your-dependencies-updated-automatically)
+requires some additional setup. Without this, the action will fail with the
+`"Resource not accessible by integration"` error in those situations.
+
+In this setup, your CI workflow does not need to publish test results anymore, as they are **always** published from a separate workflow.
+
+1. Your CI workflow has to upload the GitHub event file and test result files.
+2. Set up an additional workflow on `workflow_run` events, which starts on completion of the CI workflow,
+   downloads the event file and the test result files, and runs this action on them.
+   This workflow publishes the test results for pull requests from fork repositories and dependabot,
+   as well as all "ordinary" runs of your CI workflow.
+
+Step-by-step instructions + +1. Add the following job to your CI workflow to upload the event file as an artifact: + +```yaml +event_file: + name: "Event File" + runs-on: ubuntu-latest + steps: + - name: Upload + uses: actions/upload-artifact@v3 + with: + name: Event File + path: ${{ github.event_path }} +``` + +2. Add the following action step to your CI workflow to upload test results as artifacts. +Adjust the value of `path` to fit your setup: + +```yaml +- name: Upload Test Results + if: always() + uses: actions/upload-artifact@v3 + with: + name: Test Results + path: | + test-results/*.xml +``` + +3. If you run tests in a [strategy matrix](https://docs.github.com/en/actions/reference/workflow-syntax-for-github-actions#jobsjob_idstrategymatrix), +make the artifact name unique for each job, e.g.: +```yaml + with: + name: Test Results (${{ matrix.python-version }}) + path: … +``` + +4. Add the following workflow that publishes test results. It downloads and extracts +all artifacts into `artifacts/ARTIFACT_NAME/`, where `ARTIFACT_NAME` will be `Upload Test Results` +when setup as above, or `Upload Test Results (…)` when run in a strategy matrix. + + It then runs the action on files matching `artifacts/**/*.xml`. +Change the `files` pattern with the path to your test artifacts if it does not work for you. +The publish action uses the event file of the CI workflow. + + Also adjust the value of `workflows` (here `"CI"`) to fit your setup: + +```yaml +name: Test Results + +on: + workflow_run: + workflows: ["CI"] + types: + - completed +permissions: {} + +jobs: + test-results: + name: Test Results + runs-on: ubuntu-latest + if: github.event.workflow_run.conclusion != 'skipped' + + permissions: + checks: write + + # needed unless run with comment_mode: off + pull-requests: write + + # only needed for private repository + contents: read + + # only needed for private repository + issues: read + + # required by download step to access artifacts API + actions: read + + steps: + - name: Download and Extract Artifacts + uses: dawidd6/action-download-artifact@246dbf436b23d7c49e21a7ab8204ca9ecd1fe615 + with: + run_id: ${{ github.event.workflow_run.id }} + path: artifacts + + - name: Publish Test Results + uses: step-security/publish-unit-test-result-action@v1 + with: + commit: ${{ github.event.workflow_run.head_sha }} + event_file: artifacts/Event File/event.json + event_name: ${{ github.event.workflow_run.event }} + files: "artifacts/**/*.xml" +``` + +Note: Running this action on `pull_request_target` events is [dangerous if combined with code checkout and code execution](https://securitylab.github.com/research/github-actions-preventing-pwn-requests). +This event is therefore not use here intentionally! +
+ +## Running with multiple event types (pull_request, push, schedule, …) + +This action comments on a pull request each time it is executed via any event type. +When run for more than one event type, runs will overwrite earlier pull request comments. + +Note that `pull_request` events may produce different test results than any other event type. +The `pull_request` event runs the workflow on a merge commit, i.e. the commit merged into the target branch. +All other event types run on the commit itself. + +If you want to distinguish between test results from `pull_request` and `push`, or want to distinguish the original test results +of the `push` to master from subsequent `schedule` events, you may want to add the following to your workflow. + +
+There are two possible ways to avoid the publish action to overwrite results from other event types: + +### Test results per event type + +Add the event name to `check_name` to avoid different event types overwriting each other's results: + +```yaml +- name: Publish Test Results + uses: step-security/publish-unit-test-result-action@v1 + if: always() + with: + check_name: "Test Results (${{ github.event.workflow_run.event || github.event_name }})" + files: "test-results/**/*.xml" +``` + +### Pull request comments only for pull_request events + +Disabling the pull request comment mode (`"off"`) for events other than `pull_request` avoids that any other event type overwrites pull request comments: + +```yaml +- name: Publish Test Results + uses: step-security/publish-unit-test-result-action@v1 + if: always() + with: + # set comment_mode to "always" for pull_request event, set to "off" for all other event types + comment_mode: ${{ (github.event.workflow_run.event == 'pull_request' || github.event_name == 'pull_request') && 'always' || 'off' }} + files: "test-results/**/*.xml" +``` +
+ +## Create a badge from test results + +
+Example workflow YAML

```yaml
steps:
- …
- name: Publish Test Results
  uses: step-security/publish-unit-test-result-action@v1
  id: test-results
  if: always()
  with:
    files: "test-results/**/*.xml"

- name: Set badge color
  shell: bash
  run: |
    case ${{ fromJSON( steps.test-results.outputs.json ).conclusion }} in
      success)
        echo "BADGE_COLOR=31c653" >> $GITHUB_ENV
        ;;
      failure)
        echo "BADGE_COLOR=800000" >> $GITHUB_ENV
        ;;
      neutral)
        echo "BADGE_COLOR=696969" >> $GITHUB_ENV
        ;;
    esac

- name: Create badge
  uses: emibcn/badge-action@d6f51ff11b5c3382b3b88689ae2d6db22d9737d1
  with:
    label: Tests
    status: '${{ fromJSON( steps.test-results.outputs.json ).formatted.stats.tests }} tests, ${{ fromJSON( steps.test-results.outputs.json ).formatted.stats.runs }} runs: ${{ fromJSON( steps.test-results.outputs.json ).conclusion }}'
    color: ${{ env.BADGE_COLOR }}
    path: badge.svg

- name: Upload badge to Gist
  # Upload only for master branch
  if: >
    github.event_name == 'workflow_run' && github.event.workflow_run.head_branch == 'master' ||
    github.event_name != 'workflow_run' && github.ref == 'refs/heads/master'
  uses: andymckay/append-gist-action@1fbfbbce708a39bd45846f0955ed5521f2099c6d
  with:
    token: ${{ secrets.GIST_TOKEN }}
    gistURL: https://gist.githubusercontent.com/{user}/{id}
    file: badge.svg
```

You have to create a personal access token (PAT) with `gist` permission only. Add it to your GitHub Actions secrets, in the above example under the secret name `GIST_TOKEN`.

Set `gistURL` to the Gist that you want to write the badge file to, in the form `https://gist.githubusercontent.com/{user}/{id}`.

You can then use the badge via this URL: https://gist.githubusercontent.com/{user}/{id}/raw/badge.svg
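Once the badge file exists in the Gist, it can be referenced like any other image, for example in a README (`{user}` and `{id}` are the same placeholders as above):

```markdown
![Tests](https://gist.githubusercontent.com/{user}/{id}/raw/badge.svg)
```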
+

## Running with absolute paths

It is known that this action works best with relative paths (e.g. `test-results/**/*.xml`),
but most absolute paths (e.g. `/tmp/test-results/**/*.xml`) require using the composite variant
of this action (`uses: step-security/publish-unit-test-result-action/composite@v1`).

If you have to use absolute paths with the non-composite variant of this action (`uses: step-security/publish-unit-test-result-action@v1`),
you have to copy the files to a relative path first, and then use the relative path:

```yaml
- name: Copy Test Results
  if: always()
  run: |
    cp -Lpr /tmp/test-results test-results
  shell: bash

- name: Publish Test Results
  uses: step-security/publish-unit-test-result-action@v1
  if: always()
  with:
    files: |
      test-results/**/*.xml
      test-results/**/*.trx
      test-results/**/*.json
```

Using the non-composite variant of this action is recommended as it starts up much quicker.

## Running as a composite action

Running this action as a composite action allows running it on various operating systems as it
does not require Docker. The composite action, however, requires a Python3 environment to be set up
on the action runner. All GitHub-hosted runners (Ubuntu, Windows Server and macOS) provide a suitable
Python3 environment out-of-the-box.

Self-hosted runners may require setting up a Python environment first:

```yaml
- name: Setup Python
  uses: actions/setup-python@v4
  with:
    python-version: 3.8
```

Self-hosted runners for Windows require a Bash shell to be installed. The easiest way to get one is to install
Git for Windows, which comes with Git BASH. Make sure that the location of `bash.exe` is part of the `PATH`
environment variable seen by the self-hosted runner.
diff --git a/action.yml b/action.yml new file mode 100644 index 0000000..181f02e --- /dev/null +++ b/action.yml @@ -0,0 +1,150 @@ +name: 'Publish Test Results' +description: 'Publishes JUnit, NUnit, XUnit, TRX, JSON test results on GitHub for .NET, Dart, Java, JS, Jest, Mocha, Python, Scala, …' + +inputs: + github_token: + description: 'GitHub API Access Token.' + default: ${{ github.token }} + required: false + github_token_actor: + description: 'The name of the GitHub app that owns the GitHub API Access Token (see github_token). Used to identify pull request comments created by this action during earlier runs. Has to be set when `github_token` is set to a GitHub app installation token (other than GitHub actions). Otherwise, existing comments will not be updated, but new comments created. Note: this does not change the bot name of the pull request comments. Defaults to "github-actions".' + default: 'github-actions' + required: false + github_retries: + description: 'Requests to the GitHub API are retried this number of times. The value must be a positive integer or zero.' + default: '10' + required: false + commit: + description: 'Commit SHA to which test results are published. Only needed if the value of GITHUB_SHA does not work for you.' + required: false + check_name: + description: 'Name of the created check run.' + default: 'Test Results' + required: false + comment_title: + description: 'An alternative title for the pull request comment. Defaults to value of check_name input.' + required: false + comment_mode: + description: 'The action posts comments to pull requests that are associated with the commit. Set to "always" - always comment, "changes" - comment when changes w.r.t.
the target branch exist, "changes in failures" - when changes in the number of failures and errors exist, "changes in errors" - when changes in the number of (only) errors exist, "failures" - when failures or errors exist, "errors" - when (only) errors exist, "off" - to not create pull request comments.' + default: 'always' + required: false + fail_on: + description: 'The created test result check run has failure state if any test fails or test errors occur. Never fails when set to "nothing", fails only on errors when set to "errors". Default is "test failures".' + default: 'test failures' + required: false + action_fail: + description: 'When set "true", the action itself fails when tests have failed (see option fail_on).' + default: 'false' + required: false + action_fail_on_inconclusive: + description: 'When set "true", the action itself fails when tests are inconclusive (no test results).' + default: 'false' + required: false + files: + description: 'File patterns of test result files. Relative paths are known to work best, while the composite action also works with absolute paths. Supports "*", "**", "?", and "[]" character ranges. Use multiline string for multiple patterns. Patterns starting with "!" exclude the matching files. There have to be at least one pattern starting without a "!".' + required: false + junit_files: + description: 'Deprecated, use "files" option instead.' + required: false + nunit_files: + description: 'Deprecated, use "files" option instead.' + required: false + xunit_files: + description: 'Deprecated, use "files" option instead.' + required: false + trx_files: + description: 'Deprecated, use "files" option instead.' + required: false + time_unit: + description: 'Time values in the test result files have this unit. Supports "seconds" and "milliseconds".' + default: 'seconds' + required: false + test_file_prefix: + description: 'Paths in the test result files should be relative to the git repository for annotations to work best. This prefix is added to (if starting with "+"), or remove from (if starting with "-") test file paths. Examples: "+src/" or "-/opt/actions-runner".' + required: false + report_individual_runs: + description: 'Individual runs of the same test may see different failures. Reports all individual failures when set "true" or the first only otherwise.' + required: false + report_suite_logs: + description: 'In addition to reporting regular test logs, also report test suite logs. These are logs provided on suite level, not individual test level. Set to "info" for normal output, "error" for error output, "any" for both, or "none" for no suite logs at all. Defaults to "none".' + default: 'none' + required: false + deduplicate_classes_by_file_name: + description: 'De-duplicates classes with same name by their file name when set "true", combines test results for those classes otherwise.' + required: false + large_files: + description: 'Support for large files is enabled when set to "true". Defaults to "false", unless ignore_runs is "true".' + required: false + ignore_runs: + description: 'Does not collect test run information from the test result files, which is useful for very large files. This disables any check run annotations.' + default: 'false' + required: false + job_summary: + description: 'Set to "true", the results are published as part of the job summary page of the workflow run.' 
+ default: 'true' + required: false + compare_to_earlier_commit: + description: 'Test results are compared to results of earlier commits to highlight changes: "false" - disable comparison, "true" - compare across commits.' + default: 'true' + required: false + pull_request_build: + description: 'As part of pull requests, GitHub builds a merge commit, which combines the commit and the target branch. If tests ran on the actual pushed commit, then set this to "commit". Defaults to "merge".' + default: 'merge' + required: false + event_file: + description: 'An alternative event file to use. Useful to replace a "workflow_run" event file with the actual source event file.' + required: false + event_name: + description: 'An alternative event name to use. Useful to replace a "workflow_run" event name with the actual source event name: github.event.workflow_run.event.' + required: false + test_changes_limit: + description: 'Limits the number of removed or skipped tests reported on pull request comments. This report can be disabled with a value of 0. The default is 10.' + required: false + check_run_annotations: + description: 'Adds additional information to the check run. This is a comma-separated list of any of the following values: "all tests" - list all found tests, "skipped tests" - list all skipped tests. Set to "none" to add no extra annotations at all.' + default: 'all tests, skipped tests' + required: false + check_run_annotations_branch: + description: 'Adds check run annotations only on given branches. Comma-separated list of branch names allowed, asterisk "*" matches all branches. Defaults to event.repository.default_branch or "main, master".' + required: false + seconds_between_github_reads: + description: 'Sets the number of seconds the action waits between concurrent read requests to the GitHub API. This throttles the API usage to avoid abuse rate limits: https://docs.github.com/en/rest/overview/resources-in-the-rest-api#abuse-rate-limits.' + default: '0.25' + required: false + seconds_between_github_writes: + description: 'Sets the number of seconds the action waits between concurrent write requests to the GitHub API. This throttles the API usage to avoid abuse rate limits: https://docs.github.com/en/rest/overview/resources-in-the-rest-api#abuse-rate-limits.' + default: '2.0' + required: false + secondary_rate_limit_wait_seconds: + description: 'Sets the number of seconds to wait before retrying secondary rate limit errors. If not set, the default defined in the PyGithub library is used (currently 60 seconds).' + required: false + json_file: + description: 'Results are written to this JSON file.' + required: false + json_thousands_separator: + description: 'Formatted numbers in JSON use this character to separate groups of thousands. Common values are "," or ".". Defaults to punctuation space (\u2008).' + default: ' ' + required: false + json_suite_details: + description: 'Write out all suite details to the JSON file. Setting this to "true" can greatly increase the size of the output. Defaults to "false".' + default: 'false' + required: false + json_test_case_results: + description: 'Write out all individual test case results to the JSON file. Setting this to "true" can greatly increase the size of the output. Defaults to "false".' + default: 'false' + required: false + search_pull_requests: + description: 'Prior to v2.6.0, the action used the "/search/issues" REST API to find pull requests related to a commit. If you need to restore that behaviour, set this to "true". 
Defaults to "false".' + default: 'false' + required: false +outputs: + json: + description: "Test results as JSON" + +runs: + using: 'docker' + image: 'docker://ghcr.io/step-security.io/publish-unit-test-result-action:v1.0.0' + +branding: + icon: 'check-square' + color: 'green' \ No newline at end of file diff --git a/composite/action.yml b/composite/action.yml new file mode 100644 index 0000000..3164bec --- /dev/null +++ b/composite/action.yml @@ -0,0 +1,313 @@ +name: 'Publish Test Results' +description: 'Publishes JUnit, NUnit, XUnit, TRX, JSON test results on GitHub for .NET, Dart, Java, JS, Jest, Mocha, Python, Scala, …' + +inputs: + github_token: + description: 'GitHub API Access Token.' + default: ${{ github.token }} + required: false + github_token_actor: + description: 'The name of the GitHub app that owns the GitHub API Access Token (see github_token). Used to identify pull request comments created by this action during earlier runs. Has to be set when `github_token` is set to a GitHub app installation token (other than GitHub actions). Otherwise, existing comments will not be updated, but new comments created. Note: this does not change the bot name of the pull request comments. Defaults to "github-actions".' + default: 'github-actions' + required: false + github_retries: + description: 'Requests to the GitHub API are retried this number of times. The value must be a positive integer or zero.' + default: '10' + required: false + commit: + description: 'Commit SHA to which test results are published. Only needed if the value of GITHUB_SHA does not work for you.' + required: false + check_name: + description: 'Name of the created check run.' + default: 'Test Results' + required: false + comment_title: + description: 'An alternative title for the pull request comment. Defaults to value of check_name input.' + required: false + comment_mode: + description: 'The action posts comments to pull requests that are associated with the commit. Set to "always" - always comment, "changes" - comment when changes w.r.t. the target branch exist, "changes in failures" - when changes in the number of failures and errors exist, "changes in errors" - when changes in the number of (only) errors exist, "failures" - when failures or errors exist, "errors" - when (only) errors exist, "off" - to not create pull request comments.' + default: 'always' + required: false + fail_on: + description: 'The created test result check run has failure state if any test fails or test errors occur. Never fails when set to "nothing", fails only on errors when set to "errors". Default is "test failures".' + default: 'test failures' + required: false + action_fail: + description: 'When set "true", the action itself fails when tests have failed (see option fail_on).' + default: 'false' + required: false + action_fail_on_inconclusive: + description: 'When set "true", the action itself fails when tests are inconclusive (no test results).' + default: 'false' + required: false + files: + description: 'File patterns of test result files. Relative paths are known to work best, while the composite action also works with absolute paths. Supports "*", "**", "?", and "[]" character ranges. Use multiline string for multiple patterns. Patterns starting with "!" exclude the matching files. There have to be at least one pattern starting without a "!".' + required: false + junit_files: + description: 'Deprecated, use "files" option instead.' + required: false + nunit_files: + description: 'Deprecated, use "files" option instead.' 
+ required: false + xunit_files: + description: 'Deprecated, use "files" option instead.' + required: false + trx_files: + description: 'Deprecated, use "files" option instead.' + required: false + time_unit: + description: 'Time values in the test result files have this unit. Supports "seconds" and "milliseconds".' + default: 'seconds' + required: false + test_file_prefix: + description: 'Paths in the test result files should be relative to the git repository for annotations to work best. This prefix is added to (if starting with "+"), or remove from (if starting with "-") test file paths. Examples: "+src/" or "-/opt/actions-runner".' + required: false + report_individual_runs: + description: 'Individual runs of the same test may see different failures. Reports all individual failures when set "true" or the first only otherwise.' + required: false + report_suite_logs: + description: 'In addition to reporting regular test logs, also report test suite logs. These are logs provided on suite level, not individual test level. Set to "info" for normal output, "error" for error output, "any" for both, or "none" for no suite logs at all. Defaults to "none".' + default: 'none' + required: false + deduplicate_classes_by_file_name: + description: 'De-duplicates classes with same name by their file name when set "true", combines test results for those classes otherwise.' + required: false + large_files: + description: 'Support for large files is enabled when set to "true". Defaults to "false", unless ignore_runs is "true".' + required: false + ignore_runs: + description: 'Does not collect test run information from the test result files, which is useful for very large files. This disables any check run annotations.' + default: 'false' + required: false + job_summary: + description: 'Set to "true", the results are published as part of the job summary page of the workflow run.' + default: 'true' + required: false + compare_to_earlier_commit: + description: 'Test results are compared to results of earlier commits to highlight changes: "false" - disable comparison, "true" - compare across commits.' + default: 'true' + required: false + pull_request_build: + description: 'As part of pull requests, GitHub builds a merge commit, which combines the commit and the target branch. If tests ran on the actual pushed commit, then set this to "commit". Defaults to "merge".' + default: 'merge' + required: false + event_file: + description: 'An alternative event file to use. Useful to replace a "workflow_run" event file with the actual source event file.' + required: false + event_name: + description: 'An alternative event name to use. Useful to replace a "workflow_run" event name with the actual source event name: github.event.workflow_run.event.' + required: false + test_changes_limit: + description: 'Limits the number of removed or skipped tests reported on pull request comments. This report can be disabled with a value of 0. The default is 10.' + required: false + check_run_annotations: + description: 'Adds additional information to the check run. This is a comma-separated list of any of the following values: "all tests" - list all found tests, "skipped tests" - list all skipped tests. Set to "none" to add no extra annotations at all.' + default: 'all tests, skipped tests' + required: false + check_run_annotations_branch: + description: 'Adds check run annotations only on given branches. Comma-separated list of branch names allowed, asterisk "*" matches all branches. 
Defaults to event.repository.default_branch or "main, master".' + required: false + seconds_between_github_reads: + description: 'Sets the number of seconds the action waits between concurrent read requests to the GitHub API. This throttles the API usage to avoid abuse rate limits: https://docs.github.com/en/rest/overview/resources-in-the-rest-api#abuse-rate-limits.' + default: '0.25' + required: false + seconds_between_github_writes: + description: 'Sets the number of seconds the action waits between concurrent write requests to the GitHub API. This throttles the API usage to avoid abuse rate limits: https://docs.github.com/en/rest/overview/resources-in-the-rest-api#abuse-rate-limits.' + default: '2.0' + required: false + secondary_rate_limit_wait_seconds: + description: 'Sets the number of seconds to wait before retrying secondary rate limit errors. If not set, the default defined in the PyGithub library is used (currently 60 seconds).' + required: false + json_file: + description: 'Results are written to this JSON file.' + required: false + json_thousands_separator: + description: 'Formatted numbers in JSON use this character to separate groups of thousands. Common values are "," or ".". Defaults to punctuation space (\u2008).' + default: ' ' + required: false + json_suite_details: + description: 'Write out all suite details to the JSON file. Setting this to "true" can greatly increase the size of the output. Defaults to "false".' + default: 'false' + required: false + json_test_case_results: + description: 'Write out all individual test case results to the JSON file. Setting this to "true" can greatly increase the size of the output. Defaults to "false".' + default: 'false' + required: false + search_pull_requests: + description: 'Prior to v2.6.0, the action used the "/search/issues" REST API to find pull requests related to a commit. If you need to restore that behaviour, set this to "true". Defaults to "false".' + default: 'false' + required: false + +outputs: + json: + description: "Test results as JSON" + value: ${{ steps.test-results.outputs.json }} + +runs: + using: 'composite' + steps: + - name: Check for Python3 + id: python + run: | + echo '##[group]Check for Python3' + # we check version here just to execute `python3` with an argument + # on Windows, there is a `python3.exe` that is a proxy to trigger installation from app store + # command `which python3` finds that, but `python3 -V` does not return the version on stdout + if ! which python3 || [[ $(python3 -V) != *"python 3."* && $(python3 -V) != *"Python 3."* ]] + then + if ! which python || [[ $(python -V) != *"python 3."* && $(python -V) != *"Python 3."* ]] + then + echo "::error::No python3 interpreter found. Please setup python before running this action. You could use https://github.com/actions/setup-python." + exit 1 + fi + + interpreter="$(which python)" + if [[ ! 
-e "${interpreter}3" ]] + then + mkdir -p "$RUNNER_TEMP/bin/" + ln -s "$interpreter" "$RUNNER_TEMP/bin/python3" + echo "$RUNNER_TEMP/bin" >> $GITHUB_PATH + fi + fi + echo "version=$(python3 -V)" >> $GITHUB_OUTPUT + echo '##[endgroup]' + shell: bash + + - name: Detect OS + id: os + run: | + case "$RUNNER_OS" in + Linux*) + echo "pip-cache=~/.cache/pip" >> $GITHUB_OUTPUT + ;; + macOS*) + echo "pip-cache=~/Library/Caches/pip" >> $GITHUB_OUTPUT + ;; + Windows*) + echo "pip-cache=~\\AppData\\Local\\pip\\Cache" >> $GITHUB_OUTPUT + echo "pip-options=--user" >> $GITHUB_OUTPUT + ;; + esac + shell: bash + + - name: Restore PIP packages cache + uses: actions/cache/restore@v3 + id: cache + continue-on-error: true + with: + path: ${{ steps.os.outputs.pip-cache }} + key: step-security-publish-action-${{ runner.os }}-${{ runner.arch }}-pip-${{ steps.python.outputs.version }}-df386fe4e04a72c96e140f0566a5c849 + + - name: Create virtualenv + id: venv + continue-on-error: true + env: + PIP_OPTIONS: ${{ steps.os.outputs.pip-options }} + run: | + echo '##[group]Create virtualenv' + # install virtualenv, if it is not yet installed + python3 -m pip install $PIP_OPTIONS virtualenv + python3 -m virtualenv step-security-publish-action-venv + # test activating virtualenv + case "$RUNNER_OS" in + Linux*|macOS*) + source step-security-publish-action-venv/bin/activate;; + Windows*) + source step-security-publish-action-venv\\Scripts\\activate;; + esac + which python3 + echo '##[endgroup]' + shell: bash + + - name: Install Python dependencies + env: + PIP_OPTIONS: ${{ steps.os.outputs.pip-options }} + run: | + echo '##[group]Install Python dependencies' + if [ "${{ steps.venv.outcome }}" == "success" ] + then + # activate virtualenv + case "$RUNNER_OS" in + Linux*|macOS*) + source step-security-publish-action-venv/bin/activate;; + Windows*) + source step-security-publish-action-venv\\Scripts\\activate;; + esac + fi + which python3 + + # make sure wheel is installed, which improves installing our dependencies + python3 -m pip install $PIP_OPTIONS wheel + python3 -m pip install $PIP_OPTIONS -r $GITHUB_ACTION_PATH/../python/requirements.txt + echo '##[endgroup]' + shell: bash + + - name: Publish Test Results + id: test-results + run: | + echo '##[group]Publish Test Results' + # activate virtualenv + case "$RUNNER_OS" in + Linux*|macOS*) + source step-security-publish-action-venv/bin/activate;; + Windows*) + source step-security-publish-action-venv\\Scripts\\activate;; + esac + python3 $GITHUB_ACTION_PATH/../python/publish_test_results.py + echo '##[endgroup]' + env: + GITHUB_TOKEN: ${{ inputs.github_token }} + GITHUB_TOKEN_ACTOR: ${{ inputs.github_token_actor }} + GITHUB_RETRIES: ${{ inputs.github_retries }} + COMMIT: ${{ inputs.commit }} + CHECK_NAME: ${{ inputs.check_name }} + COMMENT_TITLE: ${{ inputs.comment_title }} + COMMENT_MODE: ${{ inputs.comment_mode }} + FAIL_ON: ${{ inputs.fail_on }} + ACTION_FAIL: ${{ inputs.action_fail }} + ACTION_FAIL_ON_INCONCLUSIVE: ${{ inputs.action_fail_on_inconclusive }} + FILES: ${{ inputs.files }} + JUNIT_FILES: ${{ inputs.junit_files }} + NUNIT_FILES: ${{ inputs.nunit_files }} + XUNIT_FILES: ${{ inputs.xunit_files }} + TRX_FILES: ${{ inputs.trx_files }} + TIME_UNIT: ${{ inputs.time_unit }} + TEST_FILE_PREFIX: ${{ inputs.test_file_prefix }} + REPORT_INDIVIDUAL_RUNS: ${{ inputs.report_individual_runs }} + REPORT_SUITE_LOGS: ${{ inputs.report_suite_logs }} + DEDUPLICATE_CLASSES_BY_FILE_NAME: ${{ inputs.deduplicate_classes_by_file_name }} + LARGE_FILES: ${{ inputs.large_files }} + 
IGNORE_RUNS: ${{ inputs.ignore_runs }} + COMPARE_TO_EARLIER_COMMIT: ${{ inputs.compare_to_earlier_commit }} + PULL_REQUEST_BUILD: ${{ inputs.pull_request_build }} + EVENT_FILE: ${{ inputs.event_file }} + EVENT_NAME: ${{ inputs.event_name }} + TEST_CHANGES_LIMIT: ${{ inputs.test_changes_limit }} + CHECK_RUN_ANNOTATIONS: ${{ inputs.check_run_annotations }} + CHECK_RUN_ANNOTATIONS_BRANCH: ${{ inputs.check_run_annotations_branch }} + SECONDS_BETWEEN_GITHUB_READS: ${{ inputs.seconds_between_github_reads }} + SECONDS_BETWEEN_GITHUB_WRITES: ${{ inputs.seconds_between_github_writes }} + SECONDARY_RATE_LIMIT_WAIT_SECONDS: ${{ inputs.secondary_rate_limit_wait_seconds }} + JSON_FILE: ${{ inputs.json_file }} + JSON_THOUSANDS_SEPARATOR: ${{ inputs.json_thousands_separator }} + JSON_SUITE_DETAILS: ${{ inputs.json_suite_details }} + JSON_TEST_CASE_RESULTS: ${{ inputs.json_test_case_results }} + JOB_SUMMARY: ${{ inputs.job_summary }} + SEARCH_PULL_REQUESTS: ${{ inputs.search_pull_requests }} + # not documented + ROOT_LOG_LEVEL: ${{ inputs.root_log_level }} + # not documented + LOG_LEVEL: ${{ inputs.log_level }} + shell: bash + + - name: Save PIP packages cache + uses: actions/cache/save@v3 + if: ( success() || failure() ) && ! steps.cache.outputs.cache-hit + continue-on-error: true + with: + path: ${{ steps.os.outputs.pip-cache }} + key: ${{ steps.cache.outputs.cache-primary-key }} + +branding: + icon: 'check-square' + color: 'green' diff --git a/misc/action/fetch-workflows/action.yml b/misc/action/fetch-workflows/action.yml new file mode 100644 index 0000000..0e5151b --- /dev/null +++ b/misc/action/fetch-workflows/action.yml @@ -0,0 +1,32 @@ +name: 'Fetch workflows' +description: 'A GitHub Action to find workflows matching a query' + +outputs: + total_workflows: + description: 'Total number of workflows using this action' + value: ${{ steps.workflows.outputs.total }} + +runs: + using: 'composite' + steps: + - name: Fetch workflows + id: workflows + shell: bash + run: | + for i in {1..60} + do + workflows=$(curl -s https://github.com/step-security/publish-unit-test-result-action | (grep "Used by" || true) | sed -e "s/.*title=//" -e 's/["]//g' | cut -d " " -f 1) + if [ -n "$workflows" ] + then + echo "total=$workflows" >> $GITHUB_OUTPUT + exit 0 + fi + echo "Attempt $i failed" + sleep 60 + done + echo "Giving up" + exit 1 + +branding: + icon: 'download-cloud' + color: 'green' diff --git a/misc/action/find-workflows/action.yml b/misc/action/find-workflows/action.yml new file mode 100644 index 0000000..669ae19 --- /dev/null +++ b/misc/action/find-workflows/action.yml @@ -0,0 +1,41 @@ +name: 'Find workflows' +description: 'A GitHub Action to find workflows matching a query' + +inputs: + url: + description: 'GitHub API URL' + required: true + query: + description: 'Query to find workflows' + required: true +outputs: + total_workflows: + description: 'Total number of workflows' + value: ${{ steps.workflows.outputs.total }} + +runs: + using: 'composite' + steps: + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: '3.10' + + - name: Install Python dependencies + shell: bash + run: | + python -m pip install --upgrade --force --no-cache-dir pip + pip install --force --no-cache-dir -r ${{ github.action_path }}/requirements.txt + pip freeze | sort + + - name: Find workflows + id: workflows + env: + GITHUB_TOKEN: ${{ github.token }} + shell: bash + run: | + python ${{ github.action_path }}/script.py ${{ inputs.url }} ${{ inputs.query }} + +branding: + icon: 'download-cloud' + color: 
'green' diff --git a/misc/action/find-workflows/requirements.txt b/misc/action/find-workflows/requirements.txt new file mode 100644 index 0000000..663bd1f --- /dev/null +++ b/misc/action/find-workflows/requirements.txt @@ -0,0 +1 @@ +requests \ No newline at end of file diff --git a/misc/action/find-workflows/script.py b/misc/action/find-workflows/script.py new file mode 100644 index 0000000..d9b3846 --- /dev/null +++ b/misc/action/find-workflows/script.py @@ -0,0 +1,28 @@ +import os +import sys + +import requests + + +if len(sys.argv) != 3: + print('Please provide GitHub API URL and the query string') + sys.exit(1) + +if 'GITHUB_TOKEN' not in os.environ: + print('Please provide GitHub token via GITHUB_TOKEN environment variable') + sys.exit(1) + +url = sys.argv[1] +query = sys.argv[2] + +headers = {'Authorization': f'token {os.environ.get("GITHUB_TOKEN")}'} +response = requests.get(f'{url}/search/code?q=%22{query}%22+path%3A.github%2Fworkflows%2F+language%3AYAML&type=Code', headers=headers).json() + +total = f'{response["total_count"]:,}' +print(f'found {total} workflows') + +if 'GITHUB_OUTPUT' in os.environ: + with open(os.environ['GITHUB_OUTPUT'], 'wt') as w: + print(f'total={total}', file=w) +else: + print(f'::set-output name=total::{total}') diff --git a/misc/action/json-output/action.yml b/misc/action/json-output/action.yml new file mode 100644 index 0000000..e12e588 --- /dev/null +++ b/misc/action/json-output/action.yml @@ -0,0 +1,62 @@ +name: 'Assert JSON output' +description: 'A GitHub Action that asserts the publish action''s JSON output' + +inputs: + json: + description: 'JSON content to assess.' + required: true + json_file: + description: 'Path to the JSON file to assess.' + required: true + +runs: + using: 'composite' + steps: + - name: JSON file + shell: bash + run: | + jq . 
"${{ inputs.json_file }}" + jq .conclusion "${{ inputs.json_file }}" + + - name: JSON output + if: always() + env: + TITLE: ${{ fromJSON( inputs.json ).title }} + SUMMARY: ${{ fromJSON( inputs.json ).summary }} + CONCLUSION: ${{ fromJSON( inputs.json ).conclusion }} + STATS: ${{ toJSON( fromJSON( inputs.json ).stats ) }} + STATS_WITH_DELTA: ${{ toJSON( fromJSON( inputs.json ).stats_with_delta ) }} + FORMATTED_STATS: ${{ toJSON( fromJSON( inputs.json ).formatted.stats ) }} + FORMATTED_STATS_WITH_DELTA: ${{ toJSON( fromJSON( inputs.json ).formatted.stats_with_delta ) }} + COMMIT: ${{ fromJSON( inputs.json ).stats.commit }} + REFERENCE: ${{ fromJSON( inputs.json ).stats_with_delta.reference_commit }} + ANNOTATIONS: ${{ fromJSON( inputs.json ).annotations }} + shell: bash + run: | + echo "title=$TITLE" + echo "summary=$SUMMARY" + echo "conclusion=$CONCLUSION" + echo "stats=$STATS" + echo "stats-with-delta=$STATS_WITH_DELTA" + echo "formatted-stats=$FORMATTED_STATS" + echo "formatted-stats-with-delta=$FORMATTED_STATS_WITH_DELTA" + echo "commit=$COMMIT" + echo "reference=$REFERENCE" + echo "annotations=$ANNOTATIONS" + + echo + echo "JSON output:" + cat < 1000: + suffix = 'k' + n = n / 1000 + if n > 1000: + suffix = 'M' + n = n / 1000 + if n > 1000: + suffix = 'B' + n = n / 1000 + if n > 100: + return f'{n:.0f}{suffix}' + else: + return f'{n:.1f}{suffix}' + + +total = humanize(int(total)) +per_day = humanize(int(per_day)) + +print(f'total={total}') +print(f'per_day={per_day}') + +if 'GITHUB_OUTPUT' in os.environ: + print(f'output file is {os.environ["GITHUB_OUTPUT"]}') + with open(os.environ['GITHUB_OUTPUT'], 'at') as w: + print(f'total={total}', file=w) + print(f'per_day={per_day}', file=w) +else: + print(f'::set-output name=total::{total}') + print(f'::set-output name=per_day::{per_day}') diff --git a/misc/badge-arm.svg b/misc/badge-arm.svg new file mode 100644 index 0000000..2fba611 --- /dev/null +++ b/misc/badge-arm.svg @@ -0,0 +1,20 @@ + + ARM + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/badge-js.svg b/misc/badge-js.svg new file mode 100644 index 0000000..c87b6b0 --- /dev/null +++ b/misc/badge-js.svg @@ -0,0 +1,20 @@ + + JS / Dart / Mocha: JSON + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/badge-license.svg b/misc/badge-license.svg new file mode 100644 index 0000000..c77ea68 --- /dev/null +++ b/misc/badge-license.svg @@ -0,0 +1,20 @@ + + license: Apache-2.0 + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/badge-macos.svg b/misc/badge-macos.svg new file mode 100644 index 0000000..8e19a12 --- /dev/null +++ b/misc/badge-macos.svg @@ -0,0 +1,20 @@ + + macOS + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/badge-trx.svg b/misc/badge-trx.svg new file mode 100644 index 0000000..010c5b2 --- /dev/null +++ b/misc/badge-trx.svg @@ -0,0 +1,20 @@ + + .Net: TRX + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/badge-ubuntu.svg b/misc/badge-ubuntu.svg new file mode 100644 index 0000000..258b384 --- /dev/null +++ b/misc/badge-ubuntu.svg @@ -0,0 +1,20 @@ + + Ubuntu + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/badge-windows.svg b/misc/badge-windows.svg new file mode 100644 index 0000000..9467f1a --- /dev/null +++ b/misc/badge-windows.svg @@ -0,0 +1,20 @@ + + Windows + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/badge-xml.svg b/misc/badge-xml.svg new file mode 100644 index 0000000..f935934 --- /dev/null +++ b/misc/badge-xml.svg @@ -0,0 
+1,20 @@ + + JUnit / NUnit / XUnit: XML + + + + + + + + + + + + + \ No newline at end of file diff --git a/misc/github-checks-annotation.png b/misc/github-checks-annotation.png new file mode 100644 index 0000000..406be60 Binary files /dev/null and b/misc/github-checks-annotation.png differ diff --git a/misc/github-checks-comment.png b/misc/github-checks-comment.png new file mode 100644 index 0000000..5589ba5 Binary files /dev/null and b/misc/github-checks-comment.png differ diff --git a/misc/github-checks-commit.png b/misc/github-checks-commit.png new file mode 100644 index 0000000..8e97bf7 Binary files /dev/null and b/misc/github-checks-commit.png differ diff --git a/misc/github-job-summary-full.png b/misc/github-job-summary-full.png new file mode 100644 index 0000000..cfbbdca Binary files /dev/null and b/misc/github-job-summary-full.png differ diff --git a/misc/github-job-summary.png b/misc/github-job-summary.png new file mode 100644 index 0000000..e62eb6c Binary files /dev/null and b/misc/github-job-summary.png differ diff --git a/misc/github-pull-request-changes-annotation.png b/misc/github-pull-request-changes-annotation.png new file mode 100644 index 0000000..beb6bc5 Binary files /dev/null and b/misc/github-pull-request-changes-annotation.png differ diff --git a/misc/github-pull-request-checks.png b/misc/github-pull-request-checks.png new file mode 100644 index 0000000..cdad4a0 Binary files /dev/null and b/misc/github-pull-request-checks.png differ diff --git a/misc/github-pull-request-comment-update-history.png b/misc/github-pull-request-comment-update-history.png new file mode 100644 index 0000000..4c4e5c5 Binary files /dev/null and b/misc/github-pull-request-comment-update-history.png differ diff --git a/misc/github-pull-request-comment-with-test-changes.png b/misc/github-pull-request-comment-with-test-changes.png new file mode 100644 index 0000000..1d16671 Binary files /dev/null and b/misc/github-pull-request-comment-with-test-changes.png differ diff --git a/misc/github-pull-request-comment-without-runs.png b/misc/github-pull-request-comment-without-runs.png new file mode 100644 index 0000000..1e4bdd1 Binary files /dev/null and b/misc/github-pull-request-comment-without-runs.png differ diff --git a/misc/github-pull-request-comment.png b/misc/github-pull-request-comment.png new file mode 100644 index 0000000..2b53288 Binary files /dev/null and b/misc/github-pull-request-comment.png differ diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..56711bb --- /dev/null +++ b/pytest.ini @@ -0,0 +1,2 @@ +[pytest] +junit_family=xunit1 diff --git a/python/.gitignore b/python/.gitignore new file mode 100644 index 0000000..0d20b64 --- /dev/null +++ b/python/.gitignore @@ -0,0 +1 @@ +*.pyc diff --git a/python/publish/__init__.py b/python/publish/__init__.py new file mode 100644 index 0000000..15a1c6b --- /dev/null +++ b/python/publish/__init__.py @@ -0,0 +1,1014 @@ +import base64 +import gzip +import json +import logging +import re +from collections import defaultdict +from dataclasses import dataclass +from typing import List, Any, Union, Optional, Tuple, Mapping, Iterator, Set, Iterable, Dict + +from publish.unittestresults import Numeric, UnitTestSuite, UnitTestCaseResults, UnitTestRunResults, \ + UnitTestRunDeltaResults, UnitTestRunResultsOrDeltaResults, ParseError + +# keep the version in sync with action.yml +__version__ = 'v1.0.0' + +logger = logging.getLogger('publish') +digest_prefix = '[test-results]:data:' +digest_mime_type = 'application/gzip' +digest_encoding = 
'base64' +digest_header = f'{digest_prefix}{digest_mime_type};{digest_encoding},' +digit_space = '  ' +punctuation_space = ' ' + +comment_mode_off = 'off' +comment_mode_always = 'always' +comment_mode_changes = 'changes' +comment_mode_changes_failures = 'changes in failures' # includes comment_mode_changes_errors +comment_mode_changes_errors = 'changes in errors' +comment_mode_failures = 'failures' # includes comment_mode_errors +comment_mode_errors = 'errors' +comment_modes = [ + comment_mode_off, + comment_mode_always, + comment_mode_changes, + comment_mode_changes_failures, + comment_mode_changes_errors, + comment_mode_failures, + comment_mode_errors +] + +fail_on_mode_nothing = 'nothing' +fail_on_mode_errors = 'errors' +fail_on_mode_failures = 'test failures' +fail_on_modes = [ + fail_on_mode_nothing, + fail_on_mode_errors, + fail_on_mode_failures +] + +report_suite_out_log = 'info' +report_suite_err_log = 'error' +report_suite_logs = 'any' +report_no_suite_logs = 'none' +available_report_suite_logs = [report_suite_out_log, report_suite_err_log, report_suite_logs, report_no_suite_logs] +default_report_suite_logs = report_no_suite_logs + +pull_request_build_mode_commit = 'commit' +pull_request_build_mode_merge = 'merge' +pull_request_build_modes = [ + pull_request_build_mode_commit, + pull_request_build_mode_merge +] + +all_tests_list = 'all tests' +skipped_tests_list = 'skipped tests' +none_annotations = 'none' +available_annotations = [all_tests_list, skipped_tests_list, none_annotations] +default_annotations = [all_tests_list, skipped_tests_list] + + +class CaseMessages(defaultdict): + def __init__(self, items=None): + if items is None: + items = [] + super(CaseMessages, self).__init__( + lambda: defaultdict(lambda: defaultdict(lambda: defaultdict(list))), + items + ) + + +class SomeTestChanges: + def __init__(self, + all_tests_before: Optional[List[str]], + all_tests_current: Optional[List[str]], + skipped_tests_before: Optional[List[str]], + skipped_tests_current: Optional[List[str]]): + self._all_tests_before = set(all_tests_before) if all_tests_before is not None else None + self._all_tests_current = set(all_tests_current) if all_tests_current is not None else None + self._skipped_tests_before = set(skipped_tests_before) if skipped_tests_before is not None else None + self._skipped_tests_current = set(skipped_tests_current) if skipped_tests_current is not None else None + + @property + def has_changes(self) -> bool: + return (self.adds() is not None and self.removes() is not None and len(self.adds().union(self.removes())) > 0 or + self.skips() is not None and self.un_skips() is not None and len(self.skips().union(self.un_skips())) > 0) + + def adds(self) -> Optional[Set[str]]: + if self._all_tests_before is None or self._all_tests_current is None: + return None + return self._all_tests_current - self._all_tests_before + + def removes(self) -> Optional[Set[str]]: + if self._all_tests_before is None or self._all_tests_current is None: + return None + return self._all_tests_before - self._all_tests_current + + def remains(self) -> Optional[Set[str]]: + if self._all_tests_before is None or self._all_tests_current is None: + return None + return self._all_tests_before.intersection(self._all_tests_current) + + def has_no_tests(self) -> bool: + return (len(self._all_tests_current) == 0) if self._all_tests_current is not None else False + + def skips(self) -> Optional[Set[str]]: + if self._skipped_tests_before is None or self._skipped_tests_current is None: + return None + return 
self._skipped_tests_current - self._skipped_tests_before + + def un_skips(self) -> Optional[Set[str]]: + if self._skipped_tests_before is None or self._skipped_tests_current is None: + return None + return self._skipped_tests_before - self._skipped_tests_current + + def added_and_skipped(self) -> Optional[Set[str]]: + added = self.adds() + skipped = self.skips() + if added is None or skipped is None: + return None + return added.intersection(skipped) + + def remaining_and_skipped(self) -> Optional[Set[str]]: + remaining = self.remains() + skipped = self.skips() + if remaining is None or skipped is None: + return None + return remaining.intersection(skipped) + + def remaining_and_un_skipped(self) -> Optional[Set[str]]: + remaining = self.remains() + un_skipped = self.un_skips() + if remaining is None or un_skipped is None: + return None + return remaining.intersection(un_skipped) + + def removed_skips(self) -> Optional[Set[str]]: + removed = self.removes() + skipped_before = self._skipped_tests_before + if removed is None or skipped_before is None: + return None + return skipped_before.intersection(removed) + + +def get_json_path(json: Dict[str, Any], path: Union[str, List[str]]) -> Any: + if isinstance(path, str): + path = path.split('.') + + if path[0] not in json: + return None + + elem = json[path[0]] + + if len(path) > 1: + if isinstance(elem, dict): + return get_json_path(elem, path[1:]) + else: + return None + else: + return elem + + +def utf8_character_length(c: int) -> int: + if c >= 0x00010000: + return 4 + if c >= 0x00000800: + return 3 + if c >= 0x00000080: + return 2 + return 1 + + +# Github API does not like Unicode characters above 0xffff +# Those characters are replaced here by \U00000000 +def restrict_unicode(text: Optional[str]) -> Optional[str]: + if text is None: + return None + return ''.join([r"\U{:08x}".format(ord(c)) if ord(c) > 0xffff else c + for c in text]) + + +def restrict_unicode_list(texts: List[Optional[str]]) -> List[Optional[str]]: + return [restrict_unicode(text) for text in texts] + + +def alternating_range(positive_first: bool = True) -> Iterator[int]: + i = 0 + yield i + + if positive_first: + while True: + i += 1 + yield i + yield -i + else: + while True: + i += 1 + yield -i + yield i + + +def abbreviate_bytes(string: Optional[str], length: int) -> Optional[str]: + if length < 3: + raise ValueError(f'Length must at least allow for the replacement character: {length}') + + if string is None: + return None + + char_length = len(string) + byte_length = len(string.encode('utf8')) + if byte_length <= length: + return string + + odd = char_length % 2 + middle = char_length // 2 + pre = middle + suf = char_length - middle + for index in alternating_range(odd == 1): + if index >= 0: + suf -= 1 + else: + pre -= 1 + byte_length -= utf8_character_length(ord(string[middle + index])) + if byte_length <= length - 3: + return string[:pre] + '…' + (string[-suf:] if suf else '') + + +def abbreviate(string: Optional[str], length: int) -> Optional[str]: + if length < 1: + raise ValueError(f'Length must at least allow for the replacement character: {length}') + + if string is None: + return None + + char_length = len(string) + if char_length <= length: + return string + + pre = length // 2 + suf = (length - 1) // 2 + return string[:pre] + '…' + (string[-suf:] if suf else '') + + +def get_formatted_digits(*numbers: Union[Optional[int], Numeric]) -> Tuple[int, int]: + def get_abs_number(num): + if isinstance(num, dict): + return abs(num.get('number')) if num.get('number') 
is not None else None + return abs(num) + + def get_abs_delta(num): + if isinstance(num, dict): + return abs(num.get('delta')) if num.get('delta') is not None else None + return 0 + + if isinstance(numbers[0], dict): + # only the first number is a dict, other still might be an int + number_digits = max([len(as_stat_number(get_abs_number(number))) for number in numbers]) + delta_digits = max([len(as_stat_number(get_abs_delta(number))) for number in numbers]) + return number_digits, delta_digits + + return max([len(as_stat_number(abs(number) if number is not None else None)) + for number in numbers]), 0 + + +def get_magnitude(value: Union[int, dict]) -> Optional[int]: + if value is None: + return None + if isinstance(value, int): + return value + if isinstance(value, dict): + if 'number' in value: + return value.get('number') + if 'duration' in value: + return value.get('duration') + return None + + +def get_delta(value: Optional[Union[int, Numeric]]) -> Optional[int]: + if isinstance(value, int): + return None + if isinstance(value, Mapping): # Numeric + return value.get('delta') + return None + + +def as_short_commit(commit: Optional[str]) -> str: + return commit[0:8] if commit else None + + +def as_delta(number: int, digits: int) -> str: + string = as_stat_number(abs(number), digits) + if number == 0: + sign = '±' + elif number > 0: + sign = '+' + else: + sign = ' - ' + return f'{sign}{string}' + + +def as_stat_number(number: Optional[Union[int, Numeric]], + number_digits: int = 0, + delta_digits: int = 0, + label: Optional[str] = None) -> str: + if number is None: + if label: + return 'N/A {}'.format(label) + return 'N/A' + if isinstance(number, int): + formatted = '{number:0{digits},}'.format(number=number, digits=number_digits) + res = re.search('[^0,]', formatted) + pos = res.start() if res else len(formatted)-1 + formatted = '{}{}'.format(formatted[:pos].replace('0', digit_space), formatted[pos:]) + formatted = formatted.replace(',', punctuation_space) + if label: + return '{} {}'.format(formatted, label) + return formatted + elif isinstance(number, dict): + extra_fields = [ + as_delta(number['delta'], delta_digits) if 'delta' in number else '', + as_stat_number(number['new'], 0, 0, 'new') if 'new' in number else '', + as_stat_number(number['gone'], 0, 0, 'gone') if 'gone' in number else '', + ] + extra = ', '.join([field for field in extra_fields if field != '']) + + return ''.join([ + as_stat_number(number.get('number'), number_digits, delta_digits, label), + f' {extra} ' if extra != '' else '' + ]) + else: + logger.warning(f'unsupported stats number type {type(number)}: {number}') + return 'N/A' + + +def as_stat_duration(duration: Optional[Union[float, int, Numeric]], label=None) -> str: + if duration is None: + if label: + return f'N/A {label}' + return 'N/A' + if isinstance(duration, float): + duration = int(duration) + if isinstance(duration, int): + duration = abs(duration) + strings = [] + for unit, denominator in [('s', 60), ('m', 60), ('h', 24)]: + if unit == 's' or duration: + strings.insert(0, f'{duration % denominator}{unit}') + duration //= denominator + if duration: + strings.insert(0, f'{duration}d') + string = ' '.join(strings) + if label: + return f'{string} {label}' + return string + elif isinstance(duration, dict): + delta = duration.get('delta') + duration = duration.get('duration') + sign = '' if delta is None else '±' if delta == 0 else '+' if delta > 1 else '-' + if delta and abs(delta) >= 60: + sign += ' ' + return as_stat_duration(duration, label) + (f' 
{sign}{as_stat_duration(delta)}' if delta is not None else '') + else: + logger.warning(f'unsupported stats duration type {type(duration)}: {duration}') + return 'N/A' + + +def digest_string(string: str) -> str: + return str(base64.encodebytes(gzip.compress(bytes(string, 'utf8'), compresslevel=9)), 'utf8') \ + .replace('\n', '') + + +def ungest_string(string: str) -> str: + return str(gzip.decompress(base64.decodebytes(bytes(string, 'utf8'))), 'utf8') + + +def get_digest_from_stats(stats: UnitTestRunResults) -> str: + d = stats.to_dict() + del d['errors'] # we don't need errors in the digest + return digest_string(json.dumps(d, ensure_ascii=False)) + + +def get_stats_from_digest(digest: str) -> UnitTestRunResults: + return UnitTestRunResults.from_dict(json.loads(ungest_string(digest))) + + +def get_short_summary(stats: UnitTestRunResults) -> str: + """Provides a single-line summary for the given stats.""" + perrors = len(stats.errors) + tests = get_magnitude(stats.tests) + success = get_magnitude(stats.tests_succ) + skipped = get_magnitude(stats.tests_skip) + failure = get_magnitude(stats.tests_fail) + error = get_magnitude(stats.tests_error) + duration = get_magnitude(stats.duration) + + def get_test_summary(): + if tests == 0: + if perrors == 0: + return 'No tests found' + else: + return f'{perrors} parse errors' + if tests > 0: + if (failure is None or failure == 0) and \ + (error is None or error == 0) and perrors == 0: + if skipped == 0 and success == tests: + return 'All {} pass'.format(as_stat_number(tests, 0, 0, 'tests')) + if skipped > 0 and success == tests - skipped: + return 'All {} pass, {}'.format( + as_stat_number(success, 0, 0, 'tests'), + as_stat_number(skipped, 0, 0, 'skipped') + ) + + summary = [as_stat_number(number, 0, 0, label) + for number, label in [(perrors, 'parse errors'), + (error, 'errors'), (failure, 'fail'), + (skipped, 'skipped'), (success, 'pass')] + if number > 0] + summary = ', '.join(summary) + + # when all except tests are None or 0 + if len(summary) == 0: + return f'{as_stat_number(tests, 0, 0, "tests")} found' + return summary + + if tests is None or tests == 0 or duration is None: + return get_test_summary() + + return f'{get_test_summary()} in {as_stat_duration(duration)}' + + +def get_link_and_tooltip_label_md(label: str, tooltip: str) -> str: + return '[{label}]({link} "{tooltip}")'.format( + label=label, + link=f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols', + tooltip=tooltip + ) + + +all_tests_label_md = 'tests' +passed_tests_label_md = get_link_and_tooltip_label_md(':heavy_check_mark:', 'passed tests') +skipped_tests_label_md = get_link_and_tooltip_label_md(':zzz:', 'skipped / disabled tests') +failed_tests_label_md = get_link_and_tooltip_label_md(':x:', 'failed tests') +test_errors_label_md = get_link_and_tooltip_label_md(':fire:', 'test errors') +duration_label_md = get_link_and_tooltip_label_md(':stopwatch:', 'duration of all tests') + + +def get_short_summary_md(stats: UnitTestRunResultsOrDeltaResults) -> str: + """Provides a single-line summary with markdown for the given stats.""" + md = ('{tests} {tests_succ} {tests_skip} {tests_fail} {tests_error}'.format( + tests=as_stat_number(stats.tests, 0, 0, all_tests_label_md), + tests_succ=as_stat_number(stats.tests_succ, 0, 0, passed_tests_label_md), + tests_skip=as_stat_number(stats.tests_skip, 0, 0, skipped_tests_label_md), + tests_fail=as_stat_number(stats.tests_fail, 0, 0, failed_tests_label_md), + 
tests_error=as_stat_number(stats.tests_error, 0, 0, test_errors_label_md), + )) + return md + + +def get_test_changes_summary_md(changes: Optional[SomeTestChanges], list_limit: Optional[int]) -> str: + if not changes or list_limit == 0 or changes.has_no_tests(): + return '' + + test_changes_details = [] + if changes.removes(): + if changes.adds(): + test_changes_details.append( + get_test_changes_md( + 'This pull request removes {} and adds {} tests. ' + 'Note that renamed tests count towards both.'.format( + len(changes.removes()), + len(changes.adds()), + ), + list_limit, + changes.removes(), + changes.adds() + ) + ) + else: + test_changes_details.append( + get_test_changes_md( + 'This pull request removes {} test{}.'.format( + len(changes.removes()), + 's' if len(changes.removes()) > 1 else '' + ), + list_limit, + list(changes.removes()) + ) + ) + + if changes.removed_skips() and changes.added_and_skipped(): + test_changes_details.append( + get_test_changes_md( + 'This pull request removes {} skipped test{} and adds {} skipped test{}. ' + 'Note that renamed tests count towards both.'.format( + len(changes.removed_skips()), + 's' if len(changes.removed_skips()) > 1 else '', + len(changes.added_and_skipped()), + 's' if len(changes.added_and_skipped()) > 1 else '' + ), + list_limit, + changes.removed_skips(), + changes.added_and_skipped() + ) + ) + + if changes.remaining_and_skipped(): + if changes.remaining_and_un_skipped(): + test_changes_details.append( + get_test_changes_md( + 'This pull request skips {} and un-skips {} tests.'.format( + len(changes.remaining_and_skipped()), + len(changes.remaining_and_un_skipped()) + ), + list_limit, + changes.remaining_and_skipped(), + changes.remaining_and_un_skipped() + ) + ) + else: + test_changes_details.append( + get_test_changes_md( + 'This pull request skips {} test{}.'.format( + len(changes.remaining_and_skipped()), + 's' if len(changes.remaining_and_skipped()) > 1 else '' + ), + list_limit, + changes.remaining_and_skipped() + ) + ) + + return '\n'.join(test_changes_details) + + +def get_test_changes_md(summary: str, list_limit: Optional[int], *tests: Iterable[str]) -> str: + tests = '\n'.join([get_test_changes_list_md(sorted(test), list_limit) for test in tests]) + return ( + f'
\n' + f' {summary}\n' + f'\n' + f'{tests}' + f'
\n' + ) + + +def get_test_changes_list_md(tests: List[str], limit: Optional[int]) -> str: + if limit: + tests = tests[:limit] + (['…'] if len(tests) > limit else []) + tests = '\n'.join(tests) + return f'```\n{tests}\n```\n' + + +def get_long_summary_md(stats: UnitTestRunResultsOrDeltaResults, + details_url: Optional[str] = None, + test_changes: Optional[SomeTestChanges] = None, + test_list_changes_limit: Optional[int] = None) -> str: + """Provides a long summary in Markdown notation for the given stats.""" + trivial_runs = stats.runs == stats.tests and \ + stats.runs_succ == stats.tests_succ and \ + stats.runs_skip == stats.tests_skip and \ + stats.runs_fail == stats.tests_fail and \ + stats.runs_error == stats.tests_error + + if trivial_runs: + return get_long_summary_without_runs_md(stats, details_url, test_changes, test_list_changes_limit) + else: + return get_long_summary_with_runs_md(stats, details_url, test_changes, test_list_changes_limit) + + +def get_details_line_md(stats: UnitTestRunResultsOrDeltaResults, + details_url: Optional[str] = None) -> str: + errors = len(stats.errors) + details_on = (['parsing errors'] if errors > 0 else []) + \ + (['failures'] if get_magnitude(stats.tests_fail) > 0 else []) + \ + (['errors'] if get_magnitude(stats.tests_error) > 0 else []) + details_on = details_on[0:-2] + [' and '.join(details_on[-2:])] if details_on else [] + + return 'For more details on these {details_on}, see [this check]({url}).'.format( + details_on=', '.join(details_on), + url=details_url + ) if details_url and details_on else '' + + +def get_commit_line_md(stats: UnitTestRunResultsOrDeltaResults) -> str: + commit = stats.commit + is_delta_stats = isinstance(stats, UnitTestRunDeltaResults) + reference_type = stats.reference_type if is_delta_stats else None + reference_commit = stats.reference_commit if is_delta_stats else None + + return 'Results for commit {commit}.{compare}'.format( + commit=as_short_commit(commit), + compare=' ± Comparison against {reference_type} commit {reference_commit}.'.format( + reference_type=reference_type, + reference_commit=as_short_commit(reference_commit) + ) if reference_type and reference_commit else '' + ) + + +def get_long_summary_with_runs_md(stats: UnitTestRunResultsOrDeltaResults, + details_url: Optional[str] = None, + test_changes: Optional[SomeTestChanges] = None, + test_list_changes_limit: Optional[int] = None) -> str: + files_digits, files_delta_digits = get_formatted_digits(stats.files, stats.tests, stats.runs) + success_digits, success_delta_digits = get_formatted_digits(stats.suites, stats.tests_succ, stats.runs_succ) + skip_digits, skip_delta_digits = get_formatted_digits(stats.tests_skip, stats.runs_skip) + fail_digits, fail_delta_digits = get_formatted_digits(stats.tests_fail, stats.runs_fail) + error_digits, error_delta_digits = get_formatted_digits(stats.tests_error, stats.runs_error) + + errors = len(stats.errors) + misc_line = '{files} {errors}{suites}  {duration}\n'.format( + files=as_stat_number(stats.files, files_digits, files_delta_digits, 'files '), + errors='{} '.format(as_stat_number(errors, success_digits, 0, 'errors ')) if errors > 0 else '', + suites=as_stat_number(stats.suites, success_digits if errors == 0 else skip_digits, 0, 'suites '), + duration=as_stat_duration(stats.duration, duration_label_md) + ) + + tests_error_part = ' {tests_error}'.format( + tests_error=as_stat_number(stats.tests_error, error_digits, error_delta_digits, test_errors_label_md) + ) if get_magnitude(stats.tests_error) else '' + tests_line 
= '{tests} {tests_succ} {tests_skip} {tests_fail}{tests_error_part}\n'.format( + tests=as_stat_number(stats.tests, files_digits, files_delta_digits, all_tests_label_md), + tests_succ=as_stat_number(stats.tests_succ, success_digits, success_delta_digits, passed_tests_label_md), + tests_skip=as_stat_number(stats.tests_skip, skip_digits, skip_delta_digits, skipped_tests_label_md), + tests_fail=as_stat_number(stats.tests_fail, fail_digits, fail_delta_digits, failed_tests_label_md), + tests_error_part=tests_error_part + ) + + runs_error_part = ' {runs_error}'.format( + runs_error=as_stat_number(stats.runs_error, error_digits, error_delta_digits, test_errors_label_md) + ) if get_magnitude(stats.runs_error) else '' + runs_line = '{runs} {runs_succ} {runs_skip} {runs_fail}{runs_error_part}\n'.format( + runs=as_stat_number(stats.runs, files_digits, files_delta_digits, 'runs '), + runs_succ=as_stat_number(stats.runs_succ, success_digits, success_delta_digits, passed_tests_label_md), + runs_skip=as_stat_number(stats.runs_skip, skip_digits, skip_delta_digits, skipped_tests_label_md), + runs_fail=as_stat_number(stats.runs_fail, fail_digits, fail_delta_digits, failed_tests_label_md), + runs_error_part=runs_error_part, + ) + + details_line = get_details_line_md(stats, details_url) + commit_line = get_commit_line_md(stats) + test_changes_details = get_test_changes_summary_md(test_changes, test_list_changes_limit) + + return '{misc}{tests}{runs}{details}{commit}{test_changes_details}'.format( + misc=misc_line, + tests=tests_line, + runs=runs_line, + details=new_lines(details_line), + commit=new_lines(commit_line), + test_changes_details=new_line(test_changes_details) + ) + + +def new_line(text: str, before: bool = True) -> str: + if before: + return ('\n' + text) if text else text + else: + return (text + '\n') if text else text + + +def new_lines(text: str) -> str: + return ('\n' + text + '\n') if text else text + + +def get_long_summary_without_runs_md(stats: UnitTestRunResultsOrDeltaResults, + details_url: Optional[str] = None, + test_changes: Optional[SomeTestChanges] = None, + test_list_changes_limit: Optional[int] = None) -> str: + sep = '  ' + + errors = len(stats.errors) + tests_digits, tests_delta_digits = get_formatted_digits(stats.tests, stats.suites, stats.files, errors) + passs_digits, passs_delta_digits = get_formatted_digits(stats.tests_succ, stats.tests_skip, stats.tests_fail, stats.tests_error) + + tests = as_stat_number(stats.tests, tests_digits, tests_delta_digits, all_tests_label_md + ' ') + suites = as_stat_number(stats.suites, tests_digits, tests_delta_digits, 'suites') + files = as_stat_number(stats.files, tests_digits, tests_delta_digits, 'files  ') + parse_errors = as_stat_number(errors, tests_digits, tests_delta_digits, 'errors') if errors else '' + + passs = as_stat_number(stats.tests_succ, passs_digits, passs_delta_digits, passed_tests_label_md) + skips = as_stat_number(stats.tests_skip, passs_digits, passs_delta_digits, skipped_tests_label_md) + fails = as_stat_number(stats.tests_fail, passs_digits, passs_delta_digits, failed_tests_label_md) + + duration = as_stat_duration(stats.duration, duration_label_md) + errors = sep + as_stat_number(stats.tests_error, label=test_errors_label_md) if get_magnitude(stats.tests_error) else '' + + details_line = get_details_line_md(stats, details_url) + commit_line = get_commit_line_md(stats) + test_changes_details = get_test_changes_summary_md(test_changes, test_list_changes_limit) + + return '{tests}{sep}{passs}{sep}{duration}\n' \ + 
'{suites}{sep}{skips}\n' \ + '{files}{sep}{fails}{errors}\n' \ + '{parse_errors}{details}{commit}{test_changes_details}'.format( + sep=sep, + tests=tests, + passs=passs, + duration=duration, + suites=suites, + skips=skips, + files=files, + fails=fails, + errors=errors, + parse_errors=new_line(parse_errors, before=False), + details=new_lines(details_line), + commit=new_lines(commit_line), + test_changes_details=new_line(test_changes_details) + ) + + +def get_long_summary_with_digest_md(stats: UnitTestRunResultsOrDeltaResults, + digest_stats: Optional[UnitTestRunResults] = None, + details_url: Optional[str] = None, + test_changes: Optional[SomeTestChanges] = None, + test_list_changes_limit: Optional[int] = None) -> str: + """ + Provides the summary of stats with digest of digest_stats if given, otherwise + digest of stats. In that case, stats must be UnitTestRunResults. + + :param stats: stats to summarize + :param digest_stats: stats to digest + :return: summary with digest + """ + if digest_stats is None and isinstance(stats, UnitTestRunDeltaResults): + raise ValueError('stats must be UnitTestRunResults when no digest_stats is given') + summary = get_long_summary_md(stats, details_url, test_changes, test_list_changes_limit) + digest = get_digest_from_stats(stats if digest_stats is None else digest_stats) + return f'{summary}\n{digest_header}{digest}\n' + + +def get_case_messages(case_results: UnitTestCaseResults) -> CaseMessages: + """ Re-index cases from test+state to test+state+message. """ + messages = defaultdict(lambda: defaultdict(lambda: defaultdict(list))) + for test in case_results: + for state in case_results[test]: + for case in case_results[test][state]: + message = case.message if case.result in ['skipped', 'disabled'] else case.content + messages[test][state][message].append(case) + return CaseMessages(messages) + + +@dataclass(frozen=True) +class Annotation: + path: str + start_line: int + end_line: int + start_column: Optional[int] + end_column: Optional[int] + annotation_level: str + message: str + title: Optional[str] + raw_details: Optional[str] + + def to_dict(self) -> Mapping[str, Any]: + dictionary = self.__dict__.copy() + dictionary['path'] = restrict_unicode(dictionary['path']) + dictionary['message'] = abbreviate_bytes(restrict_unicode(dictionary['message']), 64000) + dictionary['title'] = abbreviate(restrict_unicode(dictionary['title']), 255) + dictionary['raw_details'] = abbreviate(restrict_unicode(dictionary['raw_details']), 64000) + if not dictionary.get('start_column'): + del dictionary['start_column'] + if not dictionary.get('end_column'): + del dictionary['end_column'] + if not dictionary.get('title'): + del dictionary['title'] + if not dictionary.get('raw_details'): + del dictionary['raw_details'] + return dictionary + + +def message_is_contained_in_content(message: Optional[str], content: Optional[str]) -> bool: + # ignore new lines and any leading or trailing white spaces + if content and message: + content = re.sub(r'\s+', ' ', content.strip()) + message = re.sub(r'\s+', ' ', message.strip()) + return content.startswith(message) + return False + + +def get_case_annotation(messages: CaseMessages, + key: Tuple[Optional[str], Optional[str], Optional[str]], + state: str, + message: Optional[str], + report_individual_runs: bool) -> Annotation: + case = messages[key][state][message][0] + same_cases = len(messages[key][state][message] if report_individual_runs else + [case + for m in messages[key][state] + for case in messages[key][state][m]]) + all_cases = 
len([case + for s in messages[key] + for m in messages[key][s] + for case in messages[key][s][m]]) + same_result_files = {case.result_file: case.time + for case in (messages[key][state][message] if report_individual_runs else + [c + for m in messages[key][state] + for c in messages[key][state][m]]) + if case.result_file} + test_file = case.test_file + line = case.line or 0 + test_name = case.test_name if case.test_name else 'Unknown test' + class_name = case.class_name + title = test_name if not class_name else f'{test_name} ({class_name})' + title_state = \ + 'pass' if state == 'success' else \ + 'failed' if state == 'failure' else \ + 'with error' if state == 'error' else \ + 'skipped' + if all_cases > 1: + if same_cases == all_cases: + title = f'All {all_cases} runs {title_state}: {title}' + else: + title = f'{same_cases} out of {all_cases} runs {title_state}: {title}' + else: + title = f'{title} {title_state}' + + level = ( + 'warning' if case.result == 'failure' else + 'failure' if case.result == 'error' else # failure is used for test errors + 'notice' + ) + + # pick details from message and content, but try to avoid redundancy (e.g. when content repeats message) + # always add stdout and stderr if they are not empty + maybe_message = [case.message] if not message_is_contained_in_content(case.message, case.content) else [] + details = [detail.rstrip() + for detail in maybe_message + [case.content, case.stdout, case.stderr] + if detail and detail.rstrip()] + + return Annotation( + path=test_file or class_name or '/', + start_line=line, + end_line=line, + start_column=None, + end_column=None, + annotation_level=level, + message='\n'.join([file if time is None else f'{file} [took {as_stat_duration(time)}]' + for file, time in sorted(same_result_files.items())]), + title=title, + raw_details='\n'.join(details) if details else None + ) + + +def get_case_annotations(case_results: UnitTestCaseResults, + report_individual_runs: bool) -> List[Annotation]: + messages = get_case_messages(case_results) + return [ + get_case_annotation(messages, key, state, message, report_individual_runs) + for key in messages + for state in messages[key] if state not in ['success', 'skipped'] + for message in (messages[key][state] if report_individual_runs else + [list(messages[key][state].keys())[0]]) + ] + + +def get_error_annotation(error: ParseError) -> Annotation: + return Annotation( + path=error.file, + start_line=error.line or 0, + end_line=error.line or 0, + start_column=error.column, + end_column=error.column, + annotation_level='failure', + message=error.message, + title=f'Error processing result file', + raw_details=error.file + ) + + +def get_error_annotations(parse_errors: List[ParseError]) -> List[Annotation]: + return [get_error_annotation(error) for error in parse_errors] + + +def get_suite_annotations_for_suite(suite: UnitTestSuite, with_suite_out_logs: bool, with_suite_err_logs: bool) -> List[Annotation]: + return [ + Annotation( + path=suite.name, + start_line=0, + end_line=0, + start_column=None, + end_column=None, + annotation_level='warning' if source == 'stderr' else 'notice', + message=f'Test suite {suite.name} has the following {source} output (see Raw output).', + title=f'Logging on {source} of test suite {suite.name}', + raw_details=details + ) + for details, source in ([(suite.stdout, 'stdout')] if with_suite_out_logs else []) + + ([(suite.stderr, 'stderr')] if with_suite_err_logs else []) + if details and details.strip() + ] + + +def get_suite_annotations(suites: 
List[UnitTestSuite], with_suite_out_logs: bool, with_suite_err_logs: bool) -> List[Annotation]: + return [annotation + for suite in suites + for annotation in get_suite_annotations_for_suite(suite, with_suite_out_logs, with_suite_err_logs)] + + +def get_test_name(file_name: Optional[str], + class_name: Optional[str], + test_name: Optional[str]) -> str: + if not test_name: + test_name = 'Unknown test' + + name = [] + token = ' ‑ ' # U+2011 non-breaking hyphen + for part in [file_name, class_name, test_name]: + if part: + name.append(part.replace(token, ' ‐ ')) # U+2010 breaking hyphen + + return token.join(name) + + +def get_all_tests_list(cases: UnitTestCaseResults) -> List[str]: + if not cases: + return [] + return [get_test_name(file_name, class_name, test_name) + for (file_name, class_name, test_name) in cases.keys()] + + +def get_skipped_tests_list(cases: UnitTestCaseResults) -> List[str]: + if not cases: + return [] + return [get_test_name(file_name, class_name, test_name) + for (file_name, class_name, test_name), result in cases.items() + if 'skipped' in result and len(result) == 1] + + +def get_all_tests_list_annotation(cases: UnitTestCaseResults, max_chunk_size: int = 64000) -> List[Annotation]: + return get_test_list_annotation(restrict_unicode_list(get_all_tests_list(cases)), 'test', max_chunk_size) + + +def get_skipped_tests_list_annotation(cases: UnitTestCaseResults, max_chunk_size: int = 64000) -> List[Annotation]: + return get_test_list_annotation(restrict_unicode_list(get_skipped_tests_list(cases)), 'skipped test', max_chunk_size) + + +def get_test_list_annotation(tests: List[str], label: str, max_chunk_size: int = 64000) -> List[Annotation]: + if len(tests) == 0: + return [] + + # the max_chunk_size must not be larger than the abbreviate_bytes limit in Annotation.to_dict + test_chunks = chunk_test_list(sorted(tests), '\n', max_chunk_size) + + if len(test_chunks) == 1: + if len(tests) == 1: + title = f'{len(tests)} {label} found' + message = f'There is 1 {label}, see "Raw output" for the name of the {label}.' + else: + title = f'{len(tests)} {label}s found' + message = f'There are {len(tests)} {label}s, see "Raw output" for the full list of {label}s.' + + return [create_tests_list_annotation(title=title, message=message, raw_details='\n'.join(test_chunks[0]))] + + first = 1 + annotations = [] + for chunk in test_chunks: + last = first + len(chunk) - 1 + title = f'{len(tests)} {label}s found (test {first} to {last})' + message = f'There are {len(tests)} {label}s, see "Raw output" for the list of {label}s {first} to {last}.' 
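+ # illustration with hypothetical numbers (not taken from a real run): 120 test names split by the byte budget + # into chunks of 50, 45 and 25 produce the titles '120 tests found (test 1 to 50)', + # '120 tests found (test 51 to 95)' and '120 tests found (test 96 to 120)', each chunk going into raw_details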
+ annotation = create_tests_list_annotation(title=title, message=message, raw_details='\n'.join(chunk)) + annotations.append(annotation) + first = last + 1 + + return annotations + + +def chunk_test_list(tests: List[str], delimiter: str, max_chunk_size: int) -> List[List[str]]: + if not tests: + return [] + + sizes = [len(f'{test}{delimiter}'.encode('utf8')) for test in tests] + if sum(sizes) <= max_chunk_size: + return [tests] + + if any(size > max_chunk_size for size in sizes): + logger.warning(f'Dropping all test names because some names are longer ' + f'than max_chunk_size of {max_chunk_size} bytes') + return [] + + chunks = [] + while tests: + size = 0 + length = 0 + while length < len(tests) and size + sizes[length] < max_chunk_size: + size = size + sizes[length] + length = length + 1 + + chunks.append(tests[:length]) + tests = tests[length:] + sizes = sizes[length:] + + return chunks + + +def create_tests_list_annotation(title: str, message: str, raw_details: Optional[str]) -> Annotation: + return Annotation( + path='.github', + start_line=0, + end_line=0, + start_column=None, + end_column=None, + annotation_level='notice', + message=message, + title=title, + raw_details=raw_details + ) diff --git a/python/publish/dart.py b/python/publish/dart.py new file mode 100644 index 0000000..d93d67b --- /dev/null +++ b/python/publish/dart.py @@ -0,0 +1,126 @@ +import json +from collections import defaultdict +from typing import Dict, Any, List + +from junitparser.junitparser import etree + +from publish.junit import JUnitTree + + +def is_dart_json(path: str) -> bool: + if not path.endswith('.json'): + return False + + try: + with open(path, 'rt') as r: + line = r.readline() + event = json.loads(line) + # {"protocolVersion":"0.1.1","runnerVersion":"1.23.1","pid":1705,"type":"start","time":0} + return event.get('type') == 'start' and 'protocolVersion' in event + except BaseException: + return False + + +def parse_dart_json_file(path: str) -> JUnitTree: + tests: Dict[int, Dict[Any, Any]] = defaultdict(lambda: dict()) + suites: Dict[int, Dict[Any, Any]] = defaultdict(lambda: dict()) + suite_tests: Dict[int, List[Any]] = defaultdict(lambda: list()) + suite_start = None + suite_time = None + + with open(path, 'rt') as r: + for line in r: + # https://github.com/dart-lang/test/blob/master/pkgs/test/doc/json_reporter.md + event = json.loads(line) + type = event.get('type') + + if type == 'start': + suite_start = event.get('time') + elif type == 'suite' and 'suite' in event and 'id' in event['suite']: + suite = event['suite'] + id = suite['id'] + suites[id]['path'] = suite.get('path') + suites[id]['start'] = event.get('time') + elif type == 'testStart' and 'test' in event and 'id' in event['test']: + test = event['test'] + id = test['id'] + tests[id]['name'] = test.get('name') + tests[id]['suite'] = test.get('suiteID') + tests[id]['line'] = test.get('line') # 1-based + tests[id]['column'] = test.get('column') # 1-based + tests[id]['url'] = test.get('url') + tests[id]['start'] = event.get('time') + if test.get('suiteID') is not None: + suite_tests[test.get('suiteID')].append(tests[id]) + elif type == 'testDone' and 'testID' in event: + id = event['testID'] + tests[id]['result'] = event.get('result') + tests[id]['hidden'] = event.get('hidden') + tests[id]['skipped'] = event.get('skipped') + tests[id]['end'] = event.get('time') + elif type == 'error' and 'testID' in event: + id = event['testID'] + tests[id]['error'] = event.get('error') + tests[id]['stackTrace'] = event.get('stackTrace') + 
tests[id]['isFailure'] = event.get('isFailure') + elif type == 'print' and 'testID' in event and event.get('messageType') == 'skip': + id = event['testID'] + tests[id]['reason'] = event.get('message') + elif type == 'done': + suite_time = event.get('time') + + def create_test(test): + testcase = etree.Element('testcase', attrib={k: str(v) for k, v in dict( + name=test.get('name'), + file=test.get('url'), + line=test.get('line'), + time=(test['end'] - test['start']) / 1000.0 if test.get('start') is not None and test.get('end') is not None else None, + ).items() if isinstance(v, str) and v or v is not None}) + + test_result = test.get('result', 'error') + if test_result != 'success': + result = etree.Element('error' if test_result != 'failure' else test_result, attrib={k: v for k, v in dict( + message=test.get('error') + ).items() if v}) + result.text = etree.CDATA('\n'.join(text + for text in [test.get('error'), test.get('stackTrace')] + if text)) + testcase.append(result) + elif test.get('skipped', False): + result = etree.Element('skipped', attrib={k: v for k, v in dict( + message=test.get('reason') + ).items() if v}) + testcase.append(result) + + return testcase + + def create_suite(suite, tests): + testsuite = etree.Element('testsuite', attrib={k: str(v) for k, v in dict( + name=suite.get('path'), + time=(suite['end'] - suite['start']) / 1000.0 if suite.get('start') is not None and suite.get('end') is not None else None, + tests=str(len(tests)), + failures=str(len([test for test in tests if test.get('isFailure', False)])), + errors=str(len([test for test in tests if not test.get('isFailure', True)])), + skipped=str(len([test for test in tests if test.get('skipped', False)])), + ).items() if isinstance(v, str) and v or v is not None}) + + testsuite.extend(create_test(test) for test in tests) + + return testsuite + + # do not count hidden tests (unless not successful) + visible_tests = [test for test in tests.values() if test.get('hidden') is not True or test.get('result') != 'success'] + testsuites = etree.Element('testsuites', attrib={k: str(v) for k, v in dict( + time=(suite_time - suite_start) / 1000.0 if suite_start is not None and suite_time is not None else None, + tests=str(len(visible_tests)), + failures=str(len([test for test in visible_tests if test.get('isFailure', False)])), + errors=str(len([test for test in visible_tests if not test.get('isFailure', True)])), + skipped=str(len([test for test in visible_tests if test.get('skipped', False)])), + ).items() if v is not None}) + + testsuites.extend([create_suite(suite, [test + for test in suite_tests[suite_id] + if test.get('hidden') is not True]) + for suite_id, suite in suites.items()]) + + xml = etree.ElementTree(testsuites) + return xml diff --git a/python/publish/github_action.py b/python/publish/github_action.py new file mode 100644 index 0000000..5ae1764 --- /dev/null +++ b/python/publish/github_action.py @@ -0,0 +1,186 @@ +import logging +import os +import sys +import traceback +from io import TextIOWrapper +from typing import Mapping, Any, Optional + +from publish import logger + + +class GithubAction: + + # GitHub Actions environment file variable names + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files + ENV_FILE_VAR_NAME = 'GITHUB_ENV' + PATH_FILE_VAR_NAME = 'GITHUB_PATH' + OUTPUT_FILE_VAR_NAME = 'GITHUB_OUTPUT' + JOB_SUMMARY_FILE_VAR_NAME = 'GITHUB_STEP_SUMMARY' + + def __init__(self, file: Optional[TextIOWrapper] = None): + if file is None: + file = sys.stdout + # pre Python 3.7, 
TextIOWrapper does not have reconfigure + if isinstance(file, TextIOWrapper) and hasattr(file, 'reconfigure'): + # ensure we have utf8 encoding, the default encoding of sys.stdout on Windows is cp1252 + file.reconfigure(encoding='utf-8') + + self._file: TextIOWrapper = file + + def add_mask(self, value: str): + self._command(self._file, 'add-mask', value) + + def stop_commands(self, end_token: str): + self._command(self._file, 'stop-commands', end_token) + + def continue_commands(self, end_token: str): + self._command(self._file, end_token) + + def group(self, title: str): + self._command(self._file, 'group', title) + + def group_end(self): + self._command(self._file, 'endgroup') + + def debug(self, message: str): + logger.debug(message) + self._command(self._file, 'debug', message) + + def notice(self, + message: str, + title: Optional[str] = None, + file: Optional[str] = None, + line: Optional[int] = None, + end_line: Optional[int] = None, + column: Optional[int] = None, + end_column: Optional[int] = None): + logger.info(message) + + params = {var: val + for var, val in [("title", title), + ("file", file), + ("col", column), + ("endColumn", end_column), + ("line", line), + ("endLine", end_line)] + if val is not None} + self._command(self._file, 'notice', message, params) + + def warning(self, message: str, file: Optional[str] = None, line: Optional[int] = None, column: Optional[int] = None): + logger.warning(message) + + params = {} + if file is not None: + params.update(file=file) + if line is not None: + params.update(line=line) + if column is not None: + params.update(col=column) + self._command(self._file, 'warning', message, params) + + def _exception(self, te: traceback.TracebackException): + def exception_str(te: traceback.TracebackException) -> str: + # we take the last line, which ends with a newline, that we strip + return list(te.format_exception_only())[-1].split('\n')[0] + + self.error('{te}{caused}{context}'.format( + te=exception_str(te), + caused=f' caused by {exception_str(te.__cause__)}' if te.__cause__ else '', + context=f' while handling {exception_str(te.__context__)}' if te.__context__ else '' + ), exception=None) + + for lines in te.format(chain=False): + for line in lines.split('\n'): + if line: + logger.debug(line) + + cause = te.__cause__ + while cause: + self._exception(cause) + cause = cause.__cause__ + + context = te.__context__ + while context: + self._exception(context) + context = context.__context__ + + def error(self, + message: str, + file: Optional[str] = None, line: Optional[int] = None, column: Optional[int] = None, + exception: Optional[BaseException] = None): + if exception: + self._exception(traceback.TracebackException.from_exception(exception)) + else: + logger.error(message) + + params = {} + if file is not None: + params.update(file=file) + if line is not None: + params.update(line=line) + if column is not None: + params.update(col=column) + self._command(self._file, 'error', message, params) + + def echo(self, on: bool): + self._command(self._file, 'echo', 'on' if on else 'off') + + @staticmethod + def _command(file: TextIOWrapper, command: str, value: str = '', params: Optional[Mapping[str, Any]] = None): + # take first line of value if multiline + value = value.split('\n', 1)[0] + + if params is None: + params = {} + params = ','.join([f'{key}={str(value)}' + for key, value in params.items()]) + params = f' {params}' if params else '' + + try: + file.write(f'::{command}{params}::{value}') + file.write(os.linesep) + except Exception as e: + 
logging.error(f'Failed to forward command {command} to GitHub Actions: {e}') + + def add_to_env(self, var: str, val: str): + if '\n' in val: + # if this is really needed, implement it as described here: + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + raise ValueError('Multiline values not supported for environment variables') + self._append_to_file(f'{var}={val}\n', self.ENV_FILE_VAR_NAME) + + def add_to_path(self, path: str): + self._append_to_file(f'{path}\n', self.PATH_FILE_VAR_NAME) + + def add_to_output(self, var: str, val: str): + if '\n' in val: + # if this is really needed, implement it as described here: + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings + raise ValueError('Multiline values not supported for output values') + + if not self._append_to_file(f'{var}={val}\n', self.OUTPUT_FILE_VAR_NAME, warn=False): + # this has been deprecated but we fall back if there is no env file + self._command(self._file, 'set-output', val, {'name': var}) + + def add_to_job_summary(self, markdown: str): + self._append_to_file(markdown, self.JOB_SUMMARY_FILE_VAR_NAME) + + def _append_to_file(self, content: str, env_file_var_name: str, warn: bool = True) -> bool: + # appends content to an environment file denoted by an environment variable name + # https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files + filename = os.getenv(env_file_var_name) + if not filename: + if warn: + self.warning(f'Cannot append to environment file {env_file_var_name} as it is not set. ' + f'See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files') + return False + + try: + with open(filename, 'a', encoding='utf-8') as file: + file.write(content) + except Exception as e: + self.warning(f'Failed to write to environment file {filename}: {str(e)}. ' + f'See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files') + return False + + return True diff --git a/python/publish/junit.py b/python/publish/junit.py new file mode 100644 index 0000000..79a348c --- /dev/null +++ b/python/publish/junit.py @@ -0,0 +1,346 @@ +import math +import os +from collections import defaultdict +from typing import Optional, Iterable, Union, List, Dict, Callable, Tuple + +import junitparser +from junitparser import Element, JUnitXml, JUnitXmlError, TestCase, TestSuite, Skipped +from junitparser.junitparser import etree + +from publish.unittestresults import ParsedUnitTestResults, UnitTestSuite, UnitTestCase, ParseError + +try: + import lxml.etree + lxml_available = True +except ImportError: + lxml_available = False + + +def xml_has_root_element(path: str, allowed_root_elements: List[str]) -> bool: + try: + with open(path, 'rb') as r: + it = etree.iterparse(r, events=['start']) + action, elem = next(it, (None, None)) + return action == 'start' and elem is not None and etree.QName(elem).localname in allowed_root_elements + except BaseException: + return False + + +def is_junit(path: str) -> bool: + return xml_has_root_element(path, ['testsuites', 'testsuite']) + + +def get_results(results: Union[Element, List[Element]], status: Optional[str] = None) -> List[Element]: + """ + Returns the results with the most severe state. + For example: If there are failures and succeeded tests, returns only the failures. 
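+ A single Element is wrapped into a one-element list. + A list with no results yields a synthetic Disabled result when status is 'disabled', otherwise an empty list.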
+ """ + if isinstance(results, List): + d = defaultdict(list) + for result in results: + if result: + d[get_result(result)].append(result) + + for state in ['error', 'failure', 'success', 'skipped', 'disabled']: + if state in d: + return d[state] + + if status and status in ['disabled']: + return [Disabled()] + + return [] + + return [results] + + +def get_result(results: Union[Element, List[Element]]) -> str: + """ + Returns the result of the given results. + All results are expected to be of the same state. + :param results: + :return: + """ + if isinstance(results, List): + return get_result(results[0]) if results else 'success' + return results._tag if results else 'success' + + +def get_message(results: Union[Element, List[Element]]) -> str: + """ + Returns an aggregated message from all given results. + :param results: + :return: + """ + if isinstance(results, List): + messages = [result.message + for result in results + if result and result.message] + message = '\n'.join(messages) if messages else None + else: + message = results.message if results else None + return message + + +def get_content(results: Union[Element, List[Element]]) -> str: + """ + Returns an aggregated content form all given results. + :param results: + :return: + """ + if isinstance(results, List): + contents = [result.text + for result in results + if result is not None and result.text is not None] + content = '\n'.join(contents) if contents else None + else: + content = results.text if results else None + return content + + +class DropTestCaseBuilder(etree.TreeBuilder): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self._stack = [] + + def start(self, tag: Union[str, bytes], attrs: Dict[Union[str, bytes], Union[str, bytes]]) -> Element: + self._stack.append(tag) + if junitparser.TestCase._tag not in self._stack: + return super().start(tag, attrs) + + def end(self, tag: Union[str, bytes]) -> Element: + try: + if junitparser.TestCase._tag not in self._stack: + return super().end(tag) + finally: + if self._stack: + self._stack.pop() + + def close(self) -> Element: + # when lxml is around, we have to return an ElementTree here, otherwise + # XMLParser(target=...).parse(..., parser=...) + # returns an Element, not a ElementTree, but junitparser expects an ElementTree + # + # https://lxml.de/parsing.html: + # Note that the parser does not build a tree when using a parser target. The result of the parser run is + # whatever the target object returns from its .close() method. If you want to return an XML tree here, you + # have to create it programmatically in the target object. 
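+ # hence the two branches below: when lxml is installed, wrap the root element in an lxml ElementTree, + # otherwise fall back to whatever the standard-library TreeBuilder returns from close()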
+ if lxml_available: + return lxml.etree.ElementTree(super().close()) + else: + return super().close() + + +JUnitTree = etree.ElementTree +JUnitTreeOrParseError = Union[JUnitTree, ParseError] +JUnitXmlOrParseError = Union[JUnitXml, ParseError] +ParsedJUnitFile = Tuple[str, JUnitTreeOrParseError] + + +def safe_parse_xml_file(path: str, parse: Callable[[str], JUnitTree]) -> JUnitTreeOrParseError: + """Parses an xml file and returns either a JUnitTree or a ParseError.""" + if not os.path.exists(path): + return ParseError.from_exception(path, FileNotFoundError(f'File does not exist.')) + if os.stat(path).st_size == 0: + return ParseError.from_exception(path, Exception(f'File is empty.')) + + try: + return parse(path) + except BaseException as e: + return ParseError.from_exception(path, e) + + +def progress_safe_parse_xml_file(files: Iterable[str], + parse: Callable[[str], JUnitTree], + progress: Callable[[ParsedJUnitFile], ParsedJUnitFile]) -> Iterable[ParsedJUnitFile]: + return [progress((file, safe_parse_xml_file(file, parse))) for file in files] + + +def parse_junit_xml_file(path: str, large_files: bool, drop_testcases: bool) -> JUnitTree: + if drop_testcases: + builder = DropTestCaseBuilder() + parser = etree.XMLParser(target=builder, encoding='utf-8', huge_tree=large_files) + return etree.parse(path, parser=parser) + elif large_files: + parser = etree.XMLParser(huge_tree=True) + return etree.parse(path, parser=parser) + return etree.parse(path) + + +def parse_junit_xml_files(files: Iterable[str], large_files: bool, drop_testcases: bool, + progress: Callable[[ParsedJUnitFile], ParsedJUnitFile] = lambda x: x) -> Iterable[ParsedJUnitFile]: + """Parses junit xml files.""" + def parse(path: str) -> JUnitTree: + return parse_junit_xml_file(path, large_files, drop_testcases) + + return progress_safe_parse_xml_file(files, parse, progress) + + +def adjust_prefix(file: Optional[str], prefix: Optional[str]) -> Optional[str]: + if prefix is None or file is None: + return file + + # prefix starts either with '+' or '-' + if prefix.startswith('+'): + # add prefix + return "".join([prefix[1:], file]) + + # remove prefix + return file[len(prefix)-1:] if file.startswith(prefix[1:]) else file + + +def process_junit_xml_elems(trees: Iterable[ParsedJUnitFile], + *, + time_factor: float = 1.0, + test_file_prefix: Optional[str] = None, + add_suite_details: bool = False) -> ParsedUnitTestResults: + def create_junitxml(filepath: str, tree: JUnitTree) -> JUnitXmlOrParseError: + try: + instance = JUnitXml.fromroot(tree.getroot()) + instance.filepath = filepath + return instance + except JUnitXmlError as e: + return ParseError.from_exception(filepath, e) + + processed = [(result_file, create_junitxml(result_file, tree) if not isinstance(tree, ParseError) else tree) + for result_file, tree in trees] + junits = [(result_file, junit) + for result_file, junit in processed + if not isinstance(junit, ParseError)] + errors = [error + for _, error in processed + if isinstance(error, ParseError)] + + suites = [(result_file, suite) + for result_file, junit in junits + for suite in (junit if junit._tag == "testsuites" else [junit])] + + suite_tests = sum([suite.tests for result_file, suite in suites if suite.tests]) + suite_skipped = sum([suite.skipped + suite.disabled for result_file, suite in suites if suite.skipped and not math.isnan(suite.skipped)]) + suite_failures = sum([suite.failures for result_file, suite in suites if suite.failures and not math.isnan(suite.failures)]) + suite_errors = sum([suite.errors for 
result_file, suite in suites if suite.errors and not math.isnan(suite.errors)]) + suite_time = int(sum([suite.time for result_file, suite in suites + if suite.time and not math.isnan(suite.time)]) * time_factor) + + def int_opt(string: Optional[str]) -> Optional[int]: + try: + return int(string) if string else None + except ValueError: + return None + + def get_cases(suite: TestSuite) -> List[TestCase]: + """ + JUnit allows for testsuite tags inside testsuite tags at any depth. + https://llg.cubic.org/docs/junit/ + + This skips all inner testsuite tags and returns a list of all contained testcase tags. + """ + suites = list(suite.iterchildren(TestSuite)) + cases = list(suite.iterchildren(TestCase)) + return [case + for suite in suites + for case in get_cases(suite)] + cases + + def get_leaf_suites(suite: TestSuite) -> List[TestSuite]: + """ + JUnit allows for testsuite tags inside testsuite tags at any depth. + https://llg.cubic.org/docs/junit/ + + This enumerates all leaf testsuite tags and those with testcases tags. + """ + suites = list(suite.iterchildren(TestSuite)) + cases = list(suite.iterchildren(TestCase)) + return [leaf_suite + for suite in suites + for leaf_suite in get_leaf_suites(suite)] + ([suite] if cases or not suites else []) + + leaf_suites = [leaf_suite + for _, suite in suites + for leaf_suite in get_leaf_suites(suite)] + + def get_text(elem, tag): + child = elem.find(tag) + if child is not None: + text = child.text.strip() + return text if text else None + return None + + suite_details = [ + UnitTestSuite( + leaf_suite.name, + leaf_suite.tests, + leaf_suite.skipped, + leaf_suite.failures, + leaf_suite.errors, + get_text(leaf_suite._elem, 'system-out'), + get_text(leaf_suite._elem, 'system-err'), + ) + for leaf_suite in leaf_suites + ] if add_suite_details else [] + + # junit allows for multiple results for a single test case (e.g. 
success and failure for the same test) + # we pick the most severe result, which could still be multiple results, so we aggregate those, which is messy + cases = [ + UnitTestCase( + result_file=result_file, + test_file=adjust_prefix(case._elem.get('file'), test_file_prefix), + line=int_opt(case._elem.get('line')), + class_name=case.classname, + test_name=case.name, + result=get_result(results), + message=get_message(results), + content=get_content(results), + stdout=case.system_out, + stderr=case.system_err, + time=case.time * time_factor if case.time is not None else case.time + ) + for result_file, suite in suites + for case in get_cases(suite) + if case.classname is not None or case.name is not None + # junit allows for multiple results in one test case, pick the most severe results + for results in [get_results(case.result, case.status)] + ] + + return ParsedUnitTestResults( + files=len(list(trees)), + errors=errors, + # test state counts from suites + suites=len(leaf_suites), + suite_tests=suite_tests, + suite_skipped=suite_skipped, + suite_failures=suite_failures, + suite_errors=suite_errors, + suite_time=suite_time, + suite_details=suite_details, + # test cases + cases=cases + ) + + +@property +def disabled(self) -> int: + disabled = self._elem.get('disabled', '0') + if disabled.isnumeric(): + return int(disabled) + return 0 + + +# add special type of test case result to TestSuite +TestSuite.disabled = disabled + + +@property +def status(self) -> str: + return self._elem.get('status') + + +# special attribute of TestCase +TestCase.status = status + + +class Disabled(Skipped): + """Test result when the test is disabled.""" + + _tag = "disabled" + + def __eq__(self, other): + return super(Disabled, self).__eq__(other) diff --git a/python/publish/mocha.py b/python/publish/mocha.py new file mode 100644 index 0000000..bd9d6a3 --- /dev/null +++ b/python/publish/mocha.py @@ -0,0 +1,75 @@ +import json + +from junitparser.junitparser import etree + +from publish.junit import JUnitTree + + +def is_mocha_json(path: str) -> bool: + if not path.endswith('.json'): + return False + + try: + with open(path, 'rt') as r: + results = json.load(r) + return 'stats' in results and isinstance(results.get('stats'), dict) and 'suites' in results.get('stats') and \ + 'tests' in results and isinstance(results.get('tests'), list) and all(isinstance(test, dict) for test in results.get('tests')) and ( + len(results.get('tests')) == 0 or all(test.get('fullTitle') for test in results.get('tests')) + ) + except BaseException: + return False + + +def parse_mocha_json_file(path: str) -> JUnitTree: + with open(path, 'rt') as r: + results = json.load(r) + + stats = results.get('stats', {}) + skippedTests = {test.get('fullTitle') for test in results.get('pending', [])} + suite = etree.Element('testsuite', attrib={k: str(v) for k, v in dict( + time=stats.get('duration'), + timestamp=stats.get('start') + ).items() if v}) + + tests = 0 + failures = 0 + errors = 0 + skipped = 0 + for test in results.get('tests', []): + tests = tests + 1 + testcase = etree.Element('testcase', + attrib={k: str(v) for k, v in dict( + name=test.get('fullTitle'), + file=test.get('file'), + time=test.get('duration') + ).items() if v} + ) + + err = test.get('err') + if err: + if err.get('errorMode'): + errors = errors + 1 + type = 'error' + else: + failures = failures + 1 + type = 'failure' + + result = etree.Element(type, attrib={k: v for k, v in dict( + message=err.get('message').translate(dict.fromkeys(range(32))), + type=err.get('errorMode') + 
).items() if v}) + result.text = etree.CDATA('\n'.join(text.translate(dict.fromkeys(range(32))) + for text in [err.get('name'), err.get('message'), err.get('stack')] + if text)) + testcase.append(result) + elif test.get('fullTitle') in skippedTests: + skipped = skipped + 1 + result = etree.Element('skipped') + testcase.append(result) + + suite.append(testcase) + + suite.attrib.update(dict(tests=str(tests), failures=str(failures), errors=str(errors), skipped=str(skipped))) + xml = etree.ElementTree(suite) + + return xml diff --git a/python/publish/nunit.py b/python/publish/nunit.py new file mode 100644 index 0000000..8708bf2 --- /dev/null +++ b/python/publish/nunit.py @@ -0,0 +1,31 @@ +import pathlib +from typing import Iterable, Callable + +from lxml import etree + +from publish.junit import JUnitTree, ParsedJUnitFile, progress_safe_parse_xml_file, xml_has_root_element + +with (pathlib.Path(__file__).resolve().parent / 'xslt' / 'nunit3-to-junit.xslt').open('r', encoding='utf-8') as r: + transform_nunit_to_junit = etree.XSLT(etree.parse(r), regexp=False, access_control=etree.XSLTAccessControl.DENY_ALL) + + +def is_nunit(path: str) -> bool: + return xml_has_root_element(path, ['test-results', 'test-run', 'test-suite']) + + +def parse_nunit_file(path: str, large_files: bool) -> JUnitTree: + if large_files: + parser = etree.XMLParser(huge_tree=True) + nunit = etree.parse(path, parser=parser) + else: + nunit = etree.parse(path) + return transform_nunit_to_junit(nunit) + + +def parse_nunit_files(files: Iterable[str], large_files: bool, + progress: Callable[[ParsedJUnitFile], ParsedJUnitFile] = lambda x: x) -> Iterable[ParsedJUnitFile]: + """Parses nunit files.""" + def parse(path: str) -> JUnitTree: + return parse_nunit_file(path, large_files) + + return progress_safe_parse_xml_file(files, parse, progress) diff --git a/python/publish/progress.py b/python/publish/progress.py new file mode 100644 index 0000000..4137e84 --- /dev/null +++ b/python/publish/progress.py @@ -0,0 +1,86 @@ +import contextlib +from datetime import datetime +from logging import Logger +from threading import Timer +from typing import Generic, TypeVar, Optional, Callable, Type, Any + +import humanize + +from publish import punctuation_space + +T = TypeVar('T') + + +@contextlib.contextmanager +def progress_logger(items: int, + interval_seconds: int, + progress_template: str, + finish_template: Optional[str], + logger: Logger, + progress_item_type: Type[T] = Any) -> Callable[[T], T]: + progress = Progress[progress_item_type](items) + plogger = ProgressLogger(progress, interval_seconds, progress_template, logger).start() + try: + yield progress.observe + finally: + plogger.finish(finish_template) + + +class Progress(Generic[T]): + def __init__(self, items: int): + self.items = items + self.observations = 0 + + def observe(self, observation: T) -> T: + self.observations = self.observations + 1 + return observation + + def get_progress(self) -> str: + return '{observations:,} of {items:,}'.format( + observations=self.observations, items=self.items + ).replace(',', punctuation_space) + + +class ProgressLogger: + def __init__(self, progress: Progress, interval_seconds: int, template: str, logger: Logger): + self._progress = progress + self._interval_seconds = interval_seconds + self._template = template + self._logger = logger + + self._start = None + self._duration = None + self._timer = self._get_progress_timer() + + def start(self) -> 'ProgressLogger': + self._start = datetime.utcnow() + self._timer.start() + return self + + def 
finish(self, template: Optional[str] = None): + self._duration = datetime.utcnow() - self._start + self._start = None + self._timer.cancel() + + if template: + self._logger.info(template.format(items=self._progress.items, + observations=self._progress.observations, + duration=self.duration)) + + @property + def duration(self) -> str: + return humanize.precisedelta(self._duration) + + def _get_progress_timer(self): + timer = Timer(self._interval_seconds, self._log_progress) + timer.setDaemon(daemonic=True) + return timer + + def _log_progress(self): + if self._start is None: + return + + delta = datetime.utcnow() - self._start + self._logger.info(self._template.format(progress=self._progress.get_progress(), time=humanize.precisedelta(delta))) + self._timer = self._get_progress_timer() + self._timer.start() diff --git a/python/publish/publisher.py b/python/publish/publisher.py new file mode 100644 index 0000000..ca96127 --- /dev/null +++ b/python/publish/publisher.py @@ -0,0 +1,740 @@ +import dataclasses +import json +import logging +import os +import re +from dataclasses import dataclass +from typing import List, Set, Any, Optional, Tuple, Mapping, Dict, Union, Callable +from copy import deepcopy + +from github import Github, GithubException, UnknownObjectException +from github.CheckRun import CheckRun +from github.CheckRunAnnotation import CheckRunAnnotation +from github.PullRequest import PullRequest +from github.IssueComment import IssueComment + +from publish import __version__, get_json_path, comment_mode_off, digest_prefix, restrict_unicode_list, \ + comment_mode_always, comment_mode_changes, comment_mode_changes_failures, comment_mode_changes_errors, \ + comment_mode_failures, comment_mode_errors, \ + get_stats_from_digest, digest_header, get_short_summary, get_long_summary_md, \ + get_long_summary_with_digest_md, get_error_annotations, get_case_annotations, get_suite_annotations, \ + get_all_tests_list_annotation, get_skipped_tests_list_annotation, get_all_tests_list, \ + get_skipped_tests_list, all_tests_list, skipped_tests_list, pull_request_build_mode_merge, \ + Annotation, SomeTestChanges +from publish import logger +from publish.github_action import GithubAction +from publish.unittestresults import UnitTestCaseResults, UnitTestRunResults, UnitTestRunDeltaResults, \ + UnitTestRunResultsOrDeltaResults, get_stats_delta, create_unit_test_case_results + + +@dataclass(frozen=True) +class Settings: + token: str + actor: str + api_url: str + graphql_url: str + api_retries: int + event: dict + event_file: Optional[str] + event_name: str + is_fork: bool + repo: str + commit: str + json_file: Optional[str] + json_thousands_separator: str + json_suite_details: bool + json_test_case_results: bool + fail_on_errors: bool + fail_on_failures: bool + action_fail: bool + action_fail_on_inconclusive: bool + # one of these *files_glob must be set + files_glob: Optional[str] + junit_files_glob: Optional[str] + nunit_files_glob: Optional[str] + xunit_files_glob: Optional[str] + trx_files_glob: Optional[str] + test_file_prefix: Optional[str] + time_factor: float + check_name: str + comment_title: str + comment_mode: str + job_summary: bool + compare_earlier: bool + pull_request_build: str + test_changes_limit: int + report_individual_runs: bool + report_suite_out_logs: bool + report_suite_err_logs: bool + dedup_classes_by_file_name: bool + large_files: bool + ignore_runs: bool + check_run_annotation: List[str] + seconds_between_github_reads: float + seconds_between_github_writes: float + 
secondary_rate_limit_wait_seconds: float + search_pull_requests: bool + + +@dataclasses.dataclass(frozen=True) +class PublishData: + title: str + summary: str + conclusion: str + stats: UnitTestRunResults + stats_with_delta: Optional[UnitTestRunDeltaResults] + annotations: List[Annotation] + check_url: str + cases: Optional[UnitTestCaseResults] + + def without_exceptions(self) -> 'PublishData': + return dataclasses.replace( + self, + # remove exceptions + stats=self.stats.without_exceptions(), + stats_with_delta=self.stats_with_delta.without_exceptions() if self.stats_with_delta else None, + # turn defaultdict into simple dict + cases={test: {state: cases for state, cases in states.items()} + for test, states in self.cases.items()} if self.cases else None + ) + + def without_suite_details(self) -> 'PublishData': + return dataclasses.replace(self, stats=self.stats.without_suite_details()) + + def without_cases(self) -> 'PublishData': + return dataclasses.replace(self, cases=None) + + @classmethod + def _format_digit(cls, value: Union[int, Mapping[str, int], Any], thousands_separator: str) -> Union[str, Mapping[str, str], Any]: + if isinstance(value, int): + return f'{value:,}'.replace(',', thousands_separator) + if isinstance(value, Mapping): + return {k: cls._format_digit(v, thousands_separator) for (k, v) in value.items()} + return value + + @classmethod + def _format(cls, stats: Mapping[str, Any], thousands_separator: str) -> Dict[str, Any]: + return {k: cls._format_digit(v, thousands_separator) for (k, v) in stats.items()} + + @classmethod + def _formatted_stats_and_delta(cls, + stats: Optional[Mapping[str, Any]], + stats_with_delta: Optional[Mapping[str, Any]], + thousands_separator: str) -> Mapping[str, Any]: + d = {} + if stats is not None: + d.update(stats=cls._format(stats, thousands_separator)) + if stats_with_delta is not None: + d.update(stats_with_delta=cls._format(stats_with_delta, thousands_separator)) + return d + + def _as_dict(self) -> Dict[str, Any]: + # the dict_factory removes None values + return dataclasses.asdict(self, dict_factory=lambda x: {k: v for (k, v) in x if v is not None}) + + def to_dict(self, thousands_separator: str, with_suite_details: bool, with_cases: bool) -> Mapping[str, Any]: + data = self.without_exceptions() + if not with_suite_details: + data = data.without_suite_details() + if not with_cases: + data = data.without_cases() + d = data._as_dict() + + # beautify cases, turn tuple-key into proper fields + if d.get('cases'): + d['cases'] = [{k: v for k, v in [('file_name', test[0]), + ('class_name', test[1]), + ('test_name', test[2]), + ('states', states)] + if v} + for test, states in d['cases'].items()] + + # provide formatted stats and delta + d.update(formatted=self._formatted_stats_and_delta( + d.get('stats'), d.get('stats_with_delta'), thousands_separator + )) + + return d + + def to_reduced_dict(self, thousands_separator: str) -> Mapping[str, Any]: + # remove exceptions, suite details and cases + data = self.without_exceptions().without_suite_details().without_cases()._as_dict() + + # replace some large fields with their lengths and delete individual test cases if present + def reduce(d: Dict[str, Any]) -> Dict[str, Any]: + d = deepcopy(d) + if d.get('stats', {}).get('errors') is not None: + d['stats']['errors'] = len(d['stats']['errors']) + if d.get('stats_with_delta', {}).get('errors') is not None: + d['stats_with_delta']['errors'] = len(d['stats_with_delta']['errors']) + if d.get('annotations') is not None: + d['annotations'] = 
len(d['annotations']) + return d + + data = reduce(data) + data.update(formatted=self._formatted_stats_and_delta( + data.get('stats'), data.get('stats_with_delta'), thousands_separator + )) + + return data + + +class Publisher: + + def __init__(self, settings: Settings, gh: Github, gha: GithubAction): + self._settings = settings + self._gh = gh + self._gha = gha + self._repo = gh.get_repo(self._settings.repo) + self._req = gh._Github__requester + + def publish(self, + stats: UnitTestRunResults, + cases: UnitTestCaseResults, + conclusion: str): + logger.info(f'Publishing {conclusion} results for commit {self._settings.commit}') + if logger.isEnabledFor(logging.DEBUG): + logger.debug(f'Publishing {stats}') + + if self._settings.is_fork: + # running on a fork, we cannot publish the check, but we can still read before_check_run + # bump the version if you change the target of this link (if it did not exist already) or change the section + logger.info('This action is running on a pull_request event for a fork repository. ' + 'Pull request comments and check runs cannot be created, so disabling these features. ' + 'To fully run the action on fork repository pull requests, see ' + f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#support-fork-repositories-and-dependabot-branches') + check_run = None + before_check_run = None + if self._settings.compare_earlier: + before_commit_sha = get_json_path(self._settings.event, 'before') + logger.debug(f'comparing against before={before_commit_sha}') + before_check_run = self.get_check_run(before_commit_sha) + else: + check_run, before_check_run = self.publish_check(stats, cases, conclusion) + + if self._settings.job_summary: + self.publish_job_summary(self._settings.comment_title, stats, check_run, before_check_run) + + if not self._settings.is_fork: + if self._settings.comment_mode != comment_mode_off: + pulls = self.get_pulls(self._settings.commit) + if pulls: + for pull in pulls: + self.publish_comment(self._settings.comment_title, stats, pull, check_run, cases) + else: + logger.info(f'There is no pull request for commit {self._settings.commit}') + else: + logger.info('Commenting on pull requests disabled') + + def get_pull_from_event(self) -> Optional[PullRequest]: + number = get_json_path(self._settings.event, 'pull_request.number') + repo = get_json_path(self._settings.event, 'pull_request.base.repo.full_name') + if number is None or repo is None or repo != self._settings.repo: + return None + + try: + return self._repo.get_pull(number) + except UnknownObjectException: + return None + + def get_pulls_from_commit(self, commit: str) -> List[PullRequest]: + try: + # totalCount of PaginatedList calls the GitHub API just to get the total number + # we have to retrieve them all anyway so better do this once by materialising the PaginatedList via list() + return list(self._repo.get_commit(commit).get_pulls()) + except UnknownObjectException: + return [] + + def get_all_pulls(self, commit: str) -> List[PullRequest]: + if self._settings.search_pull_requests: + # totalCount of PaginatedList calls the GitHub API just to get the total number + # we have to retrieve them all anyway so better do this once by materialising the PaginatedList via list() + issues = list(self._gh.search_issues(f'type:pr repo:"{self._settings.repo}" {commit}')) + pull_requests = [issue.as_pull_request() for issue in issues] + else: + pull_request = self.get_pull_from_event() + pull_requests = [pull_request] if pull_request is not None else 
self.get_pulls_from_commit(commit) + + logger.debug(f'found {len(pull_requests)} pull requests in repo {self._settings.repo} containing commit {commit}') + return pull_requests + + def get_pulls(self, commit: str) -> List[PullRequest]: + # get all pull requests associated with this commit + # TODO: simplify to event pr only, breaking change for version 3.0 + pull_requests = self.get_all_pulls(commit) + + if logger.isEnabledFor(logging.DEBUG): + for pr in pull_requests: + logger.debug(pr) + logger.debug(pr.raw_data) + logger.debug(f'PR {pr.html_url}: {pr.head.repo.full_name} -> {pr.base.repo.full_name}') + + # we can only publish the comment to PRs that are in the same repository as this action is executed in + # so pr.base.repo.full_name must be same as GITHUB_REPOSITORY / self._settings.repo + # we won't have permission otherwise + pulls = list([pr + for pr in pull_requests + if pr.base.repo.full_name == self._settings.repo]) + + if len(pulls) == 0: + logger.debug(f'found no pull requests in repo {self._settings.repo} for commit {commit}') + return [] + + # we only comment on PRs that have the commit as their current head or merge commit + pulls = [pull for pull in pulls if commit in [pull.head.sha, pull.merge_commit_sha]] + if len(pulls) == 0: + logger.debug(f'found no pull request in repo {self._settings.repo} with ' + f'commit {commit} as current head or merge commit') + return [] + + # only comment on the open PRs + pulls = [pull for pull in pulls if pull.state == 'open'] + if len(pulls) == 0: + logger.debug(f'found multiple pull requests in repo {self._settings.repo} with ' + f'commit {commit} as current head or merge commit but none is open') + + for pull in pulls: + logger.debug(f'found open pull request #{pull.number} with commit {commit} as current head or merge commit') + return pulls + + def get_stats_from_commit(self, commit_sha: str) -> Optional[UnitTestRunResults]: + check_run = self.get_check_run(commit_sha) + return self.get_stats_from_check_run(check_run) if check_run is not None else None + + def get_check_run(self, commit_sha: str) -> Optional[CheckRun]: + if commit_sha is None or commit_sha == '0000000000000000000000000000000000000000': + return None + + commit = None + try: + commit = self._repo.get_commit(commit_sha) + except GithubException as e: + if e.status == 422: + self._gha.warning(str(e.data)) + else: + raise e + + if commit is None: + self._gha.error(f'Could not find commit {commit_sha}') + return None + + runs = commit.get_check_runs() + # totalCount calls the GitHub API, so better not do this if we are not logging the result anyway + if logger.isEnabledFor(logging.DEBUG): + logger.debug(f'found {runs.totalCount} check runs for commit {commit_sha}') + + return self.get_check_run_from_list(list(runs)) + + def get_check_run_from_list(self, runs: List[CheckRun]) -> Optional[CheckRun]: + # filter for runs with the same name as configured + runs = [run for run in runs if run.name == self._settings.check_name] + logger.debug(f'there are {len(runs)} check runs with title {self._settings.check_name}') + if len(runs) == 0: + return None + if len(runs) == 1: + return runs[0] + + # filter based on summary + runs = [run for run in runs if run.output.summary and digest_prefix in run.output.summary] + logger.debug(f'there are {len(runs)} check runs with a test result summary') + if len(runs) == 0: + return None + if len(runs) == 1: + return runs[0] + + # filter for completed runs + runs = [run for run in runs if run.status == 'completed'] + logger.debug(f'there are 
{len(runs)} check runs with completed status') + if len(runs) == 0: + return None + if len(runs) == 1: + return runs[0] + + # pick run that started latest + return sorted(runs, key=lambda run: run.started_at, reverse=True)[0] + + @staticmethod + def get_stats_from_check_run(check_run: CheckRun) -> Optional[UnitTestRunResults]: + summary = check_run.output.summary + if summary is None: + return None + for line in summary.split('\n'): + logger.debug(f'summary: {line}') + + return Publisher.get_stats_from_summary_md(summary) + + @staticmethod + def get_stats_from_summary_md(summary: str) -> Optional[UnitTestRunResults]: + start = summary.index(digest_header) if digest_header in summary else None + if start: + digest = summary[start + len(digest_header):] + end = digest.index('\n') if '\n' in digest else None + if end: + digest = digest[:end] + logger.debug(f'digest: {digest}') + stats = get_stats_from_digest(digest) + logger.debug(f'stats: {stats}') + return stats + + @staticmethod + def get_test_list_from_annotation(annotation: CheckRunAnnotation) -> Optional[List[str]]: + if annotation is None or not annotation.raw_details: + return None + return annotation.raw_details.split('\n') + + def publish_check(self, + stats: UnitTestRunResults, + cases: UnitTestCaseResults, + conclusion: str) -> Tuple[CheckRun, Optional[CheckRun]]: + # get stats from earlier commits + before_stats = None + before_check_run = None + if self._settings.compare_earlier: + before_commit_sha = get_json_path(self._settings.event, 'before') + logger.debug(f'comparing against before={before_commit_sha}') + before_check_run = self.get_check_run(before_commit_sha) + before_stats = self.get_stats_from_check_run(before_check_run) if before_check_run is not None else None + stats_with_delta = get_stats_delta(stats, before_stats, 'earlier') if before_stats is not None else stats + logger.debug(f'stats with delta: {stats_with_delta}') + + error_annotations = get_error_annotations(stats.errors) + case_annotations = get_case_annotations(cases, self._settings.report_individual_runs) + output_annotations = get_suite_annotations(stats.suite_details, self._settings.report_suite_out_logs, self._settings.report_suite_err_logs) + test_list_annotations = self.get_test_list_annotations(cases) + all_annotations = error_annotations + case_annotations + output_annotations + test_list_annotations + + title = get_short_summary(stats) + summary = get_long_summary_md(stats_with_delta) + + # we can send only 50 annotations at once, so we split them into chunks of 50 + check_run = None + summary_with_digest = get_long_summary_with_digest_md(stats_with_delta, stats) + split_annotations = [annotation.to_dict() for annotation in all_annotations] + split_annotations = [split_annotations[x:x+50] for x in range(0, len(split_annotations), 50)] or [[]] + for annotations in split_annotations: + output = dict( + title=title, + summary=summary_with_digest, + annotations=annotations + ) + + if check_run is None: + logger.debug(f'creating check with {len(annotations)} annotations') + check_run = self._repo.create_check_run(name=self._settings.check_name, + head_sha=self._settings.commit, + status='completed', + conclusion=conclusion, + output=output) + logger.info(f'Created check {check_run.html_url}') + else: + logger.debug(f'updating check with {len(annotations)} more annotations') + check_run.edit(output=output) + logger.debug(f'updated check') + + # create full json + data = PublishData( + title=title, + summary=summary, + conclusion=conclusion, + 
stats=stats, + stats_with_delta=stats_with_delta if before_stats is not None else None, + annotations=all_annotations, + check_url=check_run.html_url, + cases=cases + ) + self.publish_json(data) + + return check_run, before_check_run + + def publish_json(self, data: PublishData): + if self._settings.json_file: + try: + with open(self._settings.json_file, 'wt', encoding='utf-8') as w: + json.dump(data.to_dict( + self._settings.json_thousands_separator, + self._settings.json_suite_details, + self._settings.json_test_case_results + ), w, ensure_ascii=False) + except Exception as e: + self._gha.error(f'Failed to write JSON file {self._settings.json_file}: {str(e)}') + try: + os.unlink(self._settings.json_file) + except: + pass + + # provide a reduced version to Github actions + self._gha.add_to_output('json', json.dumps(data.to_reduced_dict(self._settings.json_thousands_separator), ensure_ascii=False)) + + def publish_job_summary(self, + title: str, + stats: UnitTestRunResults, + check_run: CheckRun, + before_check_run: Optional[CheckRun]): + before_stats = self.get_stats_from_check_run(before_check_run) if before_check_run is not None else None + stats_with_delta = get_stats_delta(stats, before_stats, 'earlier') if before_stats is not None else stats + + details_url = check_run.html_url if check_run else None + summary = get_long_summary_md(stats_with_delta, details_url) + markdown = f'## {title}\n{summary}' + self._gha.add_to_job_summary(markdown) + logger.info(f'Created job summary') + + @staticmethod + def get_test_lists_from_check_run(check_run: Optional[CheckRun]) -> Tuple[Optional[List[str]], Optional[List[str]]]: + if check_run is None: + return None, None + + all_tests_title_regexp = re.compile(r'^\d+ test(s)? found( \(test \d+ to \d+\))?$') + skipped_tests_title_regexp = re.compile(r'^\d+ skipped test(s)? 
found( \(test \d+ to \d+\))?$') + + all_tests_message_regexp = re.compile( + r'^(There is 1 test, see "Raw output" for the name of the test)|' + r'(There are \d+ tests, see "Raw output" for the full list of tests)|' + r'(There are \d+ tests, see "Raw output" for the list of tests \d+ to \d+)\.$') + skipped_tests_message_regexp = re.compile( + r'^(There is 1 skipped test, see "Raw output" for the name of the skipped test)|' + r'(There are \d+ skipped tests, see "Raw output" for the full list of skipped tests)|' + r'(There are \d+ skipped tests, see "Raw output" for the list of skipped tests \d+ to \d+)\.$') + + annotations = list(check_run.get_annotations()) + all_tests_list = Publisher.get_test_list_from_annotations(annotations, all_tests_title_regexp, all_tests_message_regexp) + skipped_tests_list = Publisher.get_test_list_from_annotations(annotations, skipped_tests_title_regexp, skipped_tests_message_regexp) + + return all_tests_list or None, skipped_tests_list or None + + @staticmethod + def get_test_list_from_annotations(annotations: List[CheckRunAnnotation], + title_regexp, message_regexp) -> List[str]: + test_annotations: List[CheckRunAnnotation] = [] + + for annotation in annotations: + if annotation and annotation.title and annotation.message and annotation.raw_details and \ + title_regexp.match(annotation.title) and \ + message_regexp.match(annotation.message): + test_annotations.append(annotation) + + test_lists = [Publisher.get_test_list_from_annotation(test_annotation) + for test_annotation in test_annotations] + test_list = [test + for test_list in test_lists + if test_list + for test in test_list] + return test_list + + def get_test_list_annotations(self, cases: UnitTestCaseResults, max_chunk_size: int = 64000) -> List[Annotation]: + all_tests = get_all_tests_list_annotation(cases, max_chunk_size) \ + if all_tests_list in self._settings.check_run_annotation else [] + skipped_tests = get_skipped_tests_list_annotation(cases, max_chunk_size) \ + if skipped_tests_list in self._settings.check_run_annotation else [] + return [annotation for annotation in skipped_tests + all_tests if annotation] + + def publish_comment(self, + title: str, + stats: UnitTestRunResults, + pull_request: PullRequest, + check_run: Optional[CheckRun] = None, + cases: Optional[UnitTestCaseResults] = None): + # compare them with earlier stats + base_check_run = None + if self._settings.compare_earlier: + base_commit_sha = self.get_base_commit_sha(pull_request) + if stats.commit == base_commit_sha: + # we do not publish a comment when we compare the commit to itself + # that would overwrite earlier comments without change stats + return pull_request + logger.debug(f'comparing against base={base_commit_sha}') + base_check_run = self.get_check_run(base_commit_sha) + base_stats = self.get_stats_from_check_run(base_check_run) if base_check_run is not None else None + stats_with_delta = get_stats_delta(stats, base_stats, 'base') if base_stats is not None else stats + logger.debug(f'stats with delta: {stats_with_delta}') + + # gather test lists from check run and cases + before_all_tests, before_skipped_tests = self.get_test_lists_from_check_run(base_check_run) + all_tests, skipped_tests = get_all_tests_list(cases), get_skipped_tests_list(cases) + # 'before' test names are retrieved from check runs, which have restricted unicode + # so we have to apply the same restriction to the test names retrieved from cases, so that they match + all_tests, skipped_tests = restrict_unicode_list(all_tests), 
restrict_unicode_list(skipped_tests) + test_changes = SomeTestChanges(before_all_tests, all_tests, before_skipped_tests, skipped_tests) + + latest_comment = self.get_latest_comment(pull_request) + latest_comment_body = latest_comment.body if latest_comment else None + + # are we required to create a comment on this PR? + earlier_stats = self.get_stats_from_summary_md(latest_comment_body) if latest_comment_body else None + if not self.require_comment(stats_with_delta, earlier_stats): + logger.info(f'No pull request comment required as comment mode is {self._settings.comment_mode} (comment_mode)') + return + + details_url = check_run.html_url if check_run else None + summary = get_long_summary_with_digest_md(stats_with_delta, stats, details_url, test_changes, self._settings.test_changes_limit) + body = f'## {title}\n{summary}' + + # only create new comment none exists already + if latest_comment is None: + comment = pull_request.create_issue_comment(body) + logger.info(f'Created comment for pull request #{pull_request.number}: {comment.html_url}') + else: + self.reuse_comment(latest_comment, body) + logger.info(f'Edited comment for pull request #{pull_request.number}: {latest_comment.html_url}') + + def require_comment(self, + stats: UnitTestRunResultsOrDeltaResults, + earlier_stats: Optional[UnitTestRunResults]) -> bool: + # SomeTestChanges.has_changes cannot be used here as changes between earlier comment + # and current results cannot be identified + + if self._settings.comment_mode == comment_mode_always: + logger.debug(f'Comment required as comment mode is {self._settings.comment_mode}') + return True + + # helper method to detect if changes require a comment + def do_changes_require_comment(earlier_stats_is_different_to: Optional[Callable[[UnitTestRunResultsOrDeltaResults], bool]], + stats_has_changes: bool, + flavour: str = '') -> bool: + in_flavour = '' + if flavour: + flavour = f'{flavour} ' + in_flavour = f'in {flavour}' + + if earlier_stats is not None and earlier_stats_is_different_to(stats): + logger.info(f'Comment required as comment mode is "{self._settings.comment_mode}" ' + f'and {flavour}statistics are different to earlier comment') + logger.debug(f'earlier: {earlier_stats}') + logger.debug(f'current: {stats.without_delta() if stats.is_delta else stats}') + return True + if not stats.is_delta: + logger.info(f'Comment required as comment mode is "{self._settings.comment_mode}" ' + f'but no delta statistics to target branch available') + return True + if stats_has_changes: + logger.info(f'Comment required as comment mode is "{self._settings.comment_mode}" ' + f'and changes {in_flavour} to target branch exist') + logger.debug(f'current: {stats}') + return True + return False + + if self._settings.comment_mode == comment_mode_changes and \ + do_changes_require_comment(earlier_stats.is_different if earlier_stats else None, + stats.is_delta and stats.has_changes): + return True + + if self._settings.comment_mode == comment_mode_changes_failures and \ + do_changes_require_comment(earlier_stats.is_different_in_failures if earlier_stats else None, + stats.is_delta and stats.has_failure_changes, + 'failures'): + return True + + if self._settings.comment_mode in [comment_mode_changes_failures, comment_mode_changes_errors] and \ + do_changes_require_comment(earlier_stats.is_different_in_errors if earlier_stats else None, + stats.is_delta and stats.has_error_changes, + 'errors'): + return True + + # helper method to detect if stats require a comment + def 
do_stats_require_comment(earlier_stats_require: Optional[bool], stats_require: bool, flavour: str) -> bool: + if earlier_stats is not None and earlier_stats_require: + logger.info(f'Comment required as comment mode is {self._settings.comment_mode} ' + f'and {flavour} existed in earlier comment') + return True + if stats_require: + logger.info(f'Comment required as comment mode is {self._settings.comment_mode} ' + f'and {flavour} exist in current comment') + return True + return False + + if self._settings.comment_mode == comment_mode_failures and \ + do_stats_require_comment(earlier_stats.has_failures if earlier_stats else None, + stats.has_failures, + 'failures'): + return True + + if self._settings.comment_mode in [comment_mode_failures, comment_mode_errors] and \ + do_stats_require_comment(earlier_stats.has_errors if earlier_stats else None, + stats.has_errors, + 'errors'): + return True + + return False + + def get_latest_comment(self, pull: PullRequest) -> Optional[IssueComment]: + # get comments of this pull request + comments = self.get_pull_request_comments(pull, order_by_updated=True) + + # get all comments that come from this action and are not hidden + comments = self.get_action_comments(comments) + + # if there is no such comment, stop here + if len(comments) == 0: + return None + + # fetch latest action comment + comment_id = comments[-1].get("databaseId") + return pull.get_issue_comment(comment_id) + + def reuse_comment(self, comment: IssueComment, body: str): + if ':recycle:' not in body: + body = f'{body}\n:recycle: This comment has been updated with latest results.' + + try: + comment.edit(body) + except Exception as e: + self._gha.warning(f'Failed to edit existing comment #{comment.id}') + logger.debug('editing existing comment failed', exc_info=e) + + def get_base_commit_sha(self, pull_request: PullRequest) -> Optional[str]: + if self._settings.pull_request_build == pull_request_build_mode_merge: + if self._settings.event: + # for pull request events we take the other parent of the merge commit (base) + if self._settings.event_name == 'pull_request': + return get_json_path(self._settings.event, 'pull_request.base.sha') + # for workflow run events we should take the same as for pull request events, + # but we have no way to figure out the actual merge commit and its parents + # we do not take the base sha from pull_request as it is not immutable + if self._settings.event_name == 'workflow_run': + return None + + try: + # we always fall back to where the branch merged off base ref + logger.debug(f'comparing {pull_request.base.ref} with {self._settings.commit}') + compare = self._repo.compare(pull_request.base.ref, self._settings.commit) + return compare.merge_base_commit.sha + except: + logger.warning(f'could not find best common ancestor ' + f'between base {pull_request.base.sha} ' + f'and commit {self._settings.commit}') + + return None + + def get_pull_request_comments(self, pull: PullRequest, order_by_updated: bool) -> List[Mapping[str, Any]]: + order = '' + if order_by_updated: + order = ', orderBy: { direction: ASC, field: UPDATED_AT }' + + query = dict( + query=r'query ListComments {' + r' repository(owner:"' + self._repo.owner.login + r'", name:"' + self._repo.name + r'") {' + r' pullRequest(number: ' + str(pull.number) + r') {' + f' comments(last: 100{order}) {{' + r' nodes {' + r' id, databaseId, author { login }, body, isMinimized' + r' }' + r' }' + r' }' + r' }' + r'}' + ) + + headers, data = self._req.requestJsonAndCheck( + "POST", self._settings.graphql_url, 
input=query + ) + + return get_json_path(data, 'data.repository.pullRequest.comments.nodes') + + def get_action_comments(self, comments: List[Mapping[str, Any]], is_minimized: Optional[bool] = False): + comment_body_start = f'## {self._settings.comment_title}\n' + comment_body_indicators = ['\nresults for commit ', '\nResults for commit '] + return list([comment for comment in comments + if get_json_path(comment, 'author.login') == self._settings.actor + and (is_minimized is None or comment.get('isMinimized') == is_minimized) + and comment.get('body', '').startswith(comment_body_start) + and any(indicator in comment.get('body', '') for indicator in comment_body_indicators)]) diff --git a/python/publish/trx.py b/python/publish/trx.py new file mode 100644 index 0000000..b9d86cd --- /dev/null +++ b/python/publish/trx.py @@ -0,0 +1,31 @@ +import pathlib +from typing import Iterable, Callable + +from lxml import etree + +from publish.junit import JUnitTree, ParsedJUnitFile, progress_safe_parse_xml_file, xml_has_root_element + +with (pathlib.Path(__file__).resolve().parent / 'xslt' / 'trx-to-junit.xslt').open('r', encoding='utf-8') as r: + transform_trx_to_junit = etree.XSLT(etree.parse(r), regexp=False, access_control=etree.XSLTAccessControl.DENY_ALL) + + +def is_trx(path: str) -> bool: + return xml_has_root_element(path, ['TestRun']) + + +def parse_trx_file(path: str, large_files: bool) -> JUnitTree: + if large_files: + parser = etree.XMLParser(huge_tree=True) + trx = etree.parse(path, parser=parser) + else: + trx = etree.parse(path) + return transform_trx_to_junit(trx) + + +def parse_trx_files(files: Iterable[str], large_files: bool, + progress: Callable[[ParsedJUnitFile], ParsedJUnitFile] = lambda x: x) -> Iterable[ParsedJUnitFile]: + """Parses trx files.""" + def parse(path: str) -> JUnitTree: + return parse_trx_file(path, large_files) + + return progress_safe_parse_xml_file(files, parse, progress) diff --git a/python/publish/unittestresults.py b/python/publish/unittestresults.py new file mode 100644 index 0000000..b46070f --- /dev/null +++ b/python/publish/unittestresults.py @@ -0,0 +1,517 @@ +import dataclasses +from collections import defaultdict +from copy import deepcopy +from dataclasses import dataclass +from typing import Optional, List, Mapping, Any, Union, Dict, Callable, Tuple, AbstractSet +from xml.etree.ElementTree import ParseError as XmlParseError + + +@dataclass(frozen=True) +class UnitTestCase: + result_file: str + test_file: Optional[str] + line: Optional[int] + class_name: Optional[str] + test_name: Optional[str] + result: str + message: Optional[str] + content: Optional[str] + stdout: Optional[str] + stderr: Optional[str] + time: Optional[float] + + +UnitTestCaseFileName = str +UnitTestCaseClassName = str +UnitTestCaseTestName = str +UnitTestCaseResultKey = Tuple[Optional[UnitTestCaseFileName], UnitTestCaseClassName, UnitTestCaseTestName] +UnitTestCaseState = str +UnitTestCaseResults = Mapping[UnitTestCaseResultKey, Mapping[UnitTestCaseState, List[UnitTestCase]]] + + +def create_unit_test_case_results(indexed_cases: Optional[UnitTestCaseResults] = None) -> UnitTestCaseResults: + if indexed_cases: + return deepcopy(indexed_cases) + return defaultdict(lambda: defaultdict(list)) + + +@dataclass(frozen=True) +class ParseError: + file: str + message: str + line: Optional[int] = None + column: Optional[int] = None + exception: Optional[BaseException] = None + + @staticmethod + def from_exception(file: str, exception: BaseException): + if isinstance(exception, XmlParseError): + 
line, column = exception.position + msg = exception.msg + if msg.startswith('syntax error:') or \ + msg.startswith('no element found:') or \ + msg.startswith('unclosed token:') or \ + msg.startswith('mismatched tag:'): + msg = f'File is not a valid XML file:\n{msg}' + elif msg.startswith('Invalid format.'): + msg = f'File is not a valid JUnit file:\n{msg}' + return ParseError(file=file, message=msg, line=line, column=column, exception=exception) + return ParseError(file=file, message=str(exception), exception=exception) + + # exceptions can be arbitrary types and might not be serializable + def without_exception(self) -> 'ParseError': + return dataclasses.replace(self, exception=None) + + +@dataclass(frozen=True) +class ParsedUnitTestResults: + files: int + errors: List[ParseError] + suites: int + suite_tests: int + suite_skipped: int + suite_failures: int + suite_errors: int + suite_time: int + suite_details: List['UnitTestSuite'] + cases: List[UnitTestCase] + + def with_commit(self, commit: str) -> 'ParsedUnitTestResultsWithCommit': + return ParsedUnitTestResultsWithCommit( + self.files, + self.errors, + self.suites, + self.suite_tests, + self.suite_skipped, + self.suite_failures, + self.suite_errors, + self.suite_time, + self.suite_details, + self.cases, + commit + ) + + +@dataclass(frozen=True) +class ParsedUnitTestResultsWithCommit(ParsedUnitTestResults): + commit: str + + def with_cases(self, + cases_skipped: int, + cases_failures: int, + cases_errors: int, + cases_time: float, + case_results: UnitTestCaseResults, + tests: int, + tests_skipped: int, + tests_failures: int, + tests_errors: int) -> 'UnitTestResults': + return UnitTestResults( + files=self.files, + errors=self.errors, + suites=self.suites, + suite_tests=self.suite_tests, + suite_skipped=self.suite_skipped, + suite_failures=self.suite_failures, + suite_errors=self.suite_errors, + suite_time=self.suite_time, + suite_details=self.suite_details, + commit=self.commit, + + cases=len(self.cases), + cases_skipped=cases_skipped, + cases_failures=cases_failures, + cases_errors=cases_errors, + cases_time=cases_time, + case_results=case_results, + + tests=tests, + tests_skipped=tests_skipped, + tests_failures=tests_failures, + tests_errors=tests_errors + ) + + def without_cases(self): + # when there are no case information, we use the + # testsuite information for case and test level + return self.with_cases( + # test states and counts from cases + cases_skipped=self.suite_skipped, + cases_failures=self.suite_failures, + cases_errors=self.suite_errors, + cases_time=self.suite_time, + case_results=create_unit_test_case_results(), + + tests=self.suite_tests, + tests_skipped=self.suite_skipped, + tests_failures=self.suite_failures, + tests_errors=self.suite_errors, + ) + + +@dataclass(frozen=True) +class UnitTestSuite: + name: str + tests: int + skipped: int + failures: int + errors: int + stdout: Optional[str] + stderr: Optional[str] + + +@dataclass(frozen=True) +class UnitTestResults(ParsedUnitTestResultsWithCommit): + cases: int + cases_skipped: int + cases_failures: int + cases_errors: int + cases_time: float + case_results: UnitTestCaseResults + + tests: int + tests_skipped: int + tests_failures: int + tests_errors: int + + +@dataclass(frozen=True) +class UnitTestRunResults: + files: int + errors: List[ParseError] + suites: int + duration: int + + suite_details: Optional[List[UnitTestSuite]] + + tests: int + tests_succ: int + tests_skip: int + tests_fail: int + tests_error: int + + runs: int + runs_succ: int + runs_skip: int + 
runs_fail: int + runs_error: int + + commit: str + + @property + def is_delta(self) -> bool: + return False + + @property + def has_failures(self): + return self.tests_fail > 0 or self.runs_fail > 0 + + @property + def has_errors(self): + return len(self.errors) > 0 or self.tests_error > 0 or self.runs_error > 0 + + @staticmethod + def _change_fields(results: 'UnitTestRunResults') -> List[int]: + return [results.files, results.suites, + results.tests, results.tests_succ, results.tests_skip, results.tests_fail, results.tests_error, + results.runs, results.runs_succ, results.runs_skip, results.runs_fail, results.runs_error] + + @staticmethod + def _failure_fields(results: 'UnitTestRunResults') -> List[int]: + return [results.tests_fail, results.runs_fail] + + @staticmethod + def _error_fields(results: 'UnitTestRunResults') -> List[int]: + return [results.tests_error, results.runs_error] + + def is_different(self, + other: 'UnitTestRunResultsOrDeltaResults', + fields_func: Callable[['UnitTestRunResults'], List[int]] = _change_fields.__func__): + if other.is_delta: + other = other.without_delta() + + return any([left != right for left, right in zip(fields_func(self), fields_func(other))]) + + def is_different_in_failures(self, other: 'UnitTestRunResultsOrDeltaResults'): + return self.is_different(other, self._failure_fields) + + def is_different_in_errors(self, other: 'UnitTestRunResultsOrDeltaResults'): + return self.is_different(other, self._error_fields) + + def with_errors(self, errors: List[ParseError]) -> 'UnitTestRunResults': + return UnitTestRunResults( + files=self.files, + errors=errors, + suites=self.suites, + duration=self.duration, + + suite_details=self.suite_details, + + tests=self.tests, + tests_succ=self.tests_succ, + tests_skip=self.tests_skip, + tests_fail=self.tests_fail, + tests_error=self.tests_error, + + runs=self.runs, + runs_succ=self.runs_succ, + runs_skip=self.runs_skip, + runs_fail=self.runs_fail, + runs_error=self.runs_error, + + commit=self.commit + ) + + # exceptions can be arbitrary types and might not be serializable + def without_exceptions(self) -> 'UnitTestRunResults': + return dataclasses.replace(self, errors=[error.without_exception() for error in self.errors]) + + def without_suite_details(self) -> 'UnitTestRunResults': + return dataclasses.replace(self, suite_details=None) + + def to_dict(self) -> Dict[str, Any]: + # dict is usually used to serialize, but exceptions are likely not serializable, so we exclude them + # suite details might be arbitrarily large, we exclude those too + return dataclasses.asdict(self.without_exceptions().without_suite_details(), + # the dict_factory removes None values + dict_factory=lambda x: {k: v for (k, v) in x if v is not None}) + + @staticmethod + def from_dict(values: Mapping[str, Any]) -> 'UnitTestRunResults': + return UnitTestRunResults( + files=values.get('files'), + errors=values.get('errors', []), + suites=values.get('suites'), + duration=values.get('duration'), + + suite_details=None, + + tests=values.get('tests'), + tests_succ=values.get('tests_succ'), + tests_skip=values.get('tests_skip'), + tests_fail=values.get('tests_fail'), + tests_error=values.get('tests_error'), + + runs=values.get('runs'), + runs_succ=values.get('runs_succ'), + runs_skip=values.get('runs_skip'), + runs_fail=values.get('runs_fail'), + runs_error=values.get('runs_error'), + + commit=values.get('commit'), + ) + + +Numeric = Mapping[str, int] + + +@dataclass(frozen=True) +class UnitTestRunDeltaResults: + files: Numeric + errors: 
List[ParseError] + suites: Numeric + duration: Numeric + + tests: Numeric + tests_succ: Numeric + tests_skip: Numeric + tests_fail: Numeric + tests_error: Numeric + + runs: Numeric + runs_succ: Numeric + runs_skip: Numeric + runs_fail: Numeric + runs_error: Numeric + + commit: str + + reference_type: str + reference_commit: str + + @property + def is_delta(self) -> bool: + return True + + @staticmethod + def _has_changes(fields: List[Numeric]) -> bool: + return any([field.get('delta') for field in fields]) + + @property + def has_changes(self) -> bool: + return self._has_changes([self.files, self.suites, + self.tests, self.tests_succ, self.tests_skip, self.tests_fail, self.tests_error, + self.runs, self.runs_succ, self.runs_skip, self.runs_fail, self.runs_error]) + + @property + def has_failure_changes(self) -> bool: + return self._has_changes([self.tests_fail, self.runs_fail]) + + @property + def has_error_changes(self) -> bool: + return self._has_changes([self.tests_error, self.runs_error]) + + @property + def has_failures(self): + return self.tests_fail.get('number') > 0 or self.runs_fail.get('number') > 0 + + @property + def has_errors(self): + return len(self.errors) > 0 or self.tests_error.get('number') > 0 or self.runs_error.get('number') > 0 + + def to_dict(self) -> Dict[str, Any]: + # dict is usually used to serialize, but exceptions are likely not serializable, so we exclude them + return dataclasses.asdict(self.without_exceptions()) + + def without_delta(self) -> UnitTestRunResults: + def v(value: Numeric) -> int: + return value['number'] + + def d(value: Numeric) -> int: + return value['duration'] + + return UnitTestRunResults(files=v(self.files), errors=self.errors, suites=v(self.suites), duration=d(self.duration), suite_details=None, + tests=v(self.tests), tests_succ=v(self.tests_succ), tests_skip=v(self.tests_skip), tests_fail=v(self.tests_fail), tests_error=v(self.tests_error), + runs=v(self.runs), runs_succ=v(self.runs_succ), runs_skip=v(self.runs_skip), runs_fail=v(self.runs_fail), runs_error=v(self.runs_error), + commit=self.commit) + + def without_exceptions(self) -> 'UnitTestRunDeltaResults': + return dataclasses.replace(self, errors=[error.without_exception() for error in self.errors]) + + +UnitTestRunResultsOrDeltaResults = Union[UnitTestRunResults, UnitTestRunDeltaResults] + + +def aggregate_states(states: AbstractSet[str]) -> str: + return 'error' if 'error' in states else \ + 'failure' if 'failure' in states else \ + 'success' if 'success' in states else \ + 'skipped' + + +def get_test_results(parsed_results: ParsedUnitTestResultsWithCommit, + dedup_classes_by_file_name: bool) -> UnitTestResults: + """ + Computes case and test statistics and returns them as a UnitTestResults instance. + With dedup_classes_by_file_name=True, considers file name to identify classes, + not just their class name. 
+ + :param parsed_results: parsed unit test results + :param dedup_classes_by_file_name: + :return: unit test result statistics + """ + cases = parsed_results.cases + + if len(cases) == 0: + return parsed_results.without_cases() + + cases_skipped = [case for case in cases if case.result in ['skipped', 'disabled']] + cases_failures = [case for case in cases if case.result == 'failure'] + cases_errors = [case for case in cases if case.result == 'error'] + cases_time = sum([case.time or 0 for case in cases]) + + # index cases by tests and state + cases_results = create_unit_test_case_results() + for case in cases: + # index by test file name (when de-duplicating by file name), class name and test name + test = (case.test_file if dedup_classes_by_file_name else None, case.class_name, case.test_name) + + # second index by state + state = case.result if case.result != 'disabled' else 'skipped' + + # collect cases of test and state + cases_results[test][state].append(case) + + test_results = dict() + for test, states in cases_results.items(): + test_results[test] = aggregate_states(states.keys()) + + tests = len(test_results) + tests_skipped = len([test for test, state in test_results.items() if state in ['skipped', 'disabled']]) + tests_failures = len([test for test, state in test_results.items() if state == 'failure']) + tests_errors = len([test for test, state in test_results.items() if state == 'error']) + + return parsed_results.with_cases( + # test states and counts from cases + cases_skipped=len(cases_skipped), + cases_failures=len(cases_failures), + cases_errors=len(cases_errors), + cases_time=cases_time, + case_results=cases_results, + + tests=tests, + # distinct test states by case name + tests_skipped=tests_skipped, + tests_failures=tests_failures, + tests_errors=tests_errors, + ) + + +def get_stats(test_results: UnitTestResults) -> UnitTestRunResults: + """Provides stats for the given test results.""" + tests_succ = test_results.tests - test_results.tests_skipped - test_results.tests_failures - test_results.tests_errors + runs_succ = test_results.suite_tests - test_results.suite_skipped - test_results.suite_failures - test_results.suite_errors + + return UnitTestRunResults( + files=test_results.files, + errors=test_results.errors, + suites=test_results.suites, + duration=test_results.suite_time, + + suite_details=test_results.suite_details, + + tests=test_results.tests, + tests_succ=tests_succ, + tests_skip=test_results.tests_skipped, + tests_fail=test_results.tests_failures, + tests_error=test_results.tests_errors, + + runs=test_results.suite_tests, + runs_succ=runs_succ, + runs_skip=test_results.suite_skipped, + runs_fail=test_results.suite_failures, + runs_error=test_results.suite_errors, + + commit=test_results.commit + ) + + +def get_diff_value(value: int, reference: int, field: str = 'number') -> Numeric: + if field == 'duration': + val = dict(duration=value) + elif field == 'number': + val = dict(number=value) + else: + raise ValueError(f'unsupported field: {field}') + + val['delta'] = value - reference + return val + + +def get_stats_delta(stats: UnitTestRunResults, + reference_stats: UnitTestRunResults, + reference_type: str) -> UnitTestRunDeltaResults: + """Given two stats provides a stats with deltas.""" + return UnitTestRunDeltaResults( + files=get_diff_value(stats.files, reference_stats.files), + errors=stats.errors, + suites=get_diff_value(stats.suites, reference_stats.suites), + duration=get_diff_value(stats.duration, reference_stats.duration, 'duration'), + + 
tests=get_diff_value(stats.tests, reference_stats.tests), + tests_succ=get_diff_value(stats.tests_succ, reference_stats.tests_succ), + tests_skip=get_diff_value(stats.tests_skip, reference_stats.tests_skip), + tests_fail=get_diff_value(stats.tests_fail, reference_stats.tests_fail), + tests_error=get_diff_value(stats.tests_error, reference_stats.tests_error), + + runs=get_diff_value(stats.runs, reference_stats.runs), + runs_succ=get_diff_value(stats.runs_succ, reference_stats.runs_succ), + runs_skip=get_diff_value(stats.runs_skip, reference_stats.runs_skip), + runs_fail=get_diff_value(stats.runs_fail, reference_stats.runs_fail), + runs_error=get_diff_value(stats.runs_error, reference_stats.runs_error), + + commit=stats.commit, + + reference_type=reference_type, + reference_commit=reference_stats.commit + ) diff --git a/python/publish/xslt/nunit-to-junit.xslt b/python/publish/xslt/nunit-to-junit.xslt new file mode 100644 index 0000000..11b9055 --- /dev/null +++ b/python/publish/xslt/nunit-to-junit.xslt @@ -0,0 +1,88 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +MESSAGE: + ++++++++++++++++++++ +STACK TRACE: + + + +MESSAGE: + ++++++++++++++++++++ +STACK TRACE: + + + + + + + + + + + + + + + + + + + diff --git a/python/publish/xslt/nunit3-to-junit.xslt b/python/publish/xslt/nunit3-to-junit.xslt new file mode 100644 index 0000000..00d0551 --- /dev/null +++ b/python/publish/xslt/nunit3-to-junit.xslt @@ -0,0 +1,139 @@ + + + + + + + + + + 0 + + + + + + 0 + + + + + + 0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 0 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/publish/xslt/trx-to-junit.xslt b/python/publish/xslt/trx-to-junit.xslt new file mode 100644 index 0000000..3cb1a7d --- /dev/null +++ b/python/publish/xslt/trx-to-junit.xslt @@ -0,0 +1,268 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/publish/xslt/xunit-to-junit.xslt b/python/publish/xslt/xunit-to-junit.xslt new file mode 100644 index 0000000..6ee68d6 --- /dev/null +++ b/python/publish/xslt/xunit-to-junit.xslt @@ -0,0 +1,63 @@ + + + + + + + + + + + + + + + + + T + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/publish/xunit.py b/python/publish/xunit.py new file mode 100644 index 0000000..ec7c1a8 --- /dev/null +++ b/python/publish/xunit.py @@ -0,0 +1,31 @@ +import pathlib +from typing import Iterable, Callable + +from lxml import etree + +from publish.junit import JUnitTree, ParsedJUnitFile, progress_safe_parse_xml_file, xml_has_root_element + +with (pathlib.Path(__file__).resolve().parent / 'xslt' / 'xunit-to-junit.xslt').open('r', encoding='utf-8') as r: + transform_xunit_to_junit = etree.XSLT(etree.parse(r), regexp=False, access_control=etree.XSLTAccessControl.DENY_ALL) + + +def is_xunit(path: str) -> bool: + return 
xml_has_root_element(path, ['assemblies', 'assembly']) + + +def parse_xunit_file(path: str, large_files: bool) -> JUnitTree: + if large_files: + parser = etree.XMLParser(huge_tree=True) + xunit = etree.parse(path, parser=parser) + else: + xunit = etree.parse(path) + return transform_xunit_to_junit(xunit) + + +def parse_xunit_files(files: Iterable[str], large_files: bool, + progress: Callable[[ParsedJUnitFile], ParsedJUnitFile] = lambda x: x) -> Iterable[ParsedJUnitFile]: + """Parses xunit files.""" + def parse(path: str) -> JUnitTree: + return parse_xunit_file(path, large_files) + + return progress_safe_parse_xml_file(files, parse, progress) diff --git a/python/publish_test_results.py b/python/publish_test_results.py new file mode 100644 index 0000000..0fd8da0 --- /dev/null +++ b/python/publish_test_results.py @@ -0,0 +1,540 @@ +import json +import logging +import os +import re +import sys +from glob import glob +from pathlib import Path +from typing import List, Optional, Union, Mapping, Tuple, Any, Iterable, Callable + +import github +import humanize +import psutil +from github.GithubRetry import DEFAULT_SECONDARY_RATE_WAIT + +import publish.github_action +from publish import __version__, available_annotations, default_annotations, none_annotations, \ + report_suite_out_log, report_suite_err_log, report_suite_logs, default_report_suite_logs, available_report_suite_logs, \ + pull_request_build_modes, fail_on_modes, fail_on_mode_errors, fail_on_mode_failures, \ + comment_mode_always, comment_modes, punctuation_space +from publish.github_action import GithubAction +from publish.junit import JUnitTree, parse_junit_xml_files, parse_junit_xml_file, process_junit_xml_elems, \ + ParsedJUnitFile, progress_safe_parse_xml_file, is_junit +from publish.progress import progress_logger +from publish.publisher import Publisher, Settings +from publish.unittestresults import get_test_results, get_stats, ParsedUnitTestResults, ParsedUnitTestResultsWithCommit, \ + ParseError + +logger = logging.getLogger('publish') + + +def get_conclusion(parsed: ParsedUnitTestResults, fail_on_failures, fail_on_errors) -> str: + if parsed.files == 0: + return 'neutral' + if fail_on_errors and len(parsed.errors) > 0: + return 'failure' + if fail_on_failures and parsed.suite_failures > 0 or fail_on_errors and parsed.suite_errors > 0: + return 'failure' + return 'success' + + +def get_github(auth: github.Auth, + url: str, + retries: int, + backoff_factor: float, + seconds_between_requests: Optional[float], + seconds_between_writes: Optional[float], + secondary_rate_wait: float) -> github.Github: + retry = github.GithubRetry(total=retries, + backoff_factor=backoff_factor, + secondary_rate_wait=secondary_rate_wait) + return github.Github(auth=auth, + base_url=url, + per_page=100, + retry=retry, + seconds_between_requests=seconds_between_requests, + seconds_between_writes=seconds_between_writes) + + +def get_files(multiline_files_globs: str) -> Tuple[List[str], bool]: + multiline_files_globs = re.split('\r?\n\r?', multiline_files_globs) + included = {str(file) + for files_glob in multiline_files_globs + if not files_glob.startswith('!') + for file in glob(files_glob, recursive=True)} + excluded = {str(file) + for files_glob in multiline_files_globs + if files_glob.startswith('!') + for file in glob(files_glob[1:], recursive=True)} + has_absolute = any({Path(pattern).is_absolute() + for files_glob in multiline_files_globs + for pattern in [files_glob[1:] if files_glob.startswith('!') else files_glob]}) + return list(included - 
excluded), has_absolute + + +def prettify_glob_pattern(pattern: Optional[str]) -> Optional[str]: + if pattern is not None: + return re.sub('\r?\n\r?', ', ', pattern.strip()) + + +def expand_glob(pattern: Optional[str], file_format: Optional[str], gha: GithubAction) -> List[str]: + if not pattern: + return [] + + files, has_absolute_patterns = get_files(pattern) + file_format = f' {file_format}' if file_format else '' + + prettyfied_pattern = prettify_glob_pattern(pattern) + if len(files) == 0: + gha.warning(f'Could not find any{file_format} files for {prettyfied_pattern}') + if has_absolute_patterns: + gha.warning(f'Your file pattern contains absolute paths, please read the notes on absolute paths:') + gha.warning(f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#running-with-absolute-paths') + else: + logger.info(f'Reading{file_format} files {prettyfied_pattern} ({get_number_of_files(files)}, {get_files_size(files)})') + logger.debug(f'reading{file_format} files {list(files)}') + + return files + + +def get_files_size(files: List[str]) -> str: + try: + size = sum([os.path.getsize(file) for file in files]) + return humanize.naturalsize(size, binary=True) + except BaseException as e: + logger.warning(f'failed to obtain file size of {len(files)} files', exc_info=e) + return 'unknown size' + + +def get_number_of_files(files: List[str], label: str = 'file') -> str: + number_of_files = '{number:,} {label}{s}'.format( + number=len(files), + label=label, + s='s' if len(files) > 1 else '' + ).replace(',', punctuation_space) + return number_of_files + + +def parse_files_as_xml(files: Iterable[str], large_files: bool, drop_testcases: bool, + progress: Callable[[ParsedJUnitFile], ParsedJUnitFile] = lambda x: x) -> Iterable[ParsedJUnitFile]: + junit_files = [] + nunit_files = [] + xunit_files = [] + trx_files = [] + dart_json_files = [] + mocha_json_files = [] + unknown_files = [] + + def parse(path: str) -> JUnitTree: + if is_junit(path): + junit_files.append(path) + return parse_junit_xml_file(path, large_files, drop_testcases) + + from publish.nunit import is_nunit, parse_nunit_file + if is_nunit(path): + nunit_files.append(path) + return parse_nunit_file(path, large_files) + + from publish.xunit import is_xunit, parse_xunit_file + if is_xunit(path): + xunit_files.append(path) + return parse_xunit_file(path, large_files) + + from publish.trx import is_trx, parse_trx_file + if is_trx(path): + trx_files.append(path) + return parse_trx_file(path, large_files) + + from publish.dart import is_dart_json, parse_dart_json_file + if is_dart_json(path): + dart_json_files.append(path) + return parse_dart_json_file(path) + + from publish.mocha import is_mocha_json, parse_mocha_json_file + if is_mocha_json(path): + mocha_json_files.append(path) + return parse_mocha_json_file(path) + + unknown_files.append(path) + raise RuntimeError(f'Unsupported file format: {path}') + + try: + return progress_safe_parse_xml_file(files, parse, progress) + finally: + for flavour, files in [ + ('JUnit XML', junit_files), + ('NUnit XML', nunit_files), + ('XUnit XML', xunit_files), + ('TRX', trx_files), + ('Dart JSON', dart_json_files), + ('Mocha JSON', mocha_json_files), + ('unsupported', unknown_files) + ]: + if files: + logger.info(f'Detected {get_number_of_files(files, f"{flavour} file")} ({get_files_size(files)})') + if flavour == 'unsupported': + for file in files: + logger.info(f'Unsupported file: {file}') + else: + logger.debug(f'detected {flavour} files {list(files)}') + + +def 
parse_files(settings: Settings, gha: GithubAction) -> ParsedUnitTestResultsWithCommit: + # expand file globs + files = expand_glob(settings.files_glob, None, gha) + junit_files = expand_glob(settings.junit_files_glob, 'JUnit XML', gha) + nunit_files = expand_glob(settings.nunit_files_glob, 'NUnit XML', gha) + xunit_files = expand_glob(settings.xunit_files_glob, 'XUnit XML', gha) + trx_files = expand_glob(settings.trx_files_glob, 'TRX', gha) + + elems = [] + + # parse files, log the progress + with progress_logger(items=len(files + junit_files + nunit_files + xunit_files + trx_files), + interval_seconds=10, + progress_template='Read {progress} files in {time}', + finish_template='Finished reading {observations} files in {duration}', + progress_item_type=Tuple[str, Any], + logger=logger) as progress: + if files: + elems.extend(parse_files_as_xml(files, settings.large_files, settings.ignore_runs, progress)) + if junit_files: + elems.extend(parse_junit_xml_files(junit_files, settings.large_files, settings.ignore_runs, progress)) + if xunit_files: + from publish.xunit import parse_xunit_files + elems.extend(parse_xunit_files(xunit_files, settings.large_files, progress)) + if nunit_files: + from publish.nunit import parse_nunit_files + elems.extend(parse_nunit_files(nunit_files, settings.large_files, progress)) + if trx_files: + from publish.trx import parse_trx_files + elems.extend(parse_trx_files(trx_files, settings.large_files, progress)) + + # get the test results + return process_junit_xml_elems( + elems, + time_factor=settings.time_factor, + test_file_prefix=settings.test_file_prefix, + add_suite_details=settings.report_suite_out_logs or settings.report_suite_err_logs or settings.json_suite_details + ).with_commit(settings.commit) + + +def log_parse_errors(errors: List[ParseError], gha: GithubAction): + [gha.error(message=f'Error processing result file: {error.message}', file=error.file, line=error.line, column=error.column, exception=error.exception) + for error in errors] + + +def action_fail_required(conclusion: str, action_fail: bool, action_fail_on_inconclusive: bool) -> bool: + return action_fail and conclusion == 'failure' or \ + action_fail_on_inconclusive and conclusion == 'inconclusive' + + +def main(settings: Settings, gha: GithubAction) -> None: + if settings.is_fork and not settings.job_summary: + gha.warning(f'This action is running on a pull_request event for a fork repository. ' + f'The only useful thing it can do in this situation is creating a job summary, which is disabled in settings. 
'
+                    f'To fully run the action on fork repository pull requests, see '
+                    f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#support-fork-repositories-and-dependabot-branches')
+        return
+
+    # log the available RAM to help spot OOM issues:
+    avail_mem = humanize.naturalsize(psutil.virtual_memory().available, binary=True)
+    logger.info(f'Available memory to read files: {avail_mem}')
+
+    # get the unit test results
+    parsed = parse_files(settings, gha)
+    log_parse_errors(parsed.errors, gha)
+
+    # process the parsed results
+    results = get_test_results(parsed, settings.dedup_classes_by_file_name)
+
+    # turn them into stats
+    stats = get_stats(results)
+
+    # derive check run conclusion from files
+    conclusion = get_conclusion(parsed, fail_on_failures=settings.fail_on_failures, fail_on_errors=settings.fail_on_errors)
+
+    # publish the delta stats
+    backoff_factor = max(settings.seconds_between_github_reads, settings.seconds_between_github_writes)
+    gh = get_github(auth=github.Auth.Token(settings.token),
+                    url=settings.api_url,
+                    retries=settings.api_retries,
+                    backoff_factor=backoff_factor,
+                    seconds_between_requests=settings.seconds_between_github_reads,
+                    seconds_between_writes=settings.seconds_between_github_writes,
+                    secondary_rate_wait=settings.secondary_rate_limit_wait_seconds)
+    Publisher(settings, gh, gha).publish(stats, results.case_results, conclusion)
+
+    if action_fail_required(conclusion, settings.action_fail, settings.action_fail_on_inconclusive):
+        gha.error(f'This action finished successfully, but test results have status {conclusion}.')
+        sys.exit(1)
+
+
+def get_commit_sha(event: dict, event_name: str, options: dict):
+    logger.debug(f"action triggered by '{event_name}' event")
+
+    # https://developer.github.com/webhooks/event-payloads/
+    if event_name.startswith('pull_request'):
+        return event.get('pull_request', {}).get('head', {}).get('sha')
+
+    # https://docs.github.com/en/free-pro-team@latest/actions/reference/events-that-trigger-workflows
+    return options.get('GITHUB_SHA')
+
+
+def get_annotations_config(options: dict, event: Optional[dict]) -> List[str]:
+    annotations = get_var('CHECK_RUN_ANNOTATIONS', options)
+    annotations = [annotation.strip() for annotation in annotations.split(',')] \
+        if annotations else default_annotations
+    default_branch = event.get('repository', {}).get('default_branch') if event else None
+    annotations_branch = get_var('CHECK_RUN_ANNOTATIONS_BRANCH', options) or default_branch or 'main, master'
+    annotations_branches = {f'refs/heads/{branch.strip()}' for branch in annotations_branch.split(',')}
+    branch = get_var('GITHUB_REF', options)
+
+    if annotations and branch and annotations_branches and \
+            'refs/heads/*' not in annotations_branches and \
+            branch not in annotations_branches:
+        annotations = []
+
+    return annotations
+
+
+def get_var(name: str, options: dict) -> Optional[str]:
+    """
+    Returns the value from the given dict with key 'INPUT_<name>',
+    or if that does not exist, key '<name>'.
+    """
+    # the last 'or None' turns empty strings into None
+    return options.get(f'INPUT_{name}') or options.get(name) or None
+
+
+def get_bool_var(name: str, options: dict, default: bool) -> bool:
+    """
+    Same as get_var(), but checks if the value is a valid boolean.
+    Raises a RuntimeError if the string value is not a valid boolean ("true" or "false").
+    If the value is unset, returns the default.
+ """ + val = get_var(name, options) + if not val: + return default + + val = val.lower() + if val == 'true': + return True + elif val == 'false': + return False + else: + raise RuntimeError(f'Option {name.lower()} has to be boolean, so either "true" or "false": {val}') + + +def check_var(var: Union[Optional[str], List[str]], + name: str, + label: str, + allowed_values: Optional[List[str]] = None, + deprecated_values: Optional[List[str]] = None) -> None: + if var is None: + raise RuntimeError(f'{label} must be provided via action input or environment variable {name}') + + if allowed_values: + if isinstance(var, str): + if var not in allowed_values + (deprecated_values or []): + raise RuntimeError(f"Value '{var}' is not supported for variable {name}, " + f"expected: {', '.join(allowed_values)}") + if isinstance(var, list): + if any([v not in allowed_values + (deprecated_values or []) for v in var]): + raise RuntimeError(f"Some values in '{', '.join(var)}' " + f"are not supported for variable {name}, " + f"allowed: {', '.join(allowed_values)}") + + +def check_var_condition(condition: bool, message: str) -> None: + if not condition: + raise RuntimeError(message) + + +def deprecate_var(val: Optional[str], deprecated_var: str, replacement_var: str, gha: Optional[GithubAction]): + if val is not None: + message = f'Option {deprecated_var.lower()} is deprecated! {replacement_var}' + + if gha is None: + logger.warning(message) + else: + gha.warning(message) + + +def available_values(values: List[str]) -> str: + values = [f'"{val}"' for val in values] + return f"{', '.join(values[:-1])} or {values[-1]}" + + +def deprecate_val(val: Optional[str], var: str, replacement_vals: Mapping[str, str], gha: Optional[GithubAction]): + if val in replacement_vals: + message = f'Value "{val}" for option {var.lower()} is deprecated!' + replacement = replacement_vals[val] + if replacement: + message = f'{message} Instead, use value "{replacement}".' 
+ + if gha is None: + logger.warning(message) + else: + gha.warning(message) + + +def is_float(text: str) -> bool: + return re.match('^[+-]?(([0-9]*\\.[0-9]+)|([0-9]+(\\.[0-9]?)?))$', text) is not None + + +def get_settings(options: dict, gha: GithubAction) -> Settings: + event_file = get_var('EVENT_FILE', options) + event = event_file or get_var('GITHUB_EVENT_PATH', options) + event_name = get_var('EVENT_NAME', options) or get_var('GITHUB_EVENT_NAME', options) + check_var(event, 'GITHUB_EVENT_PATH', 'GitHub event file path') + check_var(event_name, 'GITHUB_EVENT_NAME', 'GitHub event name') + with open(event, 'rt', encoding='utf-8') as f: + event = json.load(f) + + repo = get_var('GITHUB_REPOSITORY', options) + job_summary = get_bool_var('JOB_SUMMARY', options, default=True) + comment_mode = get_var('COMMENT_MODE', options) or comment_mode_always + + # we cannot create a check run or pull request comment when running on pull_request event from a fork + # when event_file is given we assume proper setup as in README.md#support-fork-repositories-and-dependabot-branches + is_fork = event_file is None and \ + event_name == 'pull_request' and \ + event.get('pull_request', {}).get('head', {}).get('repo', {}).get('full_name') != repo + + api_url = options.get('GITHUB_API_URL') or github.Consts.DEFAULT_BASE_URL + graphql_url = options.get('GITHUB_GRAPHQL_URL') or f'{github.Consts.DEFAULT_BASE_URL}/graphql' + test_changes_limit = get_var('TEST_CHANGES_LIMIT', options) or '10' + check_var_condition(test_changes_limit.isnumeric(), f'TEST_CHANGES_LIMIT must be a positive integer or 0: {test_changes_limit}') + + default_files_glob = None + flavours = ['JUNIT', 'NUNIT', 'XUNIT', 'TRX'] + if not any(get_var(option, options) for option in ['FILES'] + [f'{flavour}_FILES' for flavour in flavours]): + default_files_glob = '*.xml' + gha.warning(f'At least one of the FILES, JUNIT_FILES, NUNIT_FILES, XUNIT_FILES, or TRX_FILES options has to be set! ' + f'Falling back to deprecated default "{default_files_glob}"') + + time_unit = get_var('TIME_UNIT', options) or 'seconds' + time_factors = {'seconds': 1.0, 'milliseconds': 0.001} + time_factor = time_factors.get(time_unit.lower()) + check_var_condition(time_factor is not None, f'TIME_UNIT {time_unit} is not supported. 
' + f'It is optional, but when given must be one of these values: ' + f'{", ".join(time_factors.keys())}') + + check_name = get_var('CHECK_NAME', options) or 'Test Results' + annotations = get_annotations_config(options, event) + suite_logs_mode = get_var('REPORT_SUITE_LOGS', options) or default_report_suite_logs + ignore_runs = get_bool_var('IGNORE_RUNS', options, default=False) + + fail_on = get_var('FAIL_ON', options) or 'test failures' + check_var(fail_on, 'FAIL_ON', 'Check fail mode', fail_on_modes) + # here we decide that we want to fail on errors when we fail on test failures, like log level escalation + fail_on_failures = fail_on == fail_on_mode_failures + fail_on_errors = fail_on == fail_on_mode_errors or fail_on_failures + + retries = get_var('GITHUB_RETRIES', options) or '10' + seconds_between_github_reads = get_var('SECONDS_BETWEEN_GITHUB_READS', options) or '1' + seconds_between_github_writes = get_var('SECONDS_BETWEEN_GITHUB_WRITES', options) or '2' + secondary_rate_limit_wait_seconds = get_var('SECONDARY_RATE_LIMIT_WAIT_SECONDS', options) or str(DEFAULT_SECONDARY_RATE_WAIT) + check_var_condition(retries.isnumeric(), f'GITHUB_RETRIES must be a positive integer or 0: {retries}') + check_var_condition(is_float(seconds_between_github_reads), f'SECONDS_BETWEEN_GITHUB_READS must be an integer or float number: {seconds_between_github_reads}') + check_var_condition(is_float(seconds_between_github_writes), f'SECONDS_BETWEEN_GITHUB_WRITES must be an integer or float number: {seconds_between_github_writes}') + check_var_condition(is_float(secondary_rate_limit_wait_seconds), f'SECONDARY_RATE_LIMIT_WAIT_SECONDS must be an integer or float number: {secondary_rate_limit_wait_seconds}') + + settings = Settings( + token=get_var('GITHUB_TOKEN', options), + actor=get_var('GITHUB_TOKEN_ACTOR', options) or 'github-actions', + api_url=api_url, + graphql_url=graphql_url, + api_retries=int(retries), + event=event, + event_file=event_file, + event_name=event_name, + is_fork=is_fork, + repo=repo, + commit=get_var('COMMIT', options) or get_commit_sha(event, event_name, options), + json_file=get_var('JSON_FILE', options), + json_thousands_separator=get_var('JSON_THOUSANDS_SEPARATOR', options) or punctuation_space, + json_suite_details=get_bool_var('JSON_SUITE_DETAILS', options, default=False), + json_test_case_results=get_bool_var('JSON_TEST_CASE_RESULTS', options, default=False), + fail_on_errors=fail_on_errors, + fail_on_failures=fail_on_failures, + action_fail=get_bool_var('ACTION_FAIL', options, default=False), + action_fail_on_inconclusive=get_bool_var('ACTION_FAIL_ON_INCONCLUSIVE', options, default=False), + files_glob=get_var('FILES', options) or default_files_glob, + junit_files_glob=get_var('JUNIT_FILES', options), + nunit_files_glob=get_var('NUNIT_FILES', options), + xunit_files_glob=get_var('XUNIT_FILES', options), + trx_files_glob=get_var('TRX_FILES', options), + time_factor=time_factor, + test_file_prefix=get_var('TEST_FILE_PREFIX', options) or None, + check_name=check_name, + comment_title=get_var('COMMENT_TITLE', options) or check_name, + comment_mode=comment_mode, + job_summary=job_summary, + compare_earlier=get_bool_var('COMPARE_TO_EARLIER_COMMIT', options, default=True), + pull_request_build=get_var('PULL_REQUEST_BUILD', options) or 'merge', + test_changes_limit=int(test_changes_limit), + report_individual_runs=get_bool_var('REPORT_INDIVIDUAL_RUNS', options, default=False), + report_suite_out_logs=suite_logs_mode in {report_suite_logs, report_suite_out_log}, + 
report_suite_err_logs=suite_logs_mode in {report_suite_logs, report_suite_err_log}, + dedup_classes_by_file_name=get_bool_var('DEDUPLICATE_CLASSES_BY_FILE_NAME', options, default=False), + large_files=get_bool_var('LARGE_FILES', options, default=ignore_runs), + ignore_runs=ignore_runs, + check_run_annotation=annotations, + seconds_between_github_reads=float(seconds_between_github_reads), + seconds_between_github_writes=float(seconds_between_github_writes), + secondary_rate_limit_wait_seconds=float(secondary_rate_limit_wait_seconds), + search_pull_requests=get_bool_var('SEARCH_PULL_REQUESTS', options, default=False), + ) + + check_var(settings.token, 'GITHUB_TOKEN', 'GitHub token') + check_var(settings.repo, 'GITHUB_REPOSITORY', 'GitHub repository') + check_var(settings.commit, 'COMMIT, GITHUB_SHA or event file', 'Commit SHA') + check_var_condition( + settings.test_file_prefix is None or any([settings.test_file_prefix.startswith(sign) for sign in ['-', '+']]), + f"TEST_FILE_PREFIX is optional, but when given, it must start with '-' or '+': {settings.test_file_prefix}" + ) + check_var(settings.comment_mode, 'COMMENT_MODE', 'Comment mode', comment_modes) + check_var(settings.pull_request_build, 'PULL_REQUEST_BUILD', 'Pull Request build', pull_request_build_modes) + check_var(suite_logs_mode, 'REPORT_SUITE_LOGS', 'Report suite logs mode', available_report_suite_logs) + check_var(settings.check_run_annotation, 'CHECK_RUN_ANNOTATIONS', 'Check run annotations', available_annotations) + check_var_condition( + none_annotations not in settings.check_run_annotation or len(settings.check_run_annotation) == 1, + f"CHECK_RUN_ANNOTATIONS '{none_annotations}' cannot be combined with other annotations: {', '.join(settings.check_run_annotation)}" + ) + + check_var_condition(settings.test_changes_limit >= 0, f'TEST_CHANGES_LIMIT must be a positive integer or 0: {settings.test_changes_limit}') + check_var_condition(settings.api_retries >= 0, f'GITHUB_RETRIES must be a positive integer or 0: {settings.api_retries}') + check_var_condition(settings.seconds_between_github_reads > 0, f'SECONDS_BETWEEN_GITHUB_READS must be a positive number: {seconds_between_github_reads}') + check_var_condition(settings.seconds_between_github_writes > 0, f'SECONDS_BETWEEN_GITHUB_WRITES must be a positive number: {seconds_between_github_writes}') + check_var_condition(settings.secondary_rate_limit_wait_seconds > 0, f'SECONDARY_RATE_LIMIT_WAIT_SECONDS must be a positive number: {secondary_rate_limit_wait_seconds}') + + return settings + + +def set_log_level(handler: logging.Logger, level: str, gha: GithubAction): + try: + handler.setLevel(level.upper()) + except ValueError as e: + gha.warning(f'Failed to set log level {level}: {e}') + + +if __name__ == "__main__": + gha = GithubAction() + options = dict(os.environ) + + root_log_level = get_var('ROOT_LOG_LEVEL', options) or 'INFO' + set_log_level(logging.root, root_log_level, gha) + logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)5s - %(message)s', datefmt='%Y-%m-%d %H:%M:%S %z') + + log_level = get_var('LOG_LEVEL', options) or 'INFO' + set_log_level(logger, log_level, gha) + set_log_level(publish.logger, log_level, gha) + if log_level == 'DEBUG': + gha.echo(True) + + settings = get_settings(options, gha) + logger.debug(f'Settings: {settings}') + + main(settings, gha) diff --git a/python/requirements-direct.txt b/python/requirements-direct.txt new file mode 100644 index 0000000..2b17f6c --- /dev/null +++ b/python/requirements-direct.txt @@ -0,0 +1,5 @@ 
+humanize==3.14.0 +junitparser==3.1.0 +lxml==4.9.3 +psutil==5.9.5 +PyGithub==2.1.1 diff --git a/python/requirements.txt b/python/requirements.txt new file mode 100644 index 0000000..d44c3db --- /dev/null +++ b/python/requirements.txt @@ -0,0 +1,21 @@ +humanize==3.14.0 +junitparser==3.1.0 + future==0.18.3 +lxml==4.9.3 +psutil==5.9.5 +PyGithub==2.1.1 + Deprecated==1.2.14 + wrapt==1.16.0 + PyJWT==2.8.0 + PyNaCl==1.5.0 + cffi==1.15.1 + pycparser==2.21 + python-dateutil==2.8.2 + six==1.16.0 + requests==2.31.0 + certifi==2023.7.22 + charset-normalizer==3.3.0 + idna==3.4 + urllib3==2.0.6 + typing_extensions==4.7.1 + urllib3==2.0.6 diff --git a/python/test/constraints.txt b/python/test/constraints.txt new file mode 100644 index 0000000..f42c5f7 --- /dev/null +++ b/python/test/constraints.txt @@ -0,0 +1,2 @@ +# test_github.py fails with newer version +Werkzeug<2.1.0 \ No newline at end of file diff --git a/python/test/files/dart/json/README.md b/python/test/files/dart/json/README.md new file mode 100644 index 0000000..8ad7313 --- /dev/null +++ b/python/test/files/dart/json/README.md @@ -0,0 +1,2 @@ +Example test results from https://github.com/dart-code-checker/dart-code-metrics @ 96001b5e78937be84270b6744898c82f9f0d9ddd + diff --git a/python/test/files/dart/json/tests.annotations b/python/test/files/dart/json/tests.annotations new file mode 100644 index 0000000..aeac63b --- /dev/null +++ b/python/test/files/dart/json/tests.annotations @@ -0,0 +1,118 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '2 errors, 1 fail, 1 skipped, 16 pass in 0s', + 'summary': + '20 tests\u2002\u2003\u200316 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20044 suites\u2003\u2003\u205f\u20041 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20041 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20032 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMTQqAIBBGr' + 'yKuW1REiy4TYkVDpTHqKrp74w+au3nvY97DNzhXwyfWNYwbBzbAQLA4FBa0ImwJabB+6j' + 'PMxknpP8diDrhTK4pNwFmJFVGjz5BBp3LR31UwitIL/MsF/tekvi6wBOliZhf8/QAMgVR' + 'H4QAAAA==\n', + 'annotations': [ + { + 'path': + 'file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/cli_runner_test.dart', + 'start_line': 21, + 'end_line': 21, + 'annotation_level': 'warning', + 'message': 'json/tests.json\u2003[took 0s]', + 'title': 'Cli runner should have correct invocation failed', + 'raw_details': + "Expected: 'metrics [arguments] nope'\n " + "Actual: 'metrics [arguments] '\n Which: " + "is different. Both strings start the same, but the actual value is " + "missing the following trailing characters: nope\n\n" + "package:test_api expect\n" + "test/src/cli/cli_runner_test.dart 22:7 main.." 
+ }, + { + 'path': + 'file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/utils/detect_sdk_path_test.dart', + 'start_line': 16, + 'end_line': 16, + 'annotation_level': 'failure', + 'message': 'json/tests.json\u2003[took 0s]', + 'title': 'detectSdkPath should return `null` if running inside VM with error', + 'raw_details': + 'Exception: exception\ntest/src/cli/utils/detect_sdk_path_test.dart ' + '21:7 main..' + }, + { + 'path': + 'file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/utils/detect_sdk_path_test.dart', + 'start_line': 46, + 'end_line': 46, + 'annotation_level': 'failure', + 'message': 'json/tests.json\u2003[took 0s]', + 'title': + 'detectSdkPath should return null if sdk path is not found inside ' + 'environment PATH variable with error', + 'raw_details': + "Instance of 'Error'\ntest/src/cli/utils/detect_sdk_path_test.dart " + "67:9 main.." + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'Cli runner should have correct description' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 20 tests, see "Raw output" for the full list of tests.', + 'title': '20 tests found', + 'raw_details': + 'AnalysisOptions readIterableOfString returns iterables with data ' + 'or not\nAnalysisOptions readMap returns map with data or not\n' + 'AnalysisOptions readMapOfMap returns map with data or not\n' + 'AnalysisOptions returns correct "folderPath" on posix platforms\n' + 'CheckUnnecessaryNullableCommand should have correct description\n' + 'CheckUnnecessaryNullableCommand should have correct invocation\n' + 'CheckUnnecessaryNullableCommand should have correct name\n' + 'CheckUnnecessaryNullableCommand should have correct usage\nCli ' + 'runner run with version argument\nCli runner should have correct ' + 'description\nCli runner should have correct invocation\n' + 'analysisOptionsFromFile constructs AnalysisOptions from extends ' + 'config\nanalysisOptionsFromFile constructs AnalysisOptions from ' + 'invalid file\nanalysisOptionsFromFile constructs AnalysisOptions ' + 'from null\nanalysisOptionsFromFile constructs AnalysisOptions from ' + 'valid file with single import\nanalysisOptionsFromFile constructs ' + 'AnalysisOptions from yaml file\ndetectSdkPath should find sdk path ' + 'inside environment PATH variable\ndetectSdkPath should return ' + '`null` for non-Windows platforms\ndetectSdkPath should return ' + '`null` if running inside VM\ndetectSdkPath should return null if ' + 'sdk path is not found inside environment PATH variable' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/dart/json/tests.json b/python/test/files/dart/json/tests.json new file mode 100644 index 0000000..c68e97a --- /dev/null +++ b/python/test/files/dart/json/tests.json @@ -0,0 +1,73 @@ +{"protocolVersion":"0.1.1","runnerVersion":"1.23.1","pid":1719,"type":"start","time":0} +{"suite":{"id":0,"platform":"vm","path":"test/src/cli/cli_runner_test.dart"},"type":"suite","time":0} +{"test":{"id":1,"name":"loading test/src/cli/cli_runner_test.dart","suiteID":0,"groupIDs":[],"metadata":{"skip":false,"skipReason":null},"line":null,"column":null,"url":null},"type":"testStart","time":2} +{"count":158,"time":16,"type":"allSuites"} 
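The *.annotations expectation files in this change, such as dart/json/tests.annotations above, spell out the check-run payload the action is expected to produce for each fixture: a name, head SHA, status, conclusion, and an output block with a title, a Markdown summary, and per-test annotations. A hypothetical, hand-written payload in the same shape (values invented for illustration; only the field names and levels are taken from the fixtures) could look like:

    # Hypothetical example of the check-run payload shape mirrored by the
    # *.annotations expectation files; values are made up for illustration.
    check_run = {
        'name': 'Test Results',
        'head_sha': 'commit sha',
        'status': 'completed',
        'conclusion': 'failure',
        'output': {
            'title': '1 fail in 0s',
            'summary': 'Markdown summary with counts and an embedded results blob',
            'annotations': [{
                'path': 'test/example_test.dart',   # invented path
                'start_line': 21,
                'end_line': 21,
                'annotation_level': 'warning',      # fixtures use 'warning' for failures, 'failure' for errors
                'message': 'json/tests.json [took 0s]',
                'title': 'example test failed',
                'raw_details': 'assertion output goes here',
            }],
        },
    }

In the fixtures, additional 'notice' annotations on the path '.github' list the skipped tests and the full set of discovered tests.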
+{"suiteID":0,"observatory":"http://127.0.0.1:39993/KXsTuRgxBB0=/#/inspect?isolateId=isolates%2F1400071086986095&objectId=libraries%2F%4018482860","remoteDebugger":null,"type":"debug","time":9392} +{"testID":1,"result":"success","skipped":false,"hidden":true,"type":"testDone","time":9409} +{"group":{"id":2,"suiteID":0,"parentID":null,"name":"","metadata":{"skip":false,"skipReason":null},"testCount":3,"line":null,"column":null,"url":null},"type":"group","time":9416} +{"group":{"id":3,"suiteID":0,"parentID":2,"name":"Cli runner","metadata":{"skip":false,"skipReason":null},"testCount":3,"line":13,"column":3,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/cli_runner_test.dart"},"type":"group","time":9419} +{"test":{"id":4,"name":"Cli runner should have correct description","suiteID":0,"groupIDs":[2,3],"metadata":{"skip":true,"skipReason":"just skipping"},"line":14,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/cli_runner_test.dart"},"type":"testStart","time":9419} +{"testID":4,"messageType":"skip","message":"Skip: just skipping","type":"print","time":9422} +{"testID":4,"result":"success","skipped":true,"hidden":false,"type":"testDone","time":9424} +{"test":{"id":5,"name":"Cli runner should have correct invocation","suiteID":0,"groupIDs":[2,3],"metadata":{"skip":false,"skipReason":null},"line":21,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/cli_runner_test.dart"},"type":"testStart","time":9425} +{"testID":5,"error":"Expected: 'metrics [arguments] nope'\n Actual: 'metrics [arguments] '\n Which: is different. Both strings start the same, but the actual value is missing the following trailing characters: nope\n","stackTrace":"package:test_api expect\ntest/src/cli/cli_runner_test.dart 22:7 main..\n","isFailure":true,"type":"error","time":9501} +{"testID":5,"result":"failure","skipped":false,"hidden":false,"type":"testDone","time":9502} +{"group":{"id":6,"suiteID":0,"parentID":3,"name":"Cli runner run","metadata":{"skip":false,"skipReason":null},"testCount":1,"line":28,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/cli_runner_test.dart"},"type":"group","time":9503} +{"test":{"id":7,"name":"Cli runner run with version argument","suiteID":0,"groupIDs":[2,3,6],"metadata":{"skip":false,"skipReason":null},"line":35,"column":7,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/cli_runner_test.dart"},"type":"testStart","time":9503} +{"testID":7,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":9525} +{"suite":{"id":8,"platform":"vm","path":"test/src/cli/commands/check_unnecessary_nullable_command_test.dart"},"type":"suite","time":13466} +{"test":{"id":9,"name":"loading test/src/cli/commands/check_unnecessary_nullable_command_test.dart","suiteID":8,"groupIDs":[],"metadata":{"skip":false,"skipReason":null},"line":null,"column":null,"url":null},"type":"testStart","time":13466} +{"suiteID":8,"observatory":"http://127.0.0.1:39993/KXsTuRgxBB0=/#/inspect?isolateId=isolates%2F974706369572371&objectId=libraries%2F%4018385818","remoteDebugger":null,"type":"debug","time":13897} +{"testID":9,"result":"success","skipped":false,"hidden":true,"type":"testDone","time":13897} +{"group":{"id":10,"suiteID":8,"parentID":null,"name":"","metadata":{"skip":false,"skipReason":null},"testCount":4,"line":null,"column":null,"url":null},"type":"group","time":13897} 
+{"group":{"id":11,"suiteID":8,"parentID":10,"name":"CheckUnnecessaryNullableCommand","metadata":{"skip":false,"skipReason":null},"testCount":4,"line":41,"column":3,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/commands/check_unnecessary_nullable_command_test.dart"},"type":"group","time":13897} +{"test":{"id":12,"name":"CheckUnnecessaryNullableCommand should have correct name","suiteID":8,"groupIDs":[10,11],"metadata":{"skip":false,"skipReason":null},"line":45,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/commands/check_unnecessary_nullable_command_test.dart"},"type":"testStart","time":13897} +{"testID":12,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":13920} +{"test":{"id":13,"name":"CheckUnnecessaryNullableCommand should have correct description","suiteID":8,"groupIDs":[10,11],"metadata":{"skip":false,"skipReason":null},"line":49,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/commands/check_unnecessary_nullable_command_test.dart"},"type":"testStart","time":13920} +{"testID":13,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":13923} +{"test":{"id":14,"name":"CheckUnnecessaryNullableCommand should have correct invocation","suiteID":8,"groupIDs":[10,11],"metadata":{"skip":false,"skipReason":null},"line":58,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/commands/check_unnecessary_nullable_command_test.dart"},"type":"testStart","time":13924} +{"testID":14,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":13935} +{"test":{"id":15,"name":"CheckUnnecessaryNullableCommand should have correct usage","suiteID":8,"groupIDs":[10,11],"metadata":{"skip":false,"skipReason":null},"line":65,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/commands/check_unnecessary_nullable_command_test.dart"},"type":"testStart","time":13935} +{"testID":15,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":13942} +{"suite":{"id":57,"platform":"vm","path":"test/src/cli/utils/detect_sdk_path_test.dart"},"type":"suite","time":39345} +{"test":{"id":58,"name":"loading test/src/cli/utils/detect_sdk_path_test.dart","suiteID":57,"groupIDs":[],"metadata":{"skip":false,"skipReason":null},"line":null,"column":null,"url":null},"type":"testStart","time":39346} +{"suiteID":57,"observatory":"http://127.0.0.1:39993/KXsTuRgxBB0=/#/inspect?isolateId=isolates%2F3444428955488735&objectId=libraries%2F%4018185026","remoteDebugger":null,"type":"debug","time":39717} +{"testID":58,"result":"success","skipped":false,"hidden":true,"type":"testDone","time":39717} +{"group":{"id":59,"suiteID":57,"parentID":null,"name":"","metadata":{"skip":false,"skipReason":null},"testCount":4,"line":null,"column":null,"url":null},"type":"group","time":39717} +{"group":{"id":60,"suiteID":57,"parentID":59,"name":"detectSdkPath","metadata":{"skip":false,"skipReason":null},"testCount":4,"line":11,"column":3,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/utils/detect_sdk_path_test.dart"},"type":"group","time":39717} +{"test":{"id":61,"name":"detectSdkPath should return `null` for non-Windows 
platforms","suiteID":57,"groupIDs":[59,60],"metadata":{"skip":false,"skipReason":null},"line":12,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/utils/detect_sdk_path_test.dart"},"type":"testStart","time":39717} +{"testID":61,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":39740} +{"test":{"id":62,"name":"detectSdkPath should return `null` if running inside VM","suiteID":57,"groupIDs":[59,60],"metadata":{"skip":false,"skipReason":null},"line":16,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/utils/detect_sdk_path_test.dart"},"type":"testStart","time":39740} +{"testID":62,"error":"Exception: exception","stackTrace":"test/src/cli/utils/detect_sdk_path_test.dart 21:7 main..\n","isFailure":false,"type":"error","time":39748} +{"testID":62,"result":"error","skipped":false,"hidden":false,"type":"testDone","time":39752} +{"test":{"id":63,"name":"detectSdkPath should find sdk path inside environment PATH variable","suiteID":57,"groupIDs":[59,60],"metadata":{"skip":false,"skipReason":null},"line":24,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/utils/detect_sdk_path_test.dart"},"type":"testStart","time":39752} +{"testID":63,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":39773} +{"test":{"id":64,"name":"detectSdkPath should return null if sdk path is not found inside environment PATH variable","suiteID":57,"groupIDs":[59,60],"metadata":{"skip":false,"skipReason":null},"line":46,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/cli/utils/detect_sdk_path_test.dart"},"type":"testStart","time":39774} +{"testID":64,"error":"Instance of 'Error'","stackTrace":"test/src/cli/utils/detect_sdk_path_test.dart 67:9 main..\n","isFailure":false,"type":"error","time":39778} +{"testID":64,"result":"error","skipped":false,"hidden":false,"type":"testDone","time":39779} +{"suite":{"id":65,"platform":"vm","path":"test/src/config_builder/models/analysis_options_test.dart"},"type":"suite","time":43677} +{"test":{"id":66,"name":"loading test/src/config_builder/models/analysis_options_test.dart","suiteID":65,"groupIDs":[],"metadata":{"skip":false,"skipReason":null},"line":null,"column":null,"url":null},"type":"testStart","time":43677} +{"suiteID":65,"observatory":"http://127.0.0.1:39993/KXsTuRgxBB0=/#/inspect?isolateId=isolates%2F2572154226386227&objectId=libraries%2F%4018279407","remoteDebugger":null,"type":"debug","time":44739} +{"testID":66,"result":"success","skipped":false,"hidden":true,"type":"testDone","time":44741} +{"group":{"id":67,"suiteID":65,"parentID":null,"name":"","metadata":{"skip":false,"skipReason":null},"testCount":9,"line":null,"column":null,"url":null},"type":"group","time":44741} +{"group":{"id":68,"suiteID":65,"parentID":67,"name":"analysisOptionsFromFile constructs AnalysisOptions from","metadata":{"skip":false,"skipReason":null},"testCount":5,"line":39,"column":3,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"group","time":44741} +{"test":{"id":69,"name":"analysisOptionsFromFile constructs AnalysisOptions from null","suiteID":65,"groupIDs":[67,68],"metadata":{"skip":false,"skipReason":null},"line":40,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44742} 
+{"testID":69,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44794} +{"test":{"id":70,"name":"analysisOptionsFromFile constructs AnalysisOptions from invalid file","suiteID":65,"groupIDs":[67,68],"metadata":{"skip":false,"skipReason":null},"line":46,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44795} +{"testID":70,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44796} +{"test":{"id":71,"name":"analysisOptionsFromFile constructs AnalysisOptions from yaml file","suiteID":65,"groupIDs":[67,68],"metadata":{"skip":false,"skipReason":null},"line":55,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44796} +{"testID":71,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44822} +{"test":{"id":72,"name":"analysisOptionsFromFile constructs AnalysisOptions from valid file with single import","suiteID":65,"groupIDs":[67,68],"metadata":{"skip":false,"skipReason":null},"line":115,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44823} +{"testID":72,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44826} +{"test":{"id":73,"name":"analysisOptionsFromFile constructs AnalysisOptions from extends config","suiteID":65,"groupIDs":[67,68],"metadata":{"skip":false,"skipReason":null},"line":130,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44827} +{"testID":73,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44846} +{"group":{"id":74,"suiteID":65,"parentID":67,"name":"AnalysisOptions","metadata":{"skip":false,"skipReason":null},"testCount":4,"line":151,"column":3,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"group","time":44847} +{"test":{"id":75,"name":"AnalysisOptions readIterableOfString returns iterables with data or not","suiteID":65,"groupIDs":[67,74],"metadata":{"skip":false,"skipReason":null},"line":152,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44847} +{"testID":75,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44850} +{"test":{"id":76,"name":"AnalysisOptions readMap returns map with data or not","suiteID":65,"groupIDs":[67,74],"metadata":{"skip":false,"skipReason":null},"line":171,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44850} +{"testID":76,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44852} +{"test":{"id":77,"name":"AnalysisOptions readMapOfMap returns map with data or not","suiteID":65,"groupIDs":[67,74],"metadata":{"skip":false,"skipReason":null},"line":191,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44852} +{"testID":77,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44854} 
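dart/json/tests.json is the line-delimited event stream produced by Dart's JSON test reporter: every line is a standalone JSON object (suite, group, testStart, error, testDone, done, ...). A minimal sketch of tallying visible outcomes from such a stream (illustrative only; this is not the parser shipped with the action):

    import json
    from collections import Counter

    def tally_dart_json(path):
        """Count visible test outcomes in a Dart JSON-reporter stream (sketch)."""
        counts = Counter()
        with open(path) as stream:
            for line in stream:
                line = line.strip()
                if not line:
                    continue
                event = json.loads(line)
                # only count completed, visible tests (loading entries are hidden)
                if event.get('type') != 'testDone' or event.get('hidden'):
                    continue
                if event.get('skipped'):
                    counts['skipped'] += 1
                else:
                    counts[event['result']] += 1   # 'success', 'failure' or 'error'
        return counts

    # For the fixture in this diff this yields roughly:
    # Counter({'success': 16, 'error': 2, 'failure': 1, 'skipped': 1})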
+{"test":{"id":78,"name":"AnalysisOptions returns correct \"folderPath\" on posix platforms","suiteID":65,"groupIDs":[67,74],"metadata":{"skip":false,"skipReason":null},"line":255,"column":5,"url":"file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/src/config_builder/models/analysis_options_test.dart"},"type":"testStart","time":44855} +{"testID":78,"result":"success","skipped":false,"hidden":false,"type":"testDone","time":44856} +{"success":false,"type":"done","time":1201464} diff --git a/python/test/files/dart/json/tests.junit-xml b/python/test/files/dart/json/tests.junit-xml new file mode 100644 index 0000000..5a38ed6 --- /dev/null +++ b/python/test/files/dart/json/tests.junit-xml @@ -0,0 +1,49 @@ + + + + + + + + [arguments] nope' + Actual: 'metrics [arguments] ' + Which: is different. Both strings start the same, but the actual value is missing the following trailing characters: nope + +package:test_api expect +test/src/cli/cli_runner_test.dart 22:7 main.. +]]> + + + + + + + + + + + + + . +]]> + + + + . +]]> + + + + + + + + + + + + + + diff --git a/python/test/files/dart/json/tests.results b/python/test/files/dart/json/tests.results new file mode 100644 index 0000000..10e2931 --- /dev/null +++ b/python/test/files/dart/json/tests.results @@ -0,0 +1,344 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=4, + suite_tests=20, + suite_skipped=1, + suite_failures=1, + suite_errors=2, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='test/src/cli/cli_runner_test.dart', + tests=3, + skipped=1, + failures=1, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='test/src/cli/commands/check_unnecessary_nullable_command_test.dart', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='test/src/cli/utils/detect_sdk_path_test.dart', + tests=4, + skipped=0, + failures=0, + errors=2, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='test/src/config_builder/models/analysis_options_test.dart', + tests=9, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/cli_runner_test.dart', + line=14, + class_name=None, + test_name='Cli runner should have correct description', + result='skipped', + message='Skip: just skipping', + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/cli_runner_test.dart', + line=21, + class_name=None, + test_name='Cli runner should have correct invocation', + result='failure', + message="Expected: 'metrics [arguments] nope'\n " + "Actual: 'metrics [arguments] '\n Which: is " + "different. Both strings start the same, but the actual value is " + "missing the following trailing characters: nope\n", + content="Expected: 'metrics [arguments] nope'\n " + "Actual: 'metrics [arguments] '\n Which: is " + "different. 
Both strings start the same, but the actual value is " + "missing the following trailing characters: nope\n\npackage:test_api " + " expect\ntest/src/cli/cli_runner_test.dart " + "22:7 main..\n", + stdout=None, + stderr=None, + time=0.077 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/cli_runner_test.dart', + line=35, + class_name=None, + test_name='Cli runner run with version argument', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.022 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/commands/check_unnecessary_nullable_command_test.dart', + line=45, + class_name=None, + test_name='CheckUnnecessaryNullableCommand should have correct name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.023 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/commands/check_unnecessary_nullable_command_test.dart', + line=49, + class_name=None, + test_name='CheckUnnecessaryNullableCommand should have correct description', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/commands/check_unnecessary_nullable_command_test.dart', + line=58, + class_name=None, + test_name='CheckUnnecessaryNullableCommand should have correct invocation', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.011 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/commands/check_unnecessary_nullable_command_test.dart', + line=65, + class_name=None, + test_name='CheckUnnecessaryNullableCommand should have correct usage', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/utils/detect_sdk_path_test.dart', + line=12, + class_name=None, + test_name='detectSdkPath should return `null` for non-Windows platforms', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.023 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/utils/detect_sdk_path_test.dart', + line=16, + class_name=None, + test_name='detectSdkPath should return `null` if running inside VM', + result='error', + message='Exception: exception', + content='Exception: exception\ntest/src/cli/utils/detect_sdk_path_test.dart ' + '21:7 main..\n', + stdout=None, + stderr=None, + time=0.012 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/utils/detect_sdk_path_test.dart', + line=24, + class_name=None, + test_name='detectSdkPath should find sdk path inside environment PATH variable', + result='success', 
+ message=None, + content=None, + stdout=None, + stderr=None, + time=0.021 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/cli/utils/detect_sdk_path_test.dart', + line=46, + class_name=None, + test_name='detectSdkPath should return null if sdk path is not found inside ' + 'environment PATH variable', + result='error', + message="Instance of 'Error'", + content="Instance of 'Error'\ntest/src/cli/utils/detect_sdk_path_test.dart " + "67:9 main..\n", + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=40, + class_name=None, + test_name='analysisOptionsFromFile constructs AnalysisOptions from null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.052 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=46, + class_name=None, + test_name='analysisOptionsFromFile constructs AnalysisOptions from invalid file', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=55, + class_name=None, + test_name='analysisOptionsFromFile constructs AnalysisOptions from yaml file', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.026 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=115, + class_name=None, + test_name='analysisOptionsFromFile constructs AnalysisOptions from valid file ' + 'with single import', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=130, + class_name=None, + test_name='analysisOptionsFromFile constructs AnalysisOptions from extends ' + 'config', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.019 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=152, + class_name=None, + test_name='AnalysisOptions readIterableOfString returns iterables with data or ' + 'not', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=171, + class_name=None, + test_name='AnalysisOptions readMap returns map with data or not', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=191, + class_name=None, + test_name='AnalysisOptions readMapOfMap returns map with data or not', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='json/tests.json', + test_file='file:///home/runner/work/dart-code-metrics/dart-code-metrics/test/' + 'src/config_builder/models/analysis_options_test.dart', + line=255, + class_name=None, + test_name='AnalysisOptions returns correct "folderPath" on posix platforms', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/json/empty.exception b/python/test/files/json/empty.exception new file mode 100644 index 0000000..97498c4 --- /dev/null +++ b/python/test/files/json/empty.exception @@ -0,0 +1 @@ +ParseError: file='files/json/empty.json', message='File is empty.', line=None, column=None, exception=Exception('File is empty.') \ No newline at end of file diff --git a/python/test/files/json/empty.json b/python/test/files/json/empty.json new file mode 100644 index 0000000..e69de29 diff --git a/python/test/files/json/malformed-json.exception b/python/test/files/json/malformed-json.exception new file mode 100644 index 0000000..f3eb10d --- /dev/null +++ b/python/test/files/json/malformed-json.exception @@ -0,0 +1 @@ +ParseError: file='files/json/malformed-json.json', message='Expecting property name enclosed in double quotes: line 1 column 16 (char 15)', line=None, column=None, exception=JSONDecodeError('Expecting property name enclosed in double quotes: line 1 column 16 (char 15)') \ No newline at end of file diff --git a/python/test/files/json/malformed-json.json b/python/test/files/json/malformed-json.json new file mode 100644 index 0000000..11401bc --- /dev/null +++ b/python/test/files/json/malformed-json.json @@ -0,0 +1 @@ +{"key": "val", malformed: "key"} diff --git a/python/test/files/json/non-json.exception b/python/test/files/json/non-json.exception new file mode 100644 index 0000000..05d8c27 --- /dev/null +++ b/python/test/files/json/non-json.exception @@ -0,0 +1 @@ +ParseError: file='files/non-json.json', message='Expecting value: line 1 column 1 (char 0)', line=None, column=None, exception=JSONDecodeError('Expecting value: line 1 column 1 (char 0)') \ No newline at end of file diff --git a/python/test/files/json/non-json.json b/python/test/files/json/non-json.json new file mode 100644 index 0000000..ede72e7 --- /dev/null +++ b/python/test/files/json/non-json.json @@ -0,0 +1 @@ +this is not json diff --git a/python/test/files/json/not-existing.exception b/python/test/files/json/not-existing.exception new file mode 100644 index 0000000..a3092b3 --- /dev/null +++ b/python/test/files/json/not-existing.exception @@ -0,0 +1 @@ +ParseError: file='files/json/not-existing.json', message='File does not exist.', line=None, column=None, exception=FileNotFoundError('File does not exist.') \ No newline at end of file diff --git a/python/test/files/junit-xml/bazel/suite-logs.annotations b/python/test/files/junit-xml/bazel/suite-logs.annotations new file mode 100644 index 0000000..f0365bb --- /dev/null +++ b/python/test/files/junit-xml/bazel/suite-logs.annotations @@ 
-0,0 +1,79 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors in 0s', + 'summary': + '1 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMMQ6AIBAEv' + '0KoLbT1M4QgxIsC5g4q498liHp0OzvJnNLBbknOYhqEpAzpgyWjThBDwbFgEelVdSvKxn' + 'CpaIOjO5yGvTssYsQWwRyITZ57+K9VZrHKvGWi95AKtCVo1fK6AX55nzvdAAAA\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'bazel/suite-logs.xml\u2003[took 0s]', + 'title': 'bazel/failing_absl_test with error', + 'raw_details': 'exited with error code 1' + }, + { + 'path': 'bazel/failing_absl_test', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'Test suite bazel/failing_absl_test has the following stdout output ' + '(see Raw output).', + 'title': 'Logging on stdout of test suite bazel/failing_absl_test', + 'raw_details': + 'Generated test.log (if the file is not UTF-8, then this may be ' + 'unreadable):\nexec ${PAGER:-/usr/bin/less} "$0" || exit 1\n' + 'Executing tests from //bazel:failing_absl_test\n' + '-------------------------------------------------------------------' + '----------\nTraceback (most recent call last):\n File ' + '"", line 3, in \n import non_existent_package\n' + 'ModuleNotFoundError: No module named \'non_existent_package\'' + }, + { + 'path': 'bazel/failing_absl_test', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': + 'Test suite bazel/failing_absl_test has the following stderr output ' + '(see Raw output).', + 'title': 'Logging on stderr of test suite bazel/failing_absl_test', + 'raw_details': 'Generated test.err' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There is 1 test, see "Raw output" for the name of the test.', + 'title': '1 test found', + 'raw_details': 'bazel/failing_absl_test' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/bazel/suite-logs.junit-xml b/python/test/files/junit-xml/bazel/suite-logs.junit-xml new file mode 100644 index 0000000..2a3c1e5 --- /dev/null +++ b/python/test/files/junit-xml/bazel/suite-logs.junit-xml @@ -0,0 +1,19 @@ + + + + + +Generated test.log (if the file is not UTF-8, then this may be unreadable): +exec ${PAGER:-/usr/bin/less} "$0" || exit 1 +Executing tests from //bazel:failing_absl_test +----------------------------------------------------------------------------- +Traceback (most recent call last): + File "<reducted>", line 3, in <module> + import non_existent_package +ModuleNotFoundError: No 
module named 'non_existent_package' + + +Generated test.err + + + diff --git a/python/test/files/junit-xml/bazel/suite-logs.results b/python/test/files/junit-xml/bazel/suite-logs.results new file mode 100644 index 0000000..e9a777d --- /dev/null +++ b/python/test/files/junit-xml/bazel/suite-logs.results @@ -0,0 +1,42 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=1, + suite_skipped=0, + suite_failures=0, + suite_errors=1, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='bazel/failing_absl_test', + tests=1, + skipped=0, + failures=0, + errors=1, + stdout='Generated test.log (if the file is not UTF-8, then this may be ' + 'unreadable):\nexec ${PAGER:-/usr/bin/less} "$0" || exit 1\nExecuting ' + 'tests from //bazel:failing_absl_test\n' + '---------------------------------------------------------------------' + '--------\nTraceback (most recent call last):\n File "", ' + 'line 3, in \n import non_existent_package\n' + 'ModuleNotFoundError: No module named \'non_existent_package\'', + stderr='Generated test.err' + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='bazel/suite-logs.xml', + test_file=None, + line=None, + class_name=None, + test_name='bazel/failing_absl_test', + result='error', + message='exited with error code 1', + content=None, + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/bazel/suite-logs.xml b/python/test/files/junit-xml/bazel/suite-logs.xml new file mode 100644 index 0000000..adcf19b --- /dev/null +++ b/python/test/files/junit-xml/bazel/suite-logs.xml @@ -0,0 +1,19 @@ + + + + + +Generated test.log (if the file is not UTF-8, then this may be unreadable): +", line 3, in + import non_existent_package +ModuleNotFoundError: No module named 'non_existent_package']]> + + +Generated test.err + + + diff --git a/python/test/files/junit-xml/jest/jest-junit.annotations b/python/test/files/junit-xml/jest/jest-junit.annotations new file mode 100644 index 0000000..c521c6d --- /dev/null +++ b/python/test/files/junit-xml/jest/jest-junit.annotations @@ -0,0 +1,39 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 2 tests pass in 0s', + 'summary': + '2 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLdTSyxCCEDfyMQtUxrsLCgrdvJnJO6kCLT1dyDQQ6iOED9aIPICzCceEaQh5mmtmPg' + 'rRFzsc7ZspDrorJKLD0mC01Zdjq3v5tz3cyB5uXcIZAyFBScRvnF43yWbLod0AAAA=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 2 tests, see "Raw output" for the full list of tests.', + 'title': '2 tests found', + 'raw_details': 'Load 
widget via link\nMount iframe' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/jest/jest-junit.junit-xml b/python/test/files/junit-xml/jest/jest-junit.junit-xml new file mode 100644 index 0000000..bfb1490 --- /dev/null +++ b/python/test/files/junit-xml/jest/jest-junit.junit-xml @@ -0,0 +1,9 @@ + + + + + + + + + diff --git a/python/test/files/junit-xml/jest/jest-junit.results b/python/test/files/junit-xml/jest/jest-junit.results new file mode 100644 index 0000000..058c504 --- /dev/null +++ b/python/test/files/junit-xml/jest/jest-junit.results @@ -0,0 +1,49 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=2, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='widget.test.js', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='jest/jest-junit.xml', + test_file=None, + line=None, + class_name='', + test_name='Load widget via link', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.272 + ), + publish.unittestresults.UnitTestCase( + result_file='jest/jest-junit.xml', + test_file=None, + line=None, + class_name='', + test_name='Mount iframe', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.023 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/jest/jest-junit.xml b/python/test/files/junit-xml/jest/jest-junit.xml new file mode 100644 index 0000000..63ae5fd --- /dev/null +++ b/python/test/files/junit-xml/jest/jest-junit.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/junit.multiresult.annotations b/python/test/files/junit-xml/junit.multiresult.annotations new file mode 100644 index 0000000..7350421 --- /dev/null +++ b/python/test/files/junit-xml/junit.multiresult.annotations @@ -0,0 +1,83 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors, 1 fail, 1 skipped, 1 pass in 1s', + 'summary': + '1 files\u2004\u20031 suites\u2004\u2003\u20021s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '4 tests\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '1 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '1 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n4 runs\u2006\u2003' + '-2 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '3 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '2 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u20031 
' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KotfBTeRlCEONGPmYXKuPdlQhEujdvkrn4BkYTX9jQMU4RQoU1ogzgXcZXhKTmsgVFpf' + '5S0AFnc2wSTHNoRI/5wehKL82S68d6fLmpcK5V/48pby2EF/JitEt+P6y+BE/eAAAA\n', + 'annotations': [ + { + 'path': 'test class', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'junit.multiresult.xml\u2003[took 0s]', + 'title': 'test that errors (test class) with error', + 'raw_details': 'test teardown failure\nstdout' + }, + { + 'path': 'test class', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'junit.multiresult.xml\u2003[took 0s]', + 'title': 'test that fails (test class) failed', + 'raw_details': 'test failure\nAssertion failed' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'test class ‑ test that is skipped' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 4 tests, see "Raw output" for the full list of tests.', + 'title': '4 tests found', + 'raw_details': + 'test class ‑ test that errors\ntest class ‑ test that fails\ntest ' + 'class ‑ test that is skipped\ntest class ‑ test that succeeds' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/junit.multiresult.junit-xml b/python/test/files/junit-xml/junit.multiresult.junit-xml new file mode 100644 index 0000000..7136fe3 --- /dev/null +++ b/python/test/files/junit-xml/junit.multiresult.junit-xml @@ -0,0 +1,19 @@ + + + + + + Assertion failed + stdout + + + + Assertion failed + + + + + + + + diff --git a/python/test/files/junit-xml/junit.multiresult.results b/python/test/files/junit-xml/junit.multiresult.results new file mode 100644 index 0000000..8b2acc1 --- /dev/null +++ b/python/test/files/junit-xml/junit.multiresult.results @@ -0,0 +1,75 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=3, + suite_failures=2, + suite_errors=1, + suite_time=1, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='test suite', + tests=4, + skipped=3, + failures=2, + errors=1, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='junit.multiresult.xml', + test_file=None, + line=None, + class_name='test class', + test_name='test that errors', + result='error', + message='test teardown failure', + content='stdout', + stdout=None, + stderr=None, + time=0.123 + ), + publish.unittestresults.UnitTestCase( + result_file='junit.multiresult.xml', + test_file=None, + line=None, + class_name='test class', + test_name='test that fails', + result='failure', + message='test failure', + content='Assertion failed', + stdout=None, + stderr=None, + time=0.234 + ), + publish.unittestresults.UnitTestCase( + result_file='junit.multiresult.xml', + test_file=None, + line=None, + class_name='test class', + test_name='test that is skipped', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.345 + ), + publish.unittestresults.UnitTestCase( + result_file='junit.multiresult.xml', + test_file=None, + line=None, + class_name='test 
class', + test_name='test that succeeds', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.456 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/junit.multiresult.xml b/python/test/files/junit-xml/junit.multiresult.xml new file mode 100644 index 0000000..172839e --- /dev/null +++ b/python/test/files/junit-xml/junit.multiresult.xml @@ -0,0 +1,19 @@ + + + + + + Assertion failed + stdout + + + + Assertion failed + + + + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/minimal-attributes.annotations b/python/test/files/junit-xml/minimal-attributes.annotations new file mode 100644 index 0000000..c1920cc --- /dev/null +++ b/python/test/files/junit-xml/minimal-attributes.annotations @@ -0,0 +1,70 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors, 1 fail, 1 skipped, 1 pass in 0s', + 'summary': + '4 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLTSx8jKGIMSNfMwClfHuAoJC92Z2MxeVoISjC5kGQl0A/8EWkHmwJuIYMR58Os11ry' + '5wXn6LOODshGSgOiEQLRaDwdRemm3u5b+WuYllblvcag0+QlnE7YzeD8XajRvdAAAA\n', + 'annotations': [ + { + 'path': 'ClassName', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'minimal-attributes.xml', + 'title': 'failed_test (ClassName) failed' + }, + { + 'path': 'ClassName', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'minimal-attributes.xml', + 'title': 'error_test (ClassName) with error' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'ClassName ‑ skipped_test' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 4 tests, see "Raw output" for the full list of tests.', + 'title': '4 tests found', + 'raw_details': + 'ClassName ‑ error_test\nClassName ‑ failed_test\nClassName ‑ ' + 'skipped_test\nClassName ‑ test_name' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/minimal-attributes.junit-xml b/python/test/files/junit-xml/minimal-attributes.junit-xml new file mode 100644 index 0000000..92ca206 --- /dev/null +++ b/python/test/files/junit-xml/minimal-attributes.junit-xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/minimal-attributes.results 
b/python/test/files/junit-xml/minimal-attributes.results new file mode 100644 index 0000000..83a9370 --- /dev/null +++ b/python/test/files/junit-xml/minimal-attributes.results @@ -0,0 +1,75 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=1, + suite_failures=1, + suite_errors=1, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name=None, + tests=4, + skipped=1, + failures=1, + errors=1, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='minimal-attributes.xml', + test_file=None, + line=None, + class_name='ClassName', + test_name='test_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='minimal-attributes.xml', + test_file=None, + line=None, + class_name='ClassName', + test_name='skipped_test', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='minimal-attributes.xml', + test_file=None, + line=None, + class_name='ClassName', + test_name='failed_test', + result='failure', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='minimal-attributes.xml', + test_file=None, + line=None, + class_name='ClassName', + test_name='error_test', + result='error', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/minimal-attributes.xml b/python/test/files/junit-xml/minimal-attributes.xml new file mode 100644 index 0000000..2d99719 --- /dev/null +++ b/python/test/files/junit-xml/minimal-attributes.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/mocha/latex-utensils.annotations b/python/test/files/junit-xml/mocha/latex-utensils.annotations new file mode 100644 index 0000000..fe84823 --- /dev/null +++ b/python/test/files/junit-xml/mocha/latex-utensils.annotations @@ -0,0 +1,132 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 101 tests pass in 0s', + 'summary': + '\u205f\u2004\u205f\u20041 files\u2004\u2003\u205f\u2004\u205f\u20041 ' + 'suites\u2004\u2003\u20020s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '101 tests\u2003101 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '0 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n109 runs\u2006\u2003' + '109 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '0 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 
'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/12MMQ6AIBAEv' + '0KoLaDUzxCCEC8imAMq498liIJ2N7O5OagBqwOdCB8IDQniC3NCGcG7jCxjHmKZGH9IhK' + 'TUX62w9x/CSLAfoRE9VoPJ3c2xQks204qFu2Dhvqf8tkHMUC8SFknPC30yEpLlAAAA\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 101 tests, see "Raw output" for the full list of tests.', + 'title': '101 tests found', + 'raw_details': + 'bibtexParser ‑ parse @comment @article @comment\nbibtexParser ‑ ' + 'parse a simple bib file\nbibtexParser ‑ parse an entry with only ' + 'key\nbibtexParser ‑ parse bib with abbreviation\nbibtexParser ‑ ' + 'parse bib with comments\nbibtexParser ‑ parse entry with command\n' + 'bibtexParser ‑ parse entry with concat\nbibtexParser ‑ parse entry ' + 'with empty citeky\nbibtexParser ‑ parse entry with quotes\n' + 'bibtexParser ‑ parse fields ending ,\nbibtexParser ‑ parse only ' + '@comment\nbibtexParser ‑ should not throw SyntaxError\n' + 'bibtexParser ‑ should throw SyntaxError\nlatexLogParser ‑ parse ' + 'LaTeX log files\nlatexLogParser ‑ parse LaTeX log files generated ' + 'with -halt-on-error\nlatexParser matchers findAll ‑ test ' + 'latexParser.findAll\nlatexParser matchers findAllSeqences ‑ test ' + 'latexParser.findAllSeqences\nlatexParser matchers latexParser ' + 'findNodeAt ‑ test latexParser.findNodeAt\nlatexParser matchers ' + 'latexParser findNodeAt ‑ test latexParser.findNodeAt with line and ' + 'column\nlatexParser matchers latexParser findNodeAt ‑ test ' + 'latexParser.findNodeAt with line and column for multiple lines\n' + 'latexParser matchers pattern ‑ test latexParser.pattern\n' + 'latexParser matchers pattern ‑ test latexParser.pattern.match\n' + 'latexParser matchers type ‑ test that properties having a ' + 'Node-related-type value are only content, args, and arg.\n' + 'latexParser matchers type ‑ test the types of content, arg, and ' + 'args.\nlatexParser other ‑ test type guard\nlatexParser other ‑ ' + 'test type guard with assingment and never type\nlatexParser parse ' + '2 ‑ parse Sch\\"onbrunner Schlo\\ss{} Stra\\ss e\nlatexParser ' + 'parse 2 ‑ parse \\"\\i\nlatexParser parse 2 ‑ parse a\\\\b ' + 'c\\newline\nlatexParser parse 2 ‑ parse space + \\begin{center}\n' + 'latexParser parse 2 ‑ parse x {a} { b }d\nlatexParser parse 2 ‑ ' + 'parse { a }d\nlatexParser parse ‑ parse $ $, including only spaces\n' + 'latexParser parse ‑ parse $ a ^ b $\nlatexParser parse ‑ parse $$ ' + '$$\nlatexParser parse ‑ parse $1$\nlatexParser parse ‑ parse ' + '$\\left(1\\right]$\nlatexParser parse ‑ parse $\\left.1\\right]$\n' + 'latexParser parse ‑ parse $a^b$\nlatexParser parse ‑ parse $a^b$ ' + 'with {enableMathCharacterLocation: true}\nlatexParser parse ‑ ' + 'parse \\( \\)\nlatexParser parse ‑ parse \\[ \\]\nlatexParser ' + 'parse ‑ parse \\begin{align} \\begin{alignedat}\nlatexParser parse ' + '‑ parse \\begin{align} \\begin{aligned}\nlatexParser parse ‑ parse ' + '\\begin{align} \\end{align}\nlatexParser parse ‑ parse ' + '\\begin{align}...\nlatexParser parse ‑ parse \\begin{center} ' + '\\begin{itemize}\nlatexParser parse ‑ parse \\begin{center}...\n' + 'latexParser parse ‑ parse \\begin{center}\\endcommand\nlatexParser ' + 'parse ‑ parse \\begin{lstlisting}...\nlatexParser parse ‑ parse ' + '\\begin{minted}...\nlatexParser parse ‑ parse ' + '\\begin{verbatim*}...\nlatexParser parse ‑ parse ' + 
'\\begin{verbatim}...\nlatexParser parse ‑ parse ' + '\\begin{verbatim}... 02\nlatexParser parse ‑ parse \\def\\abc ' + '[#1]#2 {#2#1abc}\nlatexParser parse ‑ parse \\def\\abc{abc}\n' + 'latexParser parse ‑ parse \\href\nlatexParser parse ‑ parse ' + '\\label{a_b}\nlatexParser parse ‑ parse \\linebreakMyCommand\n' + 'latexParser parse ‑ parse \\newlineMyCommand\nlatexParser parse ‑ ' + 'parse \\node[label={abc}, efg]\nlatexParser parse ‑ parse ' + '\\par\\par\nlatexParser parse ‑ parse \\part\nlatexParser parse ‑ ' + 'parse \\url\nlatexParser parse ‑ parse \\verb*|1|\nlatexParser ' + 'parse ‑ parse \\verbatimfont{\\small}\nlatexParser parse ‑ parse ' + '\\verb|1|\nlatexParser parse ‑ parse a command whose name has @\n' + 'latexParser parse ‑ parse a^b\nlatexParser parse ‑ parse a_b\n' + 'latexParser parse ‑ parse an optional argument having only spaces\n' + 'latexParser parse ‑ parse comments\nlatexParser parse ‑ parse ' + 'empty preamble\nlatexParser parse ‑ parse invalid commands without ' + 'error\nlatexParser parse ‑ parse newenvironment command\n' + 'latexParser parse ‑ parse optional arguments having a tilde\n' + 'latexParser parse ‑ parse optional arguments having spaces\n' + 'latexParser parse ‑ parse preamble\nlatexParser parse ‑ parse ' + 'unbalanced \\begin\nlatexParser parse ‑ parse unbalanced ' + '\\begin{aligned}\nlatexParser parse ‑ parse unbalanced \\end\n' + 'latexParser parse ‑ parse unbalanced \\end{aligned}\nlatexParser ' + 'parse ‑ parse { }, including only spaces\nlatexParser parse ‑ ' + 'parse ~\nlatexParser parse ‑ should throw SyntaxError\nlatexParser ' + 'stringify ‑ test latexParser.stringify a b\nlatexParser stringify ' + '‑ test latexParser.stringify a b\nlatexParser stringify ‑ test ' + 'latexParser.stringify a\\nb\nlatexParser stringify ‑ test ' + 'latexParser.stringify a_b\nlatexParser stringify ‑ test ' + 'latexParser.stringify newcommand 01\nlatexParser stringify ‑ test ' + 'latexParser.stringify newcommand 02\nlatexParser stringify ‑ test ' + 'latexParser.stringify newcommand 03\nlatexParser stringify ‑ test ' + 'latexParser.stringify with lineBreak 01\nlatexParser stringify ‑ ' + 'test latexParser.stringify with lineBreak 02\nlatexParser ' + 'stringify ‑ test stringify $ \\sin x$\nlatexParser stringify ‑ ' + 'test stringify $a^b$\nlatexParser stringify ‑ test stringify ' + '\\def\\abc [#1]#2 {#2#1abc}\nlatexParser stringify ‑ test ' + 'stringify \\href[]{}{}\nlatexParser stringify ‑ test stringify ' + '\\href{}{}\nlatexParser stringify ‑ test stringify \\url\n' + 'latexParser stringify ‑ test stringify a_b' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/mocha/latex-utensils.junit-xml b/python/test/files/junit-xml/mocha/latex-utensils.junit-xml new file mode 100644 index 0000000..93d4973 --- /dev/null +++ b/python/test/files/junit-xml/mocha/latex-utensils.junit-xml @@ -0,0 +1,112 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/mocha/latex-utensils.results b/python/test/files/junit-xml/mocha/latex-utensils.results new file mode 100644 index 0000000..3cab102 --- /dev/null +++ b/python/test/files/junit-xml/mocha/latex-utensils.results @@ -0,0 +1,1441 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=109, + suite_skipped=0, + suite_failures=0, + 
suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Mocha Tests', + tests=109, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse a simple bib file', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse fields ending ,', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse an entry with only key', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse bib with comments', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse only @comment', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse @comment @article @comment', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse bib with abbreviation', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse entry with empty citeky', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse entry with command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse entry with command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse entry with concat', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='parse entry with quotes', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='should not throw SyntaxError', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='bibtexParser', + test_name='should throw SyntaxError', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexLogParser', + test_name='parse LaTeX log files', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.117 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexLogParser', + test_name='parse LaTeX log files generated with -halt-on-error', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.071 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers findAll', + test_name='test latexParser.findAll', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers findAllSeqences', + test_name='test latexParser.findAllSeqences', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers pattern', + test_name='test latexParser.pattern', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers pattern', + test_name='test latexParser.pattern.match', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers pattern', + test_name='test latexParser.pattern', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers latexParser findNodeAt', + test_name='test latexParser.findNodeAt', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers latexParser findNodeAt', + test_name='test latexParser.findNodeAt', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers latexParser findNodeAt', + test_name='test 
latexParser.findNodeAt', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers latexParser findNodeAt', + test_name='test latexParser.findNodeAt with line and column', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers latexParser findNodeAt', + test_name='test latexParser.findNodeAt with line and column for multiple lines', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers type', + test_name='test that properties having a Node-related-type value are only ' + 'content, args, and arg.', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser matchers type', + test_name='test the types of content, arg, and args.', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{center}...', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{center}\\endcommand', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{center} \\begin{itemize}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse unbalanced \\begin', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse unbalanced \\end', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse $1$', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\url', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + 
result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\url', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\href', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\href', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\verb|1|', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\verb*|1|', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\verbatimfont{\\small}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{verbatim}...', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{verbatim}... 
02', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{verbatim*}...', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{minted}...', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{lstlisting}...', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse comments', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{align}...', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{align}...', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse unbalanced \\begin{aligned}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse newenvironment command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse an optional argument having only spaces', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse optional arguments having spaces', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse optional arguments having a tilde', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\node[label={abc}, efg]', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\def\\abc{abc}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\def\\abc [#1]#2 {#2#1abc}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse a command whose name has @', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse invalid commands without error', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\part', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\par\\par', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\newlineMyCommand', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\linebreakMyCommand', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\label{a_b}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse a_b', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse a^b', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse $a^b$', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + 
line=None, + class_name='latexParser parse', + test_name='parse $a^b$ with {enableMathCharacterLocation: true}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse ~', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='should throw SyntaxError', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse $ a ^ b $', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse $\\left(1\\right]$', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse $\\left.1\\right]$', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse { }, including only spaces', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse $ $, including only spaces', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\( \\)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\[ \\]', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse $$ $$', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{align} \\end{align}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{align} \\begin{aligned}', + result='success', + 
message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse \\begin{align} \\begin{alignedat}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse unbalanced \\begin{aligned}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse unbalanced \\end{aligned}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse preamble', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse', + test_name='parse empty preamble', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse 2', + test_name='parse \\"\\i', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse 2', + test_name='parse Sch\\"onbrunner Schlo\\ss{} Stra\\ss e', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse 2', + test_name='parse a\\\\b c\\newline', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse 2', + test_name='parse space + \\begin{center}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse 2', + test_name='parse { a }d', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser parse 2', + test_name='parse x {a} { b }d', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify a b', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + 
publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify a b', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify a_b', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify a\\nb', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify newcommand 01', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify newcommand 02', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify newcommand 03', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.008 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify with lineBreak 01', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test latexParser.stringify with lineBreak 02', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test stringify a_b', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test stringify $a^b$', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test stringify $ \\sin x$', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test stringify \\def\\abc [#1]#2 {#2#1abc}', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test stringify \\url', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test stringify \\href[]{}{}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser stringify', + test_name='test stringify \\href{}{}', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser other', + test_name='test type guard', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mocha/latex-utensils.xml', + test_file=None, + line=None, + class_name='latexParser other', + test_name='test type guard with assingment and never type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/mocha/latex-utensils.xml b/python/test/files/junit-xml/mocha/latex-utensils.xml new file mode 100644 index 0000000..c5d96d7 --- /dev/null +++ b/python/test/files/junit-xml/mocha/latex-utensils.xml @@ -0,0 +1,111 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/no-attributes.annotations b/python/test/files/junit-xml/no-attributes.annotations new file mode 100644 index 0000000..40ace0f --- /dev/null +++ b/python/test/files/junit-xml/no-attributes.annotations @@ -0,0 +1,31 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors, 1 fail, 1 skipped, 1 pass in 0s', + 'summary': + '4 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLTSx8jKGIMSNfMwClfHuAoJC92Z2MxeVoISjC5kGQl0A/8EWkHmwJuIYMR58Os11ry' + 
'5wXn6LOODshGSgOiEQLRaDwdRemm3u5b+WuYllblvcag0+QlnE7YzeD8XajRvdAAAA\n', + 'annotations': [] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/no-attributes.junit-xml b/python/test/files/junit-xml/no-attributes.junit-xml new file mode 100644 index 0000000..a14fca7 --- /dev/null +++ b/python/test/files/junit-xml/no-attributes.junit-xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/no-attributes.results b/python/test/files/junit-xml/no-attributes.results new file mode 100644 index 0000000..4011aa9 --- /dev/null +++ b/python/test/files/junit-xml/no-attributes.results @@ -0,0 +1,22 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=1, + suite_failures=1, + suite_errors=1, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name=None, + tests=4, + skipped=1, + failures=1, + errors=1, + stdout=None, + stderr=None + ) + ], + cases=[] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/no-attributes.xml b/python/test/files/junit-xml/no-attributes.xml new file mode 100644 index 0000000..e260ae8 --- /dev/null +++ b/python/test/files/junit-xml/no-attributes.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/no-cases-but-tests.annotations b/python/test/files/junit-xml/no-cases-but-tests.annotations new file mode 100644 index 0000000..5bb2fd9 --- /dev/null +++ b/python/test/files/junit-xml/no-cases-but-tests.annotations @@ -0,0 +1,30 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 2 skipped, 3 pass in 0s', + 'summary': + '6 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20032 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBAFr' + '0KoLfwkFl7GEJS4UcEsUBnv7spH6N7MS+bmCo7V8ol1DePWg/th8SgcGE3YEtLhvmvMe7' + 'ZeShJDETtcJPpfKAFHqkWxIhpMQfQ6975Z5yKXWuAqFrhuSXOe4AjSYnYT/HkBNCXSZd0' + 'AAAA=\n', + 'annotations': [] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/no-cases-but-tests.junit-xml b/python/test/files/junit-xml/no-cases-but-tests.junit-xml new file mode 100644 index 0000000..c59bf56 --- /dev/null +++ b/python/test/files/junit-xml/no-cases-but-tests.junit-xml @@ -0,0 +1,4 @@ + + + + diff --git a/python/test/files/junit-xml/no-cases-but-tests.results b/python/test/files/junit-xml/no-cases-but-tests.results new file mode 100644 index 0000000..63bc5d0 --- /dev/null +++ b/python/test/files/junit-xml/no-cases-but-tests.results @@ -0,0 +1,22 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=6, + suite_skipped=2, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + 
publish.unittestresults.UnitTestSuite( + name='pytest', + tests=6, + skipped=2, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/no-cases-but-tests.xml b/python/test/files/junit-xml/no-cases-but-tests.xml new file mode 100644 index 0000000..a1f85fb --- /dev/null +++ b/python/test/files/junit-xml/no-cases-but-tests.xml @@ -0,0 +1 @@ + diff --git a/python/test/files/junit-xml/no-cases.annotations b/python/test/files/junit-xml/no-cases.annotations new file mode 100644 index 0000000..e49dc25 --- /dev/null +++ b/python/test/files/junit-xml/no-cases.annotations @@ -0,0 +1,29 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'No tests found', + 'summary': + '0 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0K2ttDWyxiCEDfyMbtQGe8uQaNL92ZeMic49JZhVtOggAvmD9ZCOmOKFceK9cgs98LFmF' + '7seHTCafSdsESJXkMlspgy9/BfayxijWXLpBAwV3iX4k3DdQOuuvQ/3QAAAA==\n', + 'annotations': [] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/no-cases.junit-xml b/python/test/files/junit-xml/no-cases.junit-xml new file mode 100644 index 0000000..4e2bc81 --- /dev/null +++ b/python/test/files/junit-xml/no-cases.junit-xml @@ -0,0 +1,4 @@ + + + + diff --git a/python/test/files/junit-xml/no-cases.results b/python/test/files/junit-xml/no-cases.results new file mode 100644 index 0000000..df6ce55 --- /dev/null +++ b/python/test/files/junit-xml/no-cases.results @@ -0,0 +1,22 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=0, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=0, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/no-cases.xml b/python/test/files/junit-xml/no-cases.xml new file mode 100644 index 0000000..12c8ba9 --- /dev/null +++ b/python/test/files/junit-xml/no-cases.xml @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/python/test/files/junit-xml/non-junit.annotations b/python/test/files/junit-xml/non-junit.annotations new file mode 100644 index 0000000..6e1249a --- /dev/null +++ b/python/test/files/junit-xml/non-junit.annotations @@ -0,0 +1,39 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 parse errors', + 'summary': + '0 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed 
tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '0 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n1 errors\n\n' + 'Results for commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMywqAIBBFf' + '0Vm3aK2/UyIKQ35iBldRf+emNC4u+dcODc49JZhVcukgAvmBnOFvZDOmGLHemSWe+NizC' + 'hOvAbhNPpBWKJE3VCJLKbMffzXGotYY9kyKQTMFfpSfGh4XnRU87HdAAAA\n', + 'annotations': [ + { + 'path': 'non-junit.xml', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'Invalid format.', + 'title': 'Error processing result file', + 'raw_details': 'non-junit.xml' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/non-junit.exception b/python/test/files/junit-xml/non-junit.exception new file mode 100644 index 0000000..8ab21ff --- /dev/null +++ b/python/test/files/junit-xml/non-junit.exception @@ -0,0 +1 @@ +JUnitXmlError('Invalid format.') \ No newline at end of file diff --git a/python/test/files/junit-xml/non-junit.junit-xml b/python/test/files/junit-xml/non-junit.junit-xml new file mode 100644 index 0000000..de6d15a --- /dev/null +++ b/python/test/files/junit-xml/non-junit.junit-xml @@ -0,0 +1,6 @@ + + + + + + diff --git a/python/test/files/junit-xml/non-junit.results b/python/test/files/junit-xml/non-junit.results new file mode 100644 index 0000000..7a64531 --- /dev/null +++ b/python/test/files/junit-xml/non-junit.results @@ -0,0 +1,18 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[ + publish.unittestresults.ParseError( + file='non-junit.xml', + message='Invalid format.', + exception=junitparser.junitparser.JUnitXmlError('Invalid format.') + ) + ], + suites=0, + suite_tests=0, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[], + cases=[] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/non-junit.xml b/python/test/files/junit-xml/non-junit.xml new file mode 100644 index 0000000..550e86a --- /dev/null +++ b/python/test/files/junit-xml/non-junit.xml @@ -0,0 +1,8 @@ + + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/corrupt-xml.exception b/python/test/files/junit-xml/pytest/corrupt-xml.exception new file mode 100644 index 0000000..56d6493 --- /dev/null +++ b/python/test/files/junit-xml/pytest/corrupt-xml.exception @@ -0,0 +1 @@ +ParseError: file='files/junit-xml/pytest/corrupt-xml.xml', message='Premature end of data in tag skipped line 9, line 11, column 22 (corrupt-xml.xml, line 11)', line=None, column=None, exception=XMLSyntaxError('Premature end of data in tag skipped line 9, line 11, column 22') \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/corrupt-xml.xml b/python/test/files/junit-xml/pytest/corrupt-xml.xml new file mode 100644 index 0000000..2024f2a --- /dev/null +++ b/python/test/files/junit-xml/pytest/corrupt-xml.xml @@ -0,0 +1,11 @@ + + + + + + /horovod/test/test_spark.py:1642: get_available_devices only + suppo \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.fail.annotations 
b/python/test/files/junit-xml/pytest/junit.fail.annotations new file mode 100644 index 0000000..c19a554 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.fail.annotations @@ -0,0 +1,74 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 1 skipped, 3 pass in 2s', + 'summary': + '5 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '2s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLfzExssYghA3IpgFKuPdXVEUuzez2dm5BqM8H1hTMe4jhBemiCKAs4QtIR3CderzHn' + '2UkkT3iQW25/kWWoD5CYXokExNBqPNvWuWuZu/WuIilrhsSbeuEAiexfws+HECiWEEJ90' + 'AAAA=\n', + 'annotations': [ + { + 'path': 'test/test_spark.py', + 'start_line': 819, + 'end_line': 819, + 'annotation_level': 'warning', + 'message': 'pytest/junit.fail.xml\u2003[took 7s]', + 'title': 'test_rsh_events (test.test_spark.SparkTests) failed', + 'raw_details': + 'self = \n\n ' + ' def test_rsh_events(self):\n > ' + 'self.do_test_rsh_events(3)\n\n test_spark.py:821:\n ' + ' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ' + '_ _ _ _ _ _ _ _ _ _ _ _ _ _\n test_spark.py:836: in ' + 'do_test_rsh_events\n self.do_test_rsh(command, 143, ' + 'events=events)\n test_spark.py:852: in do_test_rsh\n ' + ' self.assertEqual(expected_result, res)\n ' + ' E AssertionError: 143 != 0' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'test.test_spark.SparkTests ‑ test_get_available_devices' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 5 tests, see "Raw output" for the full list of tests.', + 'title': '5 tests found', + 'raw_details': + 'test.test_spark.SparkTests ‑ test_check_shape_compatibility\n' + 'test.test_spark.SparkTests ‑ test_get_available_devices\n' + 'test.test_spark.SparkTests ‑ test_get_col_info\n' + 'test.test_spark.SparkTests ‑ test_rsh_events\n' + 'test.test_spark.SparkTests ‑ test_rsh_with_non_zero_exit_code' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.fail.junit-xml b/python/test/files/junit-xml/pytest/junit.fail.junit-xml new file mode 100644 index 0000000..8d24644 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.fail.junit-xml @@ -0,0 +1,28 @@ + + + + + + /horovod/test/test_spark.py:1642: get_available_devices only + supported in Spark 3.0 and above + + + + + self = <test_spark.SparkTests testMethod=test_rsh_events> + + def test_rsh_events(self): + > self.do_test_rsh_events(3) + + test_spark.py:821: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + test_spark.py:836: in 
do_test_rsh_events + self.do_test_rsh(command, 143, events=events) + test_spark.py:852: in do_test_rsh + self.assertEqual(expected_result, res) + E AssertionError: 143 != 0 + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.fail.results b/python/test/files/junit-xml/pytest/junit.fail.results new file mode 100644 index 0000000..130637e --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.fail.results @@ -0,0 +1,103 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=5, + suite_skipped=1, + suite_failures=1, + suite_errors=0, + suite_time=2, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=5, + skipped=1, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.fail.xml', + test_file='test/test_spark.py', + line=1412, + class_name='test.test_spark.SparkTests', + test_name='test_check_shape_compatibility', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.435 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.fail.xml', + test_file='test/test_spark.py', + line=1641, + class_name='test.test_spark.SparkTests', + test_name='test_get_available_devices', + result='skipped', + message='get_available_devices only supported in Spark 3.0 and above', + content='/horovod/test/test_spark.py:1642: get_available_devices only\n ' + ' supported in Spark 3.0 and above\n ', + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.fail.xml', + test_file='test/test_spark.py', + line=1102, + class_name='test.test_spark.SparkTests', + test_name='test_get_col_info', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.417 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.fail.xml', + test_file='test/test_spark.py', + line=819, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_events', + result='failure', + message='self = def ' + 'test_rsh_events(self): > self.do_test_rsh_events(3) ' + 'test_spark.py:821: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ' + '_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test_spark.py:836: in ' + 'do_test_rsh_events self.do_test_rsh(command, 143, events=events) ' + 'test_spark.py:852: in do_test_rsh ' + 'self.assertEqual(expected_result, res) E AssertionError: 143 != 0', + content='self = \n\n ' + ' def test_rsh_events(self):\n > ' + 'self.do_test_rsh_events(3)\n\n test_spark.py:821:\n ' + ' _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ ' + '_ _ _ _ _ _ _ _ _ _ _ _\n test_spark.py:836: in ' + 'do_test_rsh_events\n self.do_test_rsh(command, 143, ' + 'events=events)\n test_spark.py:852: in do_test_rsh\n ' + ' self.assertEqual(expected_result, res)\n ' + 'E AssertionError: 143 != 0\n ', + stdout=None, + stderr=None, + time=7.541 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.fail.xml', + test_file='test/test_spark.py', + line=813, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_with_non_zero_exit_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.514 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.fail.xml b/python/test/files/junit-xml/pytest/junit.fail.xml new file mode 100644 index 0000000..b47e289 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.fail.xml @@ -0,0 
+1,46 @@ + + + + + + /horovod/test/test_spark.py:1642: get_available_devices only + supported in Spark 3.0 and above + + + + + self = <test_spark.SparkTests testMethod=test_rsh_events> + + def test_rsh_events(self): + > self.do_test_rsh_events(3) + + test_spark.py:821: + _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ + test_spark.py:836: in do_test_rsh_events + self.do_test_rsh(command, 143, events=events) + test_spark.py:852: in do_test_rsh + self.assertEqual(expected_result, res) + E AssertionError: 143 != 0 + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.annotations b/python/test/files/junit-xml/pytest/junit.gloo.elastic.annotations new file mode 100644 index 0000000..0c3f8c2 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.annotations @@ -0,0 +1,87 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 10 tests pass, 4 skipped in 1m 12s', + 'summary': + '14 tests\u2002\u2003\u200310 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '1m 12s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u2003\u205f\u20044 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMQQqAIBBFr' + 'yKuW1QEQZcJMaMhzRh1Fd290SxrN+/94R18Bq0cH1hTMe4C+BemgMKD3Qj7lpgWn7bugd' + 'EFKaOpi1lhJ1NeZgGaRPlQiBazwbC9xXj/grcovcSfXOJvTVpjwBPki7lF8PMCyjZFT+I' + 'AAAA=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 4 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '4 skipped tests found', + 'raw_details': + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_all_hosts_blacklisted\n' + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_min_hosts_timeout\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_all_hosts_blacklisted\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_min_hosts_timeout' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 14 tests, see "Raw output" for the full list of tests.', + 'title': '14 tests found', + 'raw_details': + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_all_hosts_blacklisted\n' + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_all_ranks_failure\n' + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_fault_tolerance_without_scaling\n' + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_hosts_added_and_removed\n' + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_min_hosts_timeout\n' + 
'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_reset_limit\n' + 'test.integration.test_elastic_tensorflow.ElasticTensorFlowTests ‑ ' + 'test_single_rank_failure\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_all_hosts_blacklisted\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_all_ranks_failure\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_fault_tolerance_without_scaling\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_hosts_added_and_removed\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_min_hosts_timeout\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_reset_limit\n' + 'test.integration.test_elastic_torch.ElasticTorchTests ‑ ' + 'test_single_rank_failure' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.junit-xml b/python/test/files/junit-xml/pytest/junit.gloo.elastic.junit-xml new file mode 100644 index 0000000..d7fc4c6 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.junit-xml @@ -0,0 +1,27 @@ + + + + + /horovod/test/integration/test_elastic_torch.py:30: This test fails due to https://github.com/horovod/horovod/issues/2030 + + + + + + /horovod/test/integration/test_elastic_torch.py:35: This test fails due to https://github.com/horovod/horovod/issues/2030 + + + + + /horovod/test/integration/test_elastic_tensorflow.py:30: This test fails due to https://github.com/horovod/horovod/issues/2030 + + + + + + /horovod/test/integration/test_elastic_tensorflow.py:35: This test fails due to https://github.com/horovod/horovod/issues/2030 + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.results b/python/test/files/junit-xml/pytest/junit.gloo.elastic.results new file mode 100644 index 0000000..a764c60 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.results @@ -0,0 +1,209 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=14, + suite_skipped=4, + suite_failures=0, + suite_errors=0, + suite_time=72, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=14, + skipped=4, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/test_elastic_torch.py', + line=29, + class_name='test.integration.test_elastic_torch.ElasticTorchTests', + test_name='test_all_hosts_blacklisted', + result='skipped', + message='This test fails due to https://github.com/horovod/horovod/issues/2030', + content='/horovod/test/integration/test_elastic_torch.py:30: This test fails ' + 'due to https://github.com/horovod/horovod/issues/2030', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=196, + class_name='test.integration.test_elastic_torch.ElasticTorchTests', + test_name='test_all_ranks_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.874 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=170, + class_name='test.integration.test_elastic_torch.ElasticTorchTests', + test_name='test_fault_tolerance_without_scaling', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.28 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=115, + class_name='test.integration.test_elastic_torch.ElasticTorchTests', + test_name='test_hosts_added_and_removed', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.847 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/test_elastic_torch.py', + line=34, + class_name='test.integration.test_elastic_torch.ElasticTorchTests', + test_name='test_min_hosts_timeout', + result='skipped', + message='This test fails due to https://github.com/horovod/horovod/issues/2030', + content='/horovod/test/integration/test_elastic_torch.py:35: This test fails ' + 'due to https://github.com/horovod/horovod/issues/2030', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=242, + class_name='test.integration.test_elastic_torch.ElasticTorchTests', + test_name='test_reset_limit', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.49 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=142, + class_name='test.integration.test_elastic_torch.ElasticTorchTests', + test_name='test_single_rank_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.264 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/test_elastic_tensorflow.py', + line=29, + class_name='test.integration.test_elastic_tensorflow.ElasticTensorFlowTests', + test_name='test_all_hosts_blacklisted', + result='skipped', + message='This test fails due to https://github.com/horovod/horovod/issues/2030', + content='/horovod/test/integration/test_elastic_tensorflow.py:30: This test ' + 'fails due to https://github.com/horovod/horovod/issues/2030', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=196, + class_name='test.integration.test_elastic_tensorflow.ElasticTensorFlowTests', + test_name='test_all_ranks_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.603 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=170, + class_name='test.integration.test_elastic_tensorflow.ElasticTensorFlowTests', + test_name='test_fault_tolerance_without_scaling', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.349 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=115, + class_name='test.integration.test_elastic_tensorflow.ElasticTensorFlowTests', + test_name='test_hosts_added_and_removed', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=12.64 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + 
test_file='test/integration/test_elastic_tensorflow.py', + line=34, + class_name='test.integration.test_elastic_tensorflow.ElasticTensorFlowTests', + test_name='test_min_hosts_timeout', + result='skipped', + message='This test fails due to https://github.com/horovod/horovod/issues/2030', + content='/horovod/test/integration/test_elastic_tensorflow.py:35: This test ' + 'fails due to https://github.com/horovod/horovod/issues/2030', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=242, + class_name='test.integration.test_elastic_tensorflow.ElasticTensorFlowTests', + test_name='test_reset_limit', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=12.21 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.xml', + test_file='test/integration/elastic_common.py', + line=142, + class_name='test.integration.test_elastic_tensorflow.ElasticTensorFlowTests', + test_name='test_single_rank_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.455 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.annotations b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.annotations new file mode 100644 index 0000000..7de6020 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.annotations @@ -0,0 +1,103 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 20 tests pass, 2 skipped in 10m 27s', + 'summary': + '22 tests\u2002\u2003\u200320 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '10m 27s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u2003\u205f\u20042 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMMQ6AIAxFr' + '0KYHZRBEy9jCGpsFDAFJuPdLaigW9/7zTv4DNvkeM+ainEXwGcYA0oP1hC2oiNBk4+jEC' + '8MLigVTV3MCns0WcwSNhLlY0K0+BgMJhfj/QveovQSf3KJvzVltQZP8FzMLZKfF82Ojyn' + 'jAAAA\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 2 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '2 skipped tests found', + 'raw_details': + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_down_by_discovery\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_hosts_added_and_removed' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 22 tests, see "Raw output" for the full list of tests.', + 'title': '22 tests found', + 'raw_details': + 
'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_down_by_discovery\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_down_by_exception\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_no_spark_black_list\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_spark_blacklist_no_executor_reuse\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_spark_blacklist_no_executor_reuse_in_app\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ ' + 'test_auto_scale_spark_blacklist_no_executor_reuse_same_task\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_spark_blacklist_no_node_reuse\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_spark_blacklist_no_node_reuse_in_app\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_auto_scale_up\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_all_hosts_lost\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_exception_all_ranks\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_exception_single_rank\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_exception_with_min_hosts_timeout\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_hosts_added_and_removed\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_no_spark_blacklist\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_spark_blacklist_no_executor_reuse\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_executor_reuse_in_app\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_executor_reuse_same_task\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_spark_blacklist_no_node_reuse\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_node_reuse_in_app\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_fault_tolerance_unused_hosts_added_and_removed\n' + 'test.integration.test_elastic_spark_tensorflow.ElasticSparkTensorfl' + 'owTests ‑ test_happy_run' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.junit-xml b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.junit-xml new file mode 100644 index 0000000..ead436e --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.junit-xml @@ -0,0 +1,31 @@ + + + + + /horovod/test/integration/test_elastic_spark_tensorflow.py:35: This test fails due to https://github.com/horovod/horovod/issues/1994 + + + + + + + + + + + + + + + 
/horovod/test/integration/test_elastic_spark_tensorflow.py:32: This test fails due to https://github.com/horovod/horovod/issues/1994 + + + + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.results b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.results new file mode 100644 index 0000000..ea9dd16 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.results @@ -0,0 +1,333 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=22, + suite_skipped=2, + suite_failures=0, + suite_errors=0, + suite_time=627, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=22, + skipped=2, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/test_elastic_spark_tensorflow.py', + line=34, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_down_by_discovery', + result='skipped', + message='This test fails due to https://github.com/horovod/horovod/issues/1994', + content='/horovod/test/integration/test_elastic_spark_tensorflow.py:35: This ' + 'test fails due to https://github.com/horovod/horovod/issues/1994', + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=681, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_down_by_exception', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=22.073 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=710, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_no_spark_black_list', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=55.445 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_spark_blacklist_no_executor_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=49.354 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_spark_blacklist_no_executor_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=41.024 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' 
+ 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_spark_blacklist_no_executor_reuse_same_task', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=40.743 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_spark_blacklist_no_node_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=40.671 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_spark_blacklist_no_node_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=40.612 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=626, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_auto_scale_up', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=30.312 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=613, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_all_hosts_lost', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=12.623 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=584, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_exception_all_ranks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=15.757 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=558, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_exception_single_rank', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.113 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=598, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_exception_with_min_hosts_timeout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.65 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/test_elastic_spark_tensorflow.py', + line=31, + class_name='test.integration.test_elastic_spark_tensorflow.' 
+ 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_hosts_added_and_removed', + result='skipped', + message='This test fails due to https://github.com/horovod/horovod/issues/1994', + content='/horovod/test/integration/test_elastic_spark_tensorflow.py:32: This ' + 'test fails due to https://github.com/horovod/horovod/issues/1994', + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=503, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_no_spark_blacklist', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=25.879 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_spark_blacklist_no_executor_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.382 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_spark_blacklist_no_executor_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.381 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_spark_blacklist_no_executor_reuse_same_task', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.389 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_spark_blacklist_no_node_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.403 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_spark_blacklist_no_node_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.404 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=472, + class_name='test.integration.test_elastic_spark_tensorflow.' 
+ 'ElasticSparkTensorflowTests', + test_name='test_fault_tolerance_unused_hosts_added_and_removed', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=46.667 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.tf.xml', + test_file='test/integration/elastic_spark_common.py', + line=394, + class_name='test.integration.test_elastic_spark_tensorflow.' + 'ElasticSparkTensorflowTests', + test_name='test_happy_run', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=21.082 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.xml b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.xml new file mode 100644 index 0000000..54e7536 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.tf.xml @@ -0,0 +1 @@ +/horovod/test/integration/test_elastic_spark_tensorflow.py:35: This test fails due to https://github.com/horovod/horovod/issues/1994/horovod/test/integration/test_elastic_spark_tensorflow.py:32: This test fails due to https://github.com/horovod/horovod/issues/1994 \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.annotations b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.annotations new file mode 100644 index 0000000..d23a4c5 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.annotations @@ -0,0 +1,84 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 22 tests pass in 11m 10s', + 'summary': + '22 tests\u2002\u2003\u200322 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '11m 10s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u2003\u205f\u20040 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/12MOw6AIBAFr' + '0KoLdRCEy9jCErc+MEsUBnv7oKIYrczbzMHV7CMhnesKhg3DmyCwaGwoDfCpi1J0GT9WN' + 'cP9MZJ+TMz7GTSf68ELJkYETVGg25LRX9nwVu8vcCfXOBvTep1BUsQL2Ymwc8LUe9HxOM' + 'AAAA=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 22 tests, see "Raw output" for the full list of tests.', + 'title': '22 tests found', + 'raw_details': + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_down_by_discovery\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_down_by_exception\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_no_spark_black_list\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_spark_blacklist_no_executor_reuse\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 
'test_auto_scale_spark_blacklist_no_executor_reuse_in_app\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_spark_blacklist_no_executor_reuse_same_task\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_spark_blacklist_no_node_reuse\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_spark_blacklist_no_node_reuse_in_app\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_auto_scale_up\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_all_hosts_lost\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_exception_all_ranks\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_exception_single_rank\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_exception_with_min_hosts_timeout\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_hosts_added_and_removed\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_no_spark_blacklist\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_executor_reuse\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_executor_reuse_in_app\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_executor_reuse_same_task\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_node_reuse\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_spark_blacklist_no_node_reuse_in_app\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_fault_tolerance_unused_hosts_added_and_removed\n' + 'test.integration.test_elastic_spark_torch.ElasticSparkTorchTests ‑ ' + 'test_happy_run' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.junit-xml b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.junit-xml new file mode 100644 index 0000000..c48929b --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.junit-xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.results b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.results new file mode 100644 index 0000000..ccda77b --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.results @@ -0,0 +1,309 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=22, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=670, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=22, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=653, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + 
test_name='test_auto_scale_down_by_discovery', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=26.583 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=681, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_down_by_exception', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=20.709 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=710, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_no_spark_black_list', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=58.313 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_spark_blacklist_no_executor_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=39.759 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_spark_blacklist_no_executor_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=39.508 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_spark_blacklist_no_executor_reuse_same_task', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=39.404 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_spark_blacklist_no_node_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=40.36 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_spark_blacklist_no_node_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=39.424 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=626, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_auto_scale_up', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=27.592 + ), + publish.unittestresults.UnitTestCase( + 
result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=613, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_all_hosts_lost', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=11.068 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=584, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_exception_all_ranks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=14.72 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=558, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_exception_single_rank', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=23.053 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=598, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_exception_with_min_hosts_timeout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=25.401 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=414, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_hosts_added_and_removed', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=48.786 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=503, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_no_spark_blacklist', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=22.948 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_spark_blacklist_no_executor_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=25.312 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_spark_blacklist_no_executor_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=25.432 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + 
test_name='test_fault_tolerance_spark_blacklist_no_executor_reuse_same_task', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=25.427 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_spark_blacklist_no_node_reuse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=25.592 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='../usr/local/lib/python3.6/dist-packages/mock/mock.py', + line=1363, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_spark_blacklist_no_node_reuse_in_app', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=25.494 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=472, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_fault_tolerance_unused_hosts_added_and_removed', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=45.176 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.elastic.spark.torch.xml', + test_file='test/integration/elastic_spark_common.py', + line=394, + class_name='test.integration.test_elastic_spark_torch.ElasticSparkTorchTests', + test_name='test_happy_run', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=19.518 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.xml b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.xml new file mode 100644 index 0000000..8b1b4b3 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.spark.torch.xml @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.elastic.xml b/python/test/files/junit-xml/pytest/junit.gloo.elastic.xml new file mode 100644 index 0000000..ca0c7d3 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.elastic.xml @@ -0,0 +1 @@ +/horovod/test/integration/test_elastic_torch.py:30: This test fails due to https://github.com/horovod/horovod/issues/2030/horovod/test/integration/test_elastic_torch.py:35: This test fails due to https://github.com/horovod/horovod/issues/2030/horovod/test/integration/test_elastic_tensorflow.py:30: This test fails due to https://github.com/horovod/horovod/issues/2030/horovod/test/integration/test_elastic_tensorflow.py:35: This test fails due to https://github.com/horovod/horovod/issues/2030 \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.standalone.annotations b/python/test/files/junit-xml/pytest/junit.gloo.standalone.annotations new file mode 100644 index 0000000..6489a01 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.standalone.annotations @@ -0,0 +1,179 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 80 tests pass, 17 skipped in 3m 25s', + 'summary': + '97 tests\u2002\u2003\u200380 ' + 
'[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '3m 25s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u200317 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMTQqAIBBGr' + 'yKuW1QQ/VwmxIqGSmPUVXT3xrK03bz3De/gE6yj4R0rMsaNA/vB4FBY0IqwzCsSNFk/tv' + 'ULvXFSkmnyaBbYfSD+TAJWMvFlRNQYDDr1Jf39Kz4iCd4i6d2c5qTeNrAE4WJmFvy8ADN' + 'K9FzlAAAA\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 17 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '17 skipped tests found', + 'raw_details': + 'test.test_run.RunTests ‑ test_js_run\ntest.test_run.RunTests ‑ ' + 'test_mpi_run_full\ntest.test_run.RunTests ‑ test_mpi_run_minimal\n' + 'test.test_run.RunTests ‑ test_mpi_run_on_large_cluster\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_both_paths\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_both_pythonpaths\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_env_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_env_pythonpath\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_non_zero_exit\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_os_environ\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_sys_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_sys_pythonpath\n' + 'test.test_run.RunTests ‑ test_mpi_run_without_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_without_pythonpath\n' + 'test.test_spark.SparkTests ‑ test_get_available_devices\n' + 'test.test_spark.SparkTests ‑ test_happy_run_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_timeout_with_mpi' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 97 tests, see "Raw output" for the full list of tests.', + 'title': '97 tests found', + 'raw_details': + 'test.test_run.RunTests ‑ test_autotune_args\n' + 'test.test_run.RunTests ‑ test_autotuning_with_fixed_param\n' + 'test.test_run.RunTests ‑ test_config_file\ntest.test_run.RunTests ' + '‑ test_config_file_override_args\ntest.test_run.RunTests ‑ ' + 'test_generate_jsrun_rankfile\ntest.test_run.RunTests ‑ ' + 'test_get_mpi_implementation\ntest.test_run.RunTests ‑ ' + 'test_gloo_run_minimal\ntest.test_run.RunTests ‑ ' + 'test_gloo_run_with_os_environ\ntest.test_run.RunTests ‑ test_hash\n' + 'test.test_run.RunTests ‑ test_horovodrun_hostfile\n' + 'test.test_run.RunTests ‑ test_host_hash\ntest.test_run.RunTests ‑ ' + 'test_in_thread_args\ntest.test_run.RunTests ‑ test_js_run\n' + 'test.test_run.RunTests ‑ test_library_args\ntest.test_run.RunTests ' + '‑ test_logging_args\ntest.test_run.RunTests ‑ test_mpi_run_full\n' + 'test.test_run.RunTests ‑ test_mpi_run_minimal\n' + 'test.test_run.RunTests ‑ test_mpi_run_on_large_cluster\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_both_paths\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_both_pythonpaths\n' + 'test.test_run.RunTests ‑ 
test_mpi_run_with_env_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_env_pythonpath\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_non_zero_exit\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_os_environ\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_sys_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_sys_pythonpath\n' + 'test.test_run.RunTests ‑ test_mpi_run_without_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_without_pythonpath\n' + 'test.test_run.RunTests ‑ test_on_event\ntest.test_run.RunTests ‑ ' + 'test_params_args\ntest.test_run.RunTests ‑ test_run_controller\n' + 'test.test_run.RunTests ‑ test_run_with_jsrun\n' + 'test.test_run.RunTests ‑ ' + 'test_safe_shell_exec_captures_last_line_wo_eol\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_captures_stderr\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_captures_stdout\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_interrupts_on_event\n' + 'test.test_run.RunTests ‑ ' + 'test_safe_shell_exec_interrupts_on_parent_shutdown\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_returns_exit_code\n' + 'test.test_run.RunTests ‑ test_stall_check_args\n' + 'test.test_run.RunTests ‑ test_timeline_args\n' + 'test.test_run.RunTests ‑ test_validate_config_args\n' + 'test.test_spark.SparkTests ‑ test_check_shape_compatibility\n' + 'test.test_spark.SparkTests ‑ test_df_cache\n' + 'test.test_spark.SparkTests ‑ test_driver_common_interfaces\n' + 'test.test_spark.SparkTests ‑ test_driver_common_interfaces_fails\n' + 'test.test_spark.SparkTests ‑ ' + 'test_driver_common_interfaces_from_settings\n' + 'test.test_spark.SparkTests ‑ test_driver_set_local_rank_to_index\n' + 'test.test_spark.SparkTests ‑ test_get_available_devices\n' + 'test.test_spark.SparkTests ‑ test_get_col_info\n' + 'test.test_spark.SparkTests ‑ test_get_col_info_error_bad_shape\n' + 'test.test_spark.SparkTests ‑ test_get_col_info_error_bad_size\n' + 'test.test_spark.SparkTests ‑ test_get_metadata\n' + 'test.test_spark.SparkTests ‑ test_gloo_exec_fn\n' + 'test.test_spark.SparkTests ‑ ' + 'test_gloo_exec_fn_provides_driver_with_local_rank\n' + 'test.test_spark.SparkTests ‑ test_happy_run_elastic\n' + 'test.test_spark.SparkTests ‑ test_happy_run_with_gloo\n' + 'test.test_spark.SparkTests ‑ test_happy_run_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_hdfs_store_parse_url\n' + 'test.test_spark.SparkTests ‑ test_host_hash\n' + 'test.test_spark.SparkTests ‑ ' + 'test_mpi_exec_fn_provides_driver_with_local_rank\n' + 'test.test_spark.SparkTests ‑ test_mpirun_exec_fn\n' + 'test.test_spark.SparkTests ‑ test_mpirun_not_found\n' + 'test.test_spark.SparkTests ‑ test_prepare_data_compress_sparse\n' + 'test.test_spark.SparkTests ‑ test_prepare_data_no_compression\n' + 'test.test_spark.SparkTests ‑ test_rsh_event\n' + 'test.test_spark.SparkTests ‑ test_rsh_events\n' + 'test.test_spark.SparkTests ‑ test_rsh_with_non_zero_exit_code\n' + 'test.test_spark.SparkTests ‑ test_rsh_with_zero_exit_code\n' + 'test.test_spark.SparkTests ‑ test_spark_driver_host_discovery\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_defaults_num_proc_to_spark_cores_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_defaults_num_proc_to_spark_cores_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_does_not_default_env_to_os_env_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_does_not_default_env_to_os_env_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_num_proc_precedes_spark_cores_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 
'test_spark_run_num_proc_precedes_spark_cores_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_spark_run_with_gloo\n' + 'test.test_spark.SparkTests ‑ test_spark_run_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_with_non_zero_exit_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_with_non_zero_exit_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_with_os_environ_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_spark_run_with_path_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_spark_task_service_abort_command\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_task_service_abort_no_command\n' + 'test.test_spark.SparkTests ‑ test_spark_task_service_env\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_task_service_execute_command\n' + 'test.test_spark.SparkTests ‑ test_sync_hdfs_store\n' + 'test.test_spark.SparkTests ‑ test_task_fn_run_commands\n' + 'test.test_spark.SparkTests ‑ test_task_fn_run_gloo_exec\n' + 'test.test_spark.SparkTests ‑ ' + 'test_task_service_check_for_command_start\n' + 'test.test_spark.SparkTests ‑ ' + 'test_task_service_wait_for_command_start_with_timeout\n' + 'test.test_spark.SparkTests ‑ ' + 'test_task_service_wait_for_command_start_without_timeout\n' + 'test.test_spark.SparkTests ‑ test_timeout_with_gloo\n' + 'test.test_spark.SparkTests ‑ test_timeout_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_to_list\n' + 'test.test_spark.SparkTests ‑ test_train_val_split_col_boolean\n' + 'test.test_spark.SparkTests ‑ test_train_val_split_col_integer\n' + 'test.test_spark.SparkTests ‑ test_train_val_split_ratio' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.standalone.junit-xml b/python/test/files/junit-xml/pytest/junit.gloo.standalone.junit-xml new file mode 100644 index 0000000..f3127af --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.standalone.junit-xml @@ -0,0 +1,136 @@ + + + + + + + + + + + /horovod/test/test_spark.py:1638: get_available_devices only supported in Spark 3.0 and above + + + + + + + + + + + /horovod/test/test_spark.py:330: Open MPI is not available + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + /horovod/test/test_spark.py:384: Open MPI is not available + + + + + + + + + + + + + + + + + + + /horovod/test/test_run.py:822: MPI is not available + + + + + /horovod/test/test_run.py:626: MPI is not available + + + /horovod/test/test_run.py:548: MPI is not available + + + /horovod/test/test_run.py:585: MPI is not available + + + /horovod/test/test_run.py:730: MPI is not available + + + /horovod/test/test_run.py:706: MPI is not available + + + /horovod/test/test_run.py:724: MPI is not available + + + /horovod/test/test_run.py:700: MPI is not available + + + /horovod/test/test_run.py:755: MPI is not available + + + /horovod/test/test_run.py:773: MPI is not available + + + /horovod/test/test_run.py:718: MPI is not available + + + /horovod/test/test_run.py:694: MPI is not available + + + /horovod/test/test_run.py:712: MPI is not available + + + /horovod/test/test_run.py:688: MPI is not available + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.gloo.standalone.results b/python/test/files/junit-xml/pytest/junit.gloo.standalone.results new file mode 100644 index 0000000..b6a2ab0 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.standalone.results @@ -0,0 +1,1285 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=97, + 
suite_skipped=17, + suite_failures=0, + suite_errors=0, + suite_time=205, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=97, + skipped=17, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1408, + class_name='test.test_spark.SparkTests', + test_name='test_check_shape_compatibility', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.389 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1016, + class_name='test.test_spark.SparkTests', + test_name='test_df_cache', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.311 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=119, + class_name='test.test_spark.SparkTests', + test_name='test_driver_common_interfaces', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.508 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=140, + class_name='test.test_spark.SparkTests', + test_name='test_driver_common_interfaces_fails', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.509 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=127, + class_name='test.test_spark.SparkTests', + test_name='test_driver_common_interfaces_from_settings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.507 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=152, + class_name='test.test_spark.SparkTests', + test_name='test_driver_set_local_rank_to_index', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.51 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1637, + class_name='test.test_spark.SparkTests', + test_name='test_get_available_devices', + result='skipped', + message='get_available_devices only supported in Spark 3.0 and above', + content='/horovod/test/test_spark.py:1638: get_available_devices only ' + 'supported in Spark 3.0 and above', + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1098, + class_name='test.test_spark.SparkTests', + test_name='test_get_col_info', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.967 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1147, + class_name='test.test_spark.SparkTests', + test_name='test_get_col_info_error_bad_shape', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.228 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1159, + class_name='test.test_spark.SparkTests', + test_name='test_get_col_info_error_bad_size', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.974 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1216, + class_name='test.test_spark.SparkTests', + test_name='test_get_metadata', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.566 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=921, + class_name='test.test_spark.SparkTests', + test_name='test_gloo_exec_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=941, + class_name='test.test_spark.SparkTests', + test_name='test_gloo_exec_fn_provides_driver_with_local_rank', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.052 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=363, + class_name='test.test_spark.SparkTests', + test_name='test_happy_run_elastic', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=11.536 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=338, + class_name='test.test_spark.SparkTests', + test_name='test_happy_run_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=10.494 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=329, + class_name='test.test_spark.SparkTests', + test_name='test_happy_run_with_mpi', + result='skipped', + message='Open MPI is not available', + content='/horovod/test/test_spark.py:330: Open MPI is not available', + stdout=None, + stderr=None, + time=1.415 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1505, + class_name='test.test_spark.SparkTests', + test_name='test_hdfs_store_parse_url', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=91, + class_name='test.test_spark.SparkTests', + test_name='test_host_hash', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=936, + class_name='test.test_spark.SparkTests', + test_name='test_mpi_exec_fn_provides_driver_with_local_rank', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.034 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=849, + class_name='test.test_spark.SparkTests', + test_name='test_mpirun_exec_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.011 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=414, + 
class_name='test.test_spark.SparkTests', + test_name='test_mpirun_not_found', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.391 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1336, + class_name='test.test_spark.SparkTests', + test_name='test_prepare_data_compress_sparse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.877 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1265, + class_name='test.test_spark.SparkTests', + test_name='test_prepare_data_no_compression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.206 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=812, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_event', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.517 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=815, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_events', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.539 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=809, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_with_non_zero_exit_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.513 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=806, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_with_zero_exit_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.513 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=993, + class_name='test.test_spark.SparkTests', + test_name='test_spark_driver_host_discovery', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.509 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=492, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_defaults_num_proc_to_spark_cores_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.829 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=485, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_defaults_num_proc_to_spark_cores_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.723 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=512, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_does_not_default_env_to_os_env_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.823 + ), + publish.unittestresults.UnitTestCase( + 
result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=505, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_does_not_default_env_to_os_env_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.85 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=459, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_num_proc_precedes_spark_cores_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.875 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=452, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_num_proc_precedes_spark_cores_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.889 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=433, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.972 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=426, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.948 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=548, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_non_zero_exit_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.733 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=539, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_non_zero_exit_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.84 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=529, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_os_environ_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.87 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=472, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_path_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.828 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1614, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_abort_command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.514 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1630, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_abort_no_command', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.713 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1568, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_env', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.011 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1608, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_execute_command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.711 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1466, + class_name='test.test_spark.SparkTests', + test_name='test_sync_hdfs_store', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=262, + class_name='test.test_spark.SparkTests', + test_name='test_task_fn_run_commands', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.407 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=295, + class_name='test.test_spark.SparkTests', + test_name='test_task_fn_run_gloo_exec', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.347 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=205, + class_name='test.test_spark.SparkTests', + test_name='test_task_service_check_for_command_start', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.029 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=183, + class_name='test.test_spark.SparkTests', + test_name='test_task_service_wait_for_command_start_with_timeout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.017 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=175, + class_name='test.test_spark.SparkTests', + test_name='test_task_service_wait_for_command_start_without_timeout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.509 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=392, + class_name='test.test_spark.SparkTests', + test_name='test_timeout_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.259 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=383, + class_name='test.test_spark.SparkTests', + test_name='test_timeout_with_mpi', + result='skipped', + message='Open MPI is not available', + content='/horovod/test/test_spark.py:384: Open MPI is not available', + stdout=None, + stderr=None, + time=1.325 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + 
test_file='test/test_spark.py', + line=1649, + class_name='test.test_spark.SparkTests', + test_name='test_to_list', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1201, + class_name='test.test_spark.SparkTests', + test_name='test_train_val_split_col_boolean', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.255 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1186, + class_name='test.test_spark.SparkTests', + test_name='test_train_val_split_col_integer', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.333 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_spark.py', + line=1171, + class_name='test.test_spark.SparkTests', + test_name='test_train_val_split_ratio', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.86 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=74, + class_name='test.test_run.RunTests', + test_name='test_autotune_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=93, + class_name='test.test_run.RunTests', + test_name='test_autotuning_with_fixed_param', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=165, + class_name='test.test_run.RunTests', + test_name='test_config_file', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.009 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=207, + class_name='test.test_run.RunTests', + test_name='test_config_file_override_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=864, + class_name='test.test_run.RunTests', + test_name='test_generate_jsrun_rankfile', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=427, + class_name='test.test_run.RunTests', + test_name='test_get_mpi_implementation', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=790, + class_name='test.test_run.RunTests', + test_name='test_gloo_run_minimal', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.194 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=801, + 
class_name='test.test_run.RunTests', + test_name='test_gloo_run_with_os_environ', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.2 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=415, + class_name='test.test_run.RunTests', + test_name='test_hash', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=809, + class_name='test.test_run.RunTests', + test_name='test_horovodrun_hostfile', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=419, + class_name='test.test_run.RunTests', + test_name='test_host_hash', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=224, + class_name='test.test_run.RunTests', + test_name='test_in_thread_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=821, + class_name='test.test_run.RunTests', + test_name='test_js_run', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:822: MPI is not available', + stdout=None, + stderr=None, + time=0.537 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=139, + class_name='test.test_run.RunTests', + test_name='test_library_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=154, + class_name='test.test_run.RunTests', + test_name='test_logging_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=625, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_full', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:626: MPI is not available', + stdout=None, + stderr=None, + time=0.175 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=547, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_minimal', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:548: MPI is not available', + stdout=None, + stderr=None, + time=0.171 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=584, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_on_large_cluster', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:585: MPI is not available', + stdout=None, + stderr=None, + time=0.172 + ), + publish.unittestresults.UnitTestCase( 
+ result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=729, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_both_paths', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:730: MPI is not available', + stdout=None, + stderr=None, + time=0.171 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=705, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_both_pythonpaths', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:706: MPI is not available', + stdout=None, + stderr=None, + time=0.177 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=723, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_env_path', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:724: MPI is not available', + stdout=None, + stderr=None, + time=0.18 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=699, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_env_pythonpath', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:700: MPI is not available', + stdout=None, + stderr=None, + time=0.181 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=754, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_non_zero_exit', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:755: MPI is not available', + stdout=None, + stderr=None, + time=0.189 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=772, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_os_environ', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:773: MPI is not available', + stdout=None, + stderr=None, + time=0.184 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=717, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_sys_path', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:718: MPI is not available', + stdout=None, + stderr=None, + time=0.182 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=693, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_sys_pythonpath', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:694: MPI is not available', + stdout=None, + stderr=None, + time=0.188 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=711, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_without_path', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:712: MPI is not available', + stdout=None, + stderr=None, + time=0.194 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + 
line=687, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_without_pythonpath', + result='skipped', + message='MPI is not available', + content='/horovod/test/test_run.py:688: MPI is not available', + stdout=None, + stderr=None, + time=0.192 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=249, + class_name='test.test_run.RunTests', + test_name='test_on_event', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.218 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=57, + class_name='test.test_run.RunTests', + test_name='test_params_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=453, + class_name='test.test_run.RunTests', + test_name='test_run_controller', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.706 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=896, + class_name='test.test_run.RunTests', + test_name='test_run_with_jsrun', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=350, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_captures_last_line_wo_eol', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.181 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=347, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_captures_stderr', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.177 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=344, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_captures_stdout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.182 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=357, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_interrupts_on_event', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.029 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=371, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_interrupts_on_parent_shutdown', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.209 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=354, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_returns_exit_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.209 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', 
+ test_file='test/test_run.py', + line=119, + class_name='test.test_run.RunTests', + test_name='test_stall_check_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=108, + class_name='test.test_run.RunTests', + test_name='test_timeline_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.standalone.xml', + test_file='test/test_run.py', + line=217, + class_name='test.test_run.RunTests', + test_name='test_validate_config_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.standalone.xml b/python/test/files/junit-xml/pytest/junit.gloo.standalone.xml new file mode 100644 index 0000000..169af68 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.standalone.xml @@ -0,0 +1 @@ +/horovod/test/test_spark.py:1638: get_available_devices only supported in Spark 3.0 and above/horovod/test/test_spark.py:330: Open MPI is not available/horovod/test/test_spark.py:384: Open MPI is not available/horovod/test/test_run.py:822: MPI is not available/horovod/test/test_run.py:626: MPI is not available/horovod/test/test_run.py:548: MPI is not available/horovod/test/test_run.py:585: MPI is not available/horovod/test/test_run.py:730: MPI is not available/horovod/test/test_run.py:706: MPI is not available/horovod/test/test_run.py:724: MPI is not available/horovod/test/test_run.py:700: MPI is not available/horovod/test/test_run.py:755: MPI is not available/horovod/test/test_run.py:773: MPI is not available/horovod/test/test_run.py:718: MPI is not available/horovod/test/test_run.py:694: MPI is not available/horovod/test/test_run.py:712: MPI is not available/horovod/test/test_run.py:688: MPI is not available \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.static.annotations b/python/test/files/junit-xml/pytest/junit.gloo.static.annotations new file mode 100644 index 0000000..b34fc4c --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.static.annotations @@ -0,0 +1,123 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 12 tests pass, 12 skipped in 1m 9s', + 'summary': + '24 tests\u2002\u2003\u200312 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '1m 9s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u200312 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMTQqAIBBGr' + 'yKuW1REUJcJsaQhzRh1Fd29sR+z3bz3DW/nCvTkeM+qgnEXwCcYAwoPdiVsO2JafNzq5o' + 
'XBBSnjd/2ZBba/UQI0mTKJCdHiYzCsKRnvX/EWWfASWe/iPCetMeAJnou5WfDjBP7Rpw/' + 'kAAAA\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 12 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '12 skipped tests found', + 'raw_details': + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_remote_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_remote_func' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 24 tests, see "Raw output" for the full list of tests.', + 'title': '24 tests found', + 'raw_details': + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_remote_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_remote_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_remote_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 
'test_run_success_mpi_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_remote_func' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.static.junit-xml b/python/test/files/junit-xml/pytest/junit.gloo.static.junit-xml new file mode 100644 index 0000000..47f7f43 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.static.junit-xml @@ -0,0 +1,53 @@ + + + + + + + + + + + /horovod/test/integration/test_static_run.py:149: MPI is not available + + + /horovod/test/integration/test_static_run.py:149: MPI is not available + + + /horovod/test/integration/test_static_run.py:149: MPI is not available + + + /horovod/test/integration/test_static_run.py:149: MPI is not available + + + /horovod/test/integration/test_static_run.py:149: MPI is not available + + + /horovod/test/integration/test_static_run.py:149: MPI is not available + + + + + + + + + /horovod/test/integration/test_static_run.py:136: MPI is not available + + + /horovod/test/integration/test_static_run.py:136: MPI is not available + + + /horovod/test/integration/test_static_run.py:136: MPI is not available + + + /horovod/test/integration/test_static_run.py:136: MPI is not available + + + /horovod/test/integration/test_static_run.py:136: MPI is not available + + + /horovod/test/integration/test_static_run.py:136: MPI is not available + + + diff --git a/python/test/files/junit-xml/pytest/junit.gloo.static.results b/python/test/files/junit-xml/pytest/junit.gloo.static.results new file mode 100644 index 0000000..209433c --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.static.results @@ -0,0 +1,347 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=24, + suite_skipped=12, + suite_failures=0, + suite_errors=0, + suite_time=69, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=24, + skipped=12, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_local_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.284 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_local_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.885 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_mixed_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.615 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + 
line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_mixed_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.338 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_remote_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.68 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_remote_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.279 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_local_cmd', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:149: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.422 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_local_func', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:149: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.329 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_mixed_cmd', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:149: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.32 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_mixed_func', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:149: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.324 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_remote_cmd', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:149: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.318 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_remote_func', + result='skipped', + message='MPI is not available', + 
content='/horovod/test/integration/test_static_run.py:149: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.321 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_local_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.914 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_local_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.739 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_mixed_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.611 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_mixed_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.809 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_remote_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.597 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_remote_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.788 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_local_cmd', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:136: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.377 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_local_func', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:136: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.361 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_mixed_cmd', + result='skipped', + message='MPI is not available', + 
content='/horovod/test/integration/test_static_run.py:136: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.373 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_mixed_func', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:136: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.436 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_remote_cmd', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:136: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.351 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.gloo.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_remote_func', + result='skipped', + message='MPI is not available', + content='/horovod/test/integration/test_static_run.py:136: MPI is not ' + 'available', + stdout=None, + stderr=None, + time=1.425 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.gloo.static.xml b/python/test/files/junit-xml/pytest/junit.gloo.static.xml new file mode 100644 index 0000000..5c574fc --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.gloo.static.xml @@ -0,0 +1 @@ +/horovod/test/integration/test_static_run.py:149: MPI is not available/horovod/test/integration/test_static_run.py:149: MPI is not available/horovod/test/integration/test_static_run.py:149: MPI is not available/horovod/test/integration/test_static_run.py:149: MPI is not available/horovod/test/integration/test_static_run.py:149: MPI is not available/horovod/test/integration/test_static_run.py:149: MPI is not available/horovod/test/integration/test_static_run.py:136: MPI is not available/horovod/test/integration/test_static_run.py:136: MPI is not available/horovod/test/integration/test_static_run.py:136: MPI is not available/horovod/test/integration/test_static_run.py:136: MPI is not available/horovod/test/integration/test_static_run.py:136: MPI is not available/horovod/test/integration/test_static_run.py:136: MPI is not available \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.integration.annotations b/python/test/files/junit-xml/pytest/junit.mpi.integration.annotations new file mode 100644 index 0000000..d1498f0 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.integration.annotations @@ -0,0 +1,44 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 3 tests pass in 15s', + 'summary': + '3 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '15s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + 
'[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0K2ttAYGy9jCGLciGAWqIx3d/EL3Zt5yewwodEeetFUAnzE8MEYSQZ0NmHHzE9IX/vuwU' + 'elSrHgxqL+xCTRFEITOXoMRfv20sxzN/+1i7PYxXlLuXXFwPAs4WcJxwk6KM9l3gAAAA=' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': + 'test.test_interactiverun.InteractiveRunTests ‑ test_failed_run\n' + 'test.test_interactiverun.InteractiveRunTests ‑ test_happy_run\n' + 'test.test_interactiverun.InteractiveRunTests ‑ ' + 'test_happy_run_elastic' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.integration.junit-xml b/python/test/files/junit-xml/pytest/junit.mpi.integration.junit-xml new file mode 100644 index 0000000..44ac603 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.integration.junit-xml @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.mpi.integration.results b/python/test/files/junit-xml/pytest/junit.mpi.integration.results new file mode 100644 index 0000000..40b579c --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.integration.results @@ -0,0 +1,62 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=3, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=15, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.integration.xml', + test_file='test/test_interactiverun.py', + line=78, + class_name='test.test_interactiverun.InteractiveRunTests', + test_name='test_failed_run', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.386 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.integration.xml', + test_file='test/test_interactiverun.py', + line=35, + class_name='test.test_interactiverun.InteractiveRunTests', + test_name='test_happy_run', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.012 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.integration.xml', + test_file='test/test_interactiverun.py', + line=63, + class_name='test.test_interactiverun.InteractiveRunTests', + test_name='test_happy_run_elastic', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.898 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.integration.xml b/python/test/files/junit-xml/pytest/junit.mpi.integration.xml new file mode 100644 index 0000000..29d3389 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.integration.xml @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.standalone.annotations b/python/test/files/junit-xml/pytest/junit.mpi.standalone.annotations new file 
mode 100644 index 0000000..e243980 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.standalone.annotations @@ -0,0 +1,163 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 96 tests pass, 1 skipped in 3m 39s', + 'summary': + '97 tests\u2002\u2003\u200396 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '3m 39s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u2003\u205f\u20041 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLdRCo5cxBCFu/GAWqIx3d1FE7N7MS+bgGhZlec+qgnHrwSUYPQoHZiOsq44EXS6cXf' + 'vCYL2UwTSfmWGPgUdoAQuJMgmFaDAa9Fsqhv0LPuLr3Zzlbs5r0qwrOIK4mJ0EPy/3HdY' + 'E4wAAAA==\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'test.test_spark.SparkTests ‑ test_get_available_devices' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 97 tests, see "Raw output" for the full list of tests.', + 'title': '97 tests found', + 'raw_details': + 'test.test_run.RunTests ‑ test_autotune_args\n' + 'test.test_run.RunTests ‑ test_autotuning_with_fixed_param\n' + 'test.test_run.RunTests ‑ test_config_file\ntest.test_run.RunTests ' + '‑ test_config_file_override_args\ntest.test_run.RunTests ‑ ' + 'test_generate_jsrun_rankfile\ntest.test_run.RunTests ‑ ' + 'test_get_mpi_implementation\ntest.test_run.RunTests ‑ ' + 'test_gloo_run_minimal\ntest.test_run.RunTests ‑ ' + 'test_gloo_run_with_os_environ\ntest.test_run.RunTests ‑ test_hash\n' + 'test.test_run.RunTests ‑ test_horovodrun_hostfile\n' + 'test.test_run.RunTests ‑ test_host_hash\ntest.test_run.RunTests ‑ ' + 'test_in_thread_args\ntest.test_run.RunTests ‑ test_js_run\n' + 'test.test_run.RunTests ‑ test_library_args\ntest.test_run.RunTests ' + '‑ test_logging_args\ntest.test_run.RunTests ‑ test_mpi_run_full\n' + 'test.test_run.RunTests ‑ test_mpi_run_minimal\n' + 'test.test_run.RunTests ‑ test_mpi_run_on_large_cluster\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_both_paths\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_both_pythonpaths\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_env_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_env_pythonpath\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_non_zero_exit\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_os_environ\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_sys_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_with_sys_pythonpath\n' + 'test.test_run.RunTests ‑ test_mpi_run_without_path\n' + 'test.test_run.RunTests ‑ test_mpi_run_without_pythonpath\n' + 'test.test_run.RunTests ‑ test_on_event\ntest.test_run.RunTests ‑ ' + 
'test_params_args\ntest.test_run.RunTests ‑ test_run_controller\n' + 'test.test_run.RunTests ‑ test_run_with_jsrun\n' + 'test.test_run.RunTests ‑ ' + 'test_safe_shell_exec_captures_last_line_wo_eol\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_captures_stderr\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_captures_stdout\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_interrupts_on_event\n' + 'test.test_run.RunTests ‑ ' + 'test_safe_shell_exec_interrupts_on_parent_shutdown\n' + 'test.test_run.RunTests ‑ test_safe_shell_exec_returns_exit_code\n' + 'test.test_run.RunTests ‑ test_stall_check_args\n' + 'test.test_run.RunTests ‑ test_timeline_args\n' + 'test.test_run.RunTests ‑ test_validate_config_args\n' + 'test.test_spark.SparkTests ‑ test_check_shape_compatibility\n' + 'test.test_spark.SparkTests ‑ test_df_cache\n' + 'test.test_spark.SparkTests ‑ test_driver_common_interfaces\n' + 'test.test_spark.SparkTests ‑ test_driver_common_interfaces_fails\n' + 'test.test_spark.SparkTests ‑ ' + 'test_driver_common_interfaces_from_settings\n' + 'test.test_spark.SparkTests ‑ test_driver_set_local_rank_to_index\n' + 'test.test_spark.SparkTests ‑ test_get_available_devices\n' + 'test.test_spark.SparkTests ‑ test_get_col_info\n' + 'test.test_spark.SparkTests ‑ test_get_col_info_error_bad_shape\n' + 'test.test_spark.SparkTests ‑ test_get_col_info_error_bad_size\n' + 'test.test_spark.SparkTests ‑ test_get_metadata\n' + 'test.test_spark.SparkTests ‑ test_gloo_exec_fn\n' + 'test.test_spark.SparkTests ‑ ' + 'test_gloo_exec_fn_provides_driver_with_local_rank\n' + 'test.test_spark.SparkTests ‑ test_happy_run_elastic\n' + 'test.test_spark.SparkTests ‑ test_happy_run_with_gloo\n' + 'test.test_spark.SparkTests ‑ test_happy_run_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_hdfs_store_parse_url\n' + 'test.test_spark.SparkTests ‑ test_host_hash\n' + 'test.test_spark.SparkTests ‑ ' + 'test_mpi_exec_fn_provides_driver_with_local_rank\n' + 'test.test_spark.SparkTests ‑ test_mpirun_exec_fn\n' + 'test.test_spark.SparkTests ‑ test_mpirun_not_found\n' + 'test.test_spark.SparkTests ‑ test_prepare_data_compress_sparse\n' + 'test.test_spark.SparkTests ‑ test_prepare_data_no_compression\n' + 'test.test_spark.SparkTests ‑ test_rsh_event\n' + 'test.test_spark.SparkTests ‑ test_rsh_events\n' + 'test.test_spark.SparkTests ‑ test_rsh_with_non_zero_exit_code\n' + 'test.test_spark.SparkTests ‑ test_rsh_with_zero_exit_code\n' + 'test.test_spark.SparkTests ‑ test_spark_driver_host_discovery\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_defaults_num_proc_to_spark_cores_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_defaults_num_proc_to_spark_cores_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_does_not_default_env_to_os_env_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_does_not_default_env_to_os_env_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_num_proc_precedes_spark_cores_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_num_proc_precedes_spark_cores_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_spark_run_with_gloo\n' + 'test.test_spark.SparkTests ‑ test_spark_run_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_with_non_zero_exit_with_gloo\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_with_non_zero_exit_with_mpi\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_run_with_os_environ_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_spark_run_with_path_with_mpi\n' + 'test.test_spark.SparkTests ‑ 
test_spark_task_service_abort_command\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_task_service_abort_no_command\n' + 'test.test_spark.SparkTests ‑ test_spark_task_service_env\n' + 'test.test_spark.SparkTests ‑ ' + 'test_spark_task_service_execute_command\n' + 'test.test_spark.SparkTests ‑ test_sync_hdfs_store\n' + 'test.test_spark.SparkTests ‑ test_task_fn_run_commands\n' + 'test.test_spark.SparkTests ‑ test_task_fn_run_gloo_exec\n' + 'test.test_spark.SparkTests ‑ ' + 'test_task_service_check_for_command_start\n' + 'test.test_spark.SparkTests ‑ ' + 'test_task_service_wait_for_command_start_with_timeout\n' + 'test.test_spark.SparkTests ‑ ' + 'test_task_service_wait_for_command_start_without_timeout\n' + 'test.test_spark.SparkTests ‑ test_timeout_with_gloo\n' + 'test.test_spark.SparkTests ‑ test_timeout_with_mpi\n' + 'test.test_spark.SparkTests ‑ test_to_list\n' + 'test.test_spark.SparkTests ‑ test_train_val_split_col_boolean\n' + 'test.test_spark.SparkTests ‑ test_train_val_split_col_integer\n' + 'test.test_spark.SparkTests ‑ test_train_val_split_ratio' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.standalone.junit-xml b/python/test/files/junit-xml/pytest/junit.mpi.standalone.junit-xml new file mode 100644 index 0000000..63bfd1d --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.standalone.junit-xml @@ -0,0 +1,104 @@ + + + + + + + + + + + /horovod/test/test_spark.py:1638: get_available_devices only supported in Spark 3.0 and above + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.mpi.standalone.results b/python/test/files/junit-xml/pytest/junit.mpi.standalone.results new file mode 100644 index 0000000..f1f9814 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.standalone.results @@ -0,0 +1,1285 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=97, + suite_skipped=1, + suite_failures=0, + suite_errors=0, + suite_time=219, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=97, + skipped=1, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1408, + class_name='test.test_spark.SparkTests', + test_name='test_check_shape_compatibility', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.035 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1016, + class_name='test.test_spark.SparkTests', + test_name='test_df_cache', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.226 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=119, + class_name='test.test_spark.SparkTests', + test_name='test_driver_common_interfaces', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.508 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=140, + class_name='test.test_spark.SparkTests', + test_name='test_driver_common_interfaces_fails', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.508 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=127, + class_name='test.test_spark.SparkTests', + test_name='test_driver_common_interfaces_from_settings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.507 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=152, + class_name='test.test_spark.SparkTests', + test_name='test_driver_set_local_rank_to_index', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.51 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1637, + class_name='test.test_spark.SparkTests', + test_name='test_get_available_devices', + result='skipped', + message='get_available_devices only supported in Spark 3.0 and above', + content='/horovod/test/test_spark.py:1638: get_available_devices only ' + 'supported in Spark 3.0 and above', + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1098, + class_name='test.test_spark.SparkTests', + test_name='test_get_col_info', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.198 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1147, + class_name='test.test_spark.SparkTests', + test_name='test_get_col_info_error_bad_shape', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.138 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1159, + class_name='test.test_spark.SparkTests', + test_name='test_get_col_info_error_bad_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.161 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1216, + class_name='test.test_spark.SparkTests', + test_name='test_get_metadata', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.009 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=921, + class_name='test.test_spark.SparkTests', + test_name='test_gloo_exec_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=941, + class_name='test.test_spark.SparkTests', + test_name='test_gloo_exec_fn_provides_driver_with_local_rank', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.034 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=363, + class_name='test.test_spark.SparkTests', + test_name='test_happy_run_elastic', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=11.495 + ), + publish.unittestresults.UnitTestCase( + 
result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=338, + class_name='test.test_spark.SparkTests', + test_name='test_happy_run_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=10.036 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=329, + class_name='test.test_spark.SparkTests', + test_name='test_happy_run_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.208 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1505, + class_name='test.test_spark.SparkTests', + test_name='test_hdfs_store_parse_url', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=91, + class_name='test.test_spark.SparkTests', + test_name='test_host_hash', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=936, + class_name='test.test_spark.SparkTests', + test_name='test_mpi_exec_fn_provides_driver_with_local_rank', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.033 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=849, + class_name='test.test_spark.SparkTests', + test_name='test_mpirun_exec_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.011 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=414, + class_name='test.test_spark.SparkTests', + test_name='test_mpirun_not_found', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.869 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1336, + class_name='test.test_spark.SparkTests', + test_name='test_prepare_data_compress_sparse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.896 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1265, + class_name='test.test_spark.SparkTests', + test_name='test_prepare_data_no_compression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.362 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=812, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_event', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.517 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=815, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_events', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.534 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + 
test_file='test/test_spark.py', + line=809, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_with_non_zero_exit_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.512 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=806, + class_name='test.test_spark.SparkTests', + test_name='test_rsh_with_zero_exit_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.013 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=993, + class_name='test.test_spark.SparkTests', + test_name='test_spark_driver_host_discovery', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.508 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=492, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_defaults_num_proc_to_spark_cores_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.972 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=485, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_defaults_num_proc_to_spark_cores_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.868 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=512, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_does_not_default_env_to_os_env_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.865 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=505, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_does_not_default_env_to_os_env_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.955 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=459, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_num_proc_precedes_spark_cores_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.872 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=452, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_num_proc_precedes_spark_cores_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.868 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=433, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.975 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=426, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_mpi', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=3.946 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=548, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_non_zero_exit_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.868 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=539, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_non_zero_exit_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.962 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=529, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_os_environ_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.863 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=472, + class_name='test.test_spark.SparkTests', + test_name='test_spark_run_with_path_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.822 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1614, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_abort_command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.511 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1630, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_abort_no_command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.712 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1568, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_env', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.011 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1608, + class_name='test.test_spark.SparkTests', + test_name='test_spark_task_service_execute_command', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.712 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1466, + class_name='test.test_spark.SparkTests', + test_name='test_sync_hdfs_store', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=262, + class_name='test.test_spark.SparkTests', + test_name='test_task_fn_run_commands', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.289 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=295, + class_name='test.test_spark.SparkTests', + 
test_name='test_task_fn_run_gloo_exec', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.251 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=205, + class_name='test.test_spark.SparkTests', + test_name='test_task_service_check_for_command_start', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.022 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=183, + class_name='test.test_spark.SparkTests', + test_name='test_task_service_wait_for_command_start_with_timeout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.013 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=175, + class_name='test.test_spark.SparkTests', + test_name='test_task_service_wait_for_command_start_without_timeout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.509 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=392, + class_name='test.test_spark.SparkTests', + test_name='test_timeout_with_gloo', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.096 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=383, + class_name='test.test_spark.SparkTests', + test_name='test_timeout_with_mpi', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.264 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1649, + class_name='test.test_spark.SparkTests', + test_name='test_to_list', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1201, + class_name='test.test_spark.SparkTests', + test_name='test_train_val_split_col_boolean', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.052 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1186, + class_name='test.test_spark.SparkTests', + test_name='test_train_val_split_col_integer', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.158 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_spark.py', + line=1171, + class_name='test.test_spark.SparkTests', + test_name='test_train_val_split_ratio', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.948 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=74, + class_name='test.test_run.RunTests', + test_name='test_autotune_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=93, + 
class_name='test.test_run.RunTests', + test_name='test_autotuning_with_fixed_param', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=165, + class_name='test.test_run.RunTests', + test_name='test_config_file', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.008 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=207, + class_name='test.test_run.RunTests', + test_name='test_config_file_override_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.008 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=864, + class_name='test.test_run.RunTests', + test_name='test_generate_jsrun_rankfile', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=427, + class_name='test.test_run.RunTests', + test_name='test_get_mpi_implementation', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=790, + class_name='test.test_run.RunTests', + test_name='test_gloo_run_minimal', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.187 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=801, + class_name='test.test_run.RunTests', + test_name='test_gloo_run_with_os_environ', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.188 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=415, + class_name='test.test_run.RunTests', + test_name='test_hash', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=809, + class_name='test.test_run.RunTests', + test_name='test_horovodrun_hostfile', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=419, + class_name='test.test_run.RunTests', + test_name='test_host_hash', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=224, + class_name='test.test_run.RunTests', + test_name='test_in_thread_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=821, + class_name='test.test_run.RunTests', + test_name='test_js_run', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.181 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=139, + class_name='test.test_run.RunTests', + test_name='test_library_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=154, + class_name='test.test_run.RunTests', + test_name='test_logging_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=625, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_full', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.181 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=547, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_minimal', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.197 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=584, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_on_large_cluster', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.178 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=729, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_both_paths', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.177 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=705, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_both_pythonpaths', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.179 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=723, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_env_path', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.184 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=699, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_env_pythonpath', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.207 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=754, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_non_zero_exit', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.179 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=772, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_os_environ', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.18 + ), + publish.unittestresults.UnitTestCase( + 
result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=717, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_sys_path', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.186 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=693, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_with_sys_pythonpath', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.179 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=711, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_without_path', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.179 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=687, + class_name='test.test_run.RunTests', + test_name='test_mpi_run_without_pythonpath', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.179 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=249, + class_name='test.test_run.RunTests', + test_name='test_on_event', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.218 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=57, + class_name='test.test_run.RunTests', + test_name='test_params_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=453, + class_name='test.test_run.RunTests', + test_name='test_run_controller', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.715 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=896, + class_name='test.test_run.RunTests', + test_name='test_run_with_jsrun', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=350, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_captures_last_line_wo_eol', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.174 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=347, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_captures_stderr', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.177 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=344, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_captures_stdout', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.174 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + 
line=357, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_interrupts_on_event', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.028 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=371, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_interrupts_on_parent_shutdown', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.208 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=354, + class_name='test.test_run.RunTests', + test_name='test_safe_shell_exec_returns_exit_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.178 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=119, + class_name='test.test_run.RunTests', + test_name='test_stall_check_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=108, + class_name='test.test_run.RunTests', + test_name='test_timeline_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.standalone.xml', + test_file='test/test_run.py', + line=217, + class_name='test.test_run.RunTests', + test_name='test_validate_config_args', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.standalone.xml b/python/test/files/junit-xml/pytest/junit.mpi.standalone.xml new file mode 100644 index 0000000..e1b36da --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.standalone.xml @@ -0,0 +1 @@ +/horovod/test/test_spark.py:1638: get_available_devices only supported in Spark 3.0 and above \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.static.annotations b/python/test/files/junit-xml/pytest/junit.mpi.static.annotations new file mode 100644 index 0000000..687830f --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.static.annotations @@ -0,0 +1,88 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 24 tests pass in 2m 4s', + 'summary': + '24 tests\u2002\u2003\u200324 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '2m 4s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u2003\u205f\u20040 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/12MOwqAMBAFr' 
+ 'xK2tlCx8jISYsTFT2STVOLd3ajkY/dmHswJE67aQi+aSoD16CKMnqRDswdsOxZ8uXAmGK' + 'xX6mcWPNjUUUwS10JoIkOfIb/HYthF8BWp93CWezivKbNt6Bi+Jews4boBWo1x8eMAAAA' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 24 tests, see "Raw output" for the full list of tests.', + 'title': '24 tests found', + 'raw_details': + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_gloo_remote_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_failure_mpi_remote_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_gloo_remote_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_local_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_local_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_mixed_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_mixed_func\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_remote_cmd\n' + 'test.integration.test_static_run.StaticRunTests ‑ ' + 'test_run_success_mpi_remote_func' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.static.junit-xml b/python/test/files/junit-xml/pytest/junit.mpi.static.junit-xml new file mode 100644 index 0000000..7458c66 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.static.junit-xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.mpi.static.results b/python/test/files/junit-xml/pytest/junit.mpi.static.results new file mode 100644 index 0000000..78a3045 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.static.results @@ -0,0 +1,335 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=24, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=124, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + 
tests=24, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_local_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.067 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_local_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.856 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_mixed_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.578 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_mixed_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.311 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_remote_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.867 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_gloo_remote_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.054 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_local_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.032 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_local_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.664 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_mixed_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.398 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + 
test_name='test_run_failure_mpi_mixed_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.151 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_remote_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.365 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=148, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_failure_mpi_remote_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.825 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_local_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.439 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_local_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.429 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_mixed_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.804 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_mixed_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.134 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_remote_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.886 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_gloo_remote_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.585 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_local_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.761 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + 
class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_local_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.591 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_mixed_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.203 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_mixed_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.504 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_remote_cmd', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.238 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.mpi.static.xml', + test_file='test/integration/test_static_run.py', + line=135, + class_name='test.integration.test_static_run.StaticRunTests', + test_name='test_run_success_mpi_remote_func', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.697 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.mpi.static.xml b/python/test/files/junit-xml/pytest/junit.mpi.static.xml new file mode 100644 index 0000000..c76171f --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.mpi.static.xml @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.1.annotations b/python/test/files/junit-xml/pytest/junit.spark.integration.1.annotations new file mode 100644 index 0000000..863ba84 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.1.annotations @@ -0,0 +1,109 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 33 tests pass, 2 skipped in 2m 45s', + 'summary': + '35 tests\u2002\u2003\u200333 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '2m 45s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u2003\u205f\u20042 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLfxECy9jCErcKGAWqIx3d1GzavdmXjK7NLBOQfaiKoQMCSLDmFBF8C5j15KgK+azYR' + 'hC0jqb5jULbGRqFkbBSqJkMSF6fAwmx8W8f8FbvL2LP7mLvzXtrYVI8CwRZiWPEwEjqVj' + 'jAAAA\n', + 'annotations': [ + { + 'path': '.github', + 
'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 2 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '2 skipped tests found', + 'raw_details': + 'test.test_spark_keras.SparkKerasTests ‑ test_session\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_happy_run_elastic_fault_tolerant_fails' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 35 tests, see "Raw output" for the full list of tests.', + 'title': '35 tests found', + 'raw_details': + 'test.test_spark_keras.SparkKerasTests ‑ test_batch_generator_fn\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_calculate_shuffle_buffer_size\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_calculate_shuffle_buffer_size_small_row_size\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_convert_custom_sparse_to_dense_bare_keras_fn\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_custom_sparse_to_dense_fn\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_fit_model\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_fit_model_multiclass\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_keras_direct_parquet_train\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_keras_model_checkpoint_callback\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_model_serialization\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_prep_data_tf_keras_fn_with_sparse_col\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_prep_data_tf_keras_fn_without_sparse_col\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_prepare_data_bare_keras_fn\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_reshape\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_restore_from_checkpoint\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_serialize_param_value\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_session\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_loss_with_sample_weight\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_loss_without_sample_weight\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_shuffle_buffer_size\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_shuffle_buffer_size_small_row_size\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_construct_metric_value_holders_one_metric_for_all_labels\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_fit_model\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_get_metric_avgs\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_happy_run_elastic\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_happy_run_elastic_fault_tolerant\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_happy_run_elastic_fault_tolerant_fails\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_metric_class\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_prepare_np_data\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_pytorch_get_optimizer_with_unscaled_lr\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_restore_from_checkpoint\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_torch_direct_parquet_train\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_torch_param_serialize\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_transform_multi_class\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_update_metrics' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.1.junit-xml 
b/python/test/files/junit-xml/pytest/junit.spark.integration.1.junit-xml new file mode 100644 index 0000000..3e67db1 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.1.junit-xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/test_util.py:2076: Not a test. + + + + + + + + + + + + /horovod/test/test_spark_torch.py:469: elastic horovod does not support shutdown from the spark driver while elastic driver is waiting for hosts to come up + + + + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.1.results b/python/test/files/junit-xml/pytest/junit.spark.integration.1.results new file mode 100644 index 0000000..bb254f4 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.1.results @@ -0,0 +1,483 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=35, + suite_skipped=2, + suite_failures=0, + suite_errors=0, + suite_time=165, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=35, + skipped=2, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=454, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_batch_generator_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=385, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_calculate_shuffle_buffer_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=371, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_calculate_shuffle_buffer_size_small_row_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=410, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_convert_custom_sparse_to_dense_bare_keras_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=399, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_custom_sparse_to_dense_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.041 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=75, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_fit_model', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=14.352 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=103, + class_name='test.test_spark_keras.SparkKerasTests', + 
test_name='test_fit_model_multiclass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=34.284 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=186, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_keras_direct_parquet_train', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=11.54 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=225, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_keras_model_checkpoint_callback', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=14.137 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=322, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_model_serialization', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.851 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=575, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_prep_data_tf_keras_fn_with_sparse_col', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.051 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=612, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_prep_data_tf_keras_fn_without_sparse_col', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.032 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=416, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_prepare_data_bare_keras_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=528, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_reshape', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.039 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=139, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_restore_from_checkpoint', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.23 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_keras.py', + line=361, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_serialize_param_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='../usr/local/lib/python3.6/dist-packages/tensorflow_core/python/' + 'framework/test_util.py', + line=2075, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_session', 
+ result='skipped', + message='Not a test.', + content='/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framewo' + 'rk/test_util.py:2076: Not a test.', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=372, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_loss_with_sample_weight', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.022 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=401, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_loss_without_sample_weight', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=198, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_shuffle_buffer_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=184, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_shuffle_buffer_size_small_row_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=227, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_construct_metric_value_holders_one_metric_for_all_labels', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=73, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_fit_model', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=12.786 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=269, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_get_metric_avgs', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=433, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_happy_run_elastic', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.165 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=447, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_happy_run_elastic_fault_tolerant', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=23.934 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=468, + 
class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_happy_run_elastic_fault_tolerant_fails', + result='skipped', + message='elastic horovod does not support shutdown from the spark driver ' + 'while elastic driver is waiting for hosts to come up', + content='/horovod/test/test_spark_torch.py:469: elastic horovod does not ' + 'support shutdown from the spark driver while elastic driver is ' + 'waiting for hosts to come up', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=213, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_metric_class', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.026 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=251, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_prepare_np_data', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.319 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=167, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_pytorch_get_optimizer_with_unscaled_lr', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=103, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_restore_from_checkpoint', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.198 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=335, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_torch_direct_parquet_train', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.305 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=325, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_torch_param_serialize', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=140, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_transform_multi_class', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.549 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.1.xml', + test_file='test/test_spark_torch.py', + line=291, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_update_metrics', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.008 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.1.xml b/python/test/files/junit-xml/pytest/junit.spark.integration.1.xml new file mode 100644 index 0000000..73e6501 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.1.xml @@ -0,0 +1 @@ 
+/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/test_util.py:2076: Not a test./horovod/test/test_spark_torch.py:469: elastic horovod does not support shutdown from the spark driver while elastic driver is waiting for hosts to come up \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.2.annotations b/python/test/files/junit-xml/pytest/junit.spark.integration.2.annotations new file mode 100644 index 0000000..cc90146 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.2.annotations @@ -0,0 +1,109 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 33 tests pass, 2 skipped in 2m 52s', + 'summary': + '35 tests\u2002\u2003\u200333 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '2m 52s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u2003\u205f\u20042 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMSwqAMAwFr' + '1K6duEHEbxMKVUx+Kmk7Uq8u6mfVHeZeWF2OcDcO9mKIhPSBfAMXUDtwa4Rm5IETT6OVf' + '2CcsGYaKpkJtjI8L8aNMwkchY9osXHYFi5GO9f8Bapd/End/G3ZuyygCd4LuFGLY8TfGY' + 'a1uMAAAA=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 2 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '2 skipped tests found', + 'raw_details': + 'test.test_spark_keras.SparkKerasTests ‑ test_session\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_happy_run_elastic_fault_tolerant_fails' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 35 tests, see "Raw output" for the full list of tests.', + 'title': '35 tests found', + 'raw_details': + 'test.test_spark_keras.SparkKerasTests ‑ test_batch_generator_fn\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_calculate_shuffle_buffer_size\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_calculate_shuffle_buffer_size_small_row_size\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_convert_custom_sparse_to_dense_bare_keras_fn\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_custom_sparse_to_dense_fn\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_fit_model\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_fit_model_multiclass\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_keras_direct_parquet_train\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_keras_model_checkpoint_callback\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_model_serialization\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_prep_data_tf_keras_fn_with_sparse_col\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_prep_data_tf_keras_fn_without_sparse_col\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_prepare_data_bare_keras_fn\n' 
+ 'test.test_spark_keras.SparkKerasTests ‑ test_reshape\n' + 'test.test_spark_keras.SparkKerasTests ‑ ' + 'test_restore_from_checkpoint\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_serialize_param_value\n' + 'test.test_spark_keras.SparkKerasTests ‑ test_session\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_loss_with_sample_weight\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_loss_without_sample_weight\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_shuffle_buffer_size\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_calculate_shuffle_buffer_size_small_row_size\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_construct_metric_value_holders_one_metric_for_all_labels\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_fit_model\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_get_metric_avgs\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_happy_run_elastic\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_happy_run_elastic_fault_tolerant\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_happy_run_elastic_fault_tolerant_fails\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_metric_class\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_prepare_np_data\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_pytorch_get_optimizer_with_unscaled_lr\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_restore_from_checkpoint\n' + 'test.test_spark_torch.SparkTorchTests ‑ ' + 'test_torch_direct_parquet_train\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_torch_param_serialize\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_transform_multi_class\n' + 'test.test_spark_torch.SparkTorchTests ‑ test_update_metrics' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.2.junit-xml b/python/test/files/junit-xml/pytest/junit.spark.integration.2.junit-xml new file mode 100644 index 0000000..182df17 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.2.junit-xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + /usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/test_util.py:2076: Not a test. 
+ + + + + + + + + + + + /horovod/test/test_spark_torch.py:469: elastic horovod does not support shutdown from the spark driver while elastic driver is waiting for hosts to come up + + + + + + + + + + + diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.2.results b/python/test/files/junit-xml/pytest/junit.spark.integration.2.results new file mode 100644 index 0000000..fc82e8c --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.2.results @@ -0,0 +1,483 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=35, + suite_skipped=2, + suite_failures=0, + suite_errors=0, + suite_time=172, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=35, + skipped=2, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=454, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_batch_generator_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=385, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_calculate_shuffle_buffer_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=371, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_calculate_shuffle_buffer_size_small_row_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=410, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_convert_custom_sparse_to_dense_bare_keras_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=399, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_custom_sparse_to_dense_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.038 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=75, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_fit_model', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=12.424 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=103, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_fit_model_multiclass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=31.925 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=186, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_keras_direct_parquet_train', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=11.57 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=225, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_keras_model_checkpoint_callback', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=14.517 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=322, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_model_serialization', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.223 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=575, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_prep_data_tf_keras_fn_with_sparse_col', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.051 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=612, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_prep_data_tf_keras_fn_without_sparse_col', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.034 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=416, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_prepare_data_bare_keras_fn', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=528, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_reshape', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.04 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=139, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_restore_from_checkpoint', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.92 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_keras.py', + line=361, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_serialize_param_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='../usr/local/lib/python3.6/dist-packages/tensorflow_core/python/' + 'framework/test_util.py', + line=2075, + class_name='test.test_spark_keras.SparkKerasTests', + test_name='test_session', + result='skipped', + message='Not a test.', + content='/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framewo' + 'rk/test_util.py:2076: Not a test.', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=372, + 
class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_loss_with_sample_weight', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.025 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=401, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_loss_without_sample_weight', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=198, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_shuffle_buffer_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=184, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_calculate_shuffle_buffer_size_small_row_size', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=227, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_construct_metric_value_holders_one_metric_for_all_labels', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=73, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_fit_model', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=13.773 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=269, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_get_metric_avgs', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=433, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_happy_run_elastic', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.573 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=447, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_happy_run_elastic_fault_tolerant', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=27.56 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=468, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_happy_run_elastic_fault_tolerant_fails', + result='skipped', + message='elastic horovod does not support shutdown from the spark driver ' + 'while elastic driver is waiting for hosts to come up', + content='/horovod/test/test_spark_torch.py:469: elastic horovod does not ' + 'support shutdown from the spark driver while 
elastic driver is ' + 'waiting for hosts to come up', + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=213, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_metric_class', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.023 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=251, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_prepare_np_data', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.061 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=167, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_pytorch_get_optimizer_with_unscaled_lr', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=103, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_restore_from_checkpoint', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.464 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=335, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_torch_direct_parquet_train', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.825 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=325, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_torch_param_serialize', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=140, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_transform_multi_class', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.384 + ), + publish.unittestresults.UnitTestCase( + result_file='pytest/junit.spark.integration.2.xml', + test_file='test/test_spark_torch.py', + line=291, + class_name='test.test_spark_torch.SparkTorchTests', + test_name='test_update_metrics', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.008 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/pytest/junit.spark.integration.2.xml b/python/test/files/junit-xml/pytest/junit.spark.integration.2.xml new file mode 100644 index 0000000..1464a93 --- /dev/null +++ b/python/test/files/junit-xml/pytest/junit.spark.integration.2.xml @@ -0,0 +1 @@ +/usr/local/lib/python3.6/dist-packages/tensorflow_core/python/framework/test_util.py:2076: Not a test./horovod/test/test_spark_torch.py:469: elastic horovod does not support shutdown from the spark driver while elastic driver is waiting for hosts to come up \ No newline at end of file diff --git 
a/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.annotations b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.annotations new file mode 100644 index 0000000..59ed37a --- /dev/null +++ b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.annotations @@ -0,0 +1,49 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 5 tests pass in 2s', + 'summary': + '5 tests\u2002\u2003\u20035 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '2s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMwQqAIBAFf' + '0U8d6igSz8TYkZLlrHqKfr3VlOz25t5MBdfQCvLR9Y1jFsPrsDsUTgwB2FPSIcL15D3ZL' + '2Uf7HBSaItYhGgf0IhGkwG/ZF7Yda5l79a5CoWuW5Js+/gCNJidhX8fgDdy7133QAAAA=' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 5 tests, see "Raw output" for the full list of tests.', + 'title': '5 tests found', + 'raw_details': + 'uk.co.gresearch.spark.diff.DiffOptionsSuite ‑ diff options diff ' + 'value\nuk.co.gresearch.spark.diff.DiffOptionsSuite ‑ diff options ' + 'left and right prefixes\n' + 'uk.co.gresearch.spark.diff.DiffOptionsSuite ‑ diff options with ' + 'change column name same as diff column\n' + 'uk.co.gresearch.spark.diff.DiffOptionsSuite ‑ diff options with ' + 'empty diff column name\n' + 'uk.co.gresearch.spark.diff.DiffOptionsSuite ‑ fluent methods of ' + 'diff options' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.junit-xml b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.junit-xml new file mode 100644 index 0000000..0d757f8 --- /dev/null +++ b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.junit-xml @@ -0,0 +1,84 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.results b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.results new file mode 100644 index 0000000..f03c190 --- /dev/null +++ b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.results @@ -0,0 +1,88 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=5, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=2, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + tests=5, + skipped=0, + failures=0, + errors=0, + 
stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml', + test_file=None, + line=None, + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + test_name='diff options with empty diff column name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.259 + ), + publish.unittestresults.UnitTestCase( + result_file='scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml', + test_file=None, + line=None, + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + test_name='diff options left and right prefixes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.959 + ), + publish.unittestresults.UnitTestCase( + result_file='scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml', + test_file=None, + line=None, + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + test_name='diff options diff value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml', + test_file=None, + line=None, + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + test_name='diff options with change column name same as diff column', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml', + test_file=None, + line=None, + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + test_name='fluent methods of diff options', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml new file mode 100644 index 0000000..67162d7 --- /dev/null +++ b/python/test/files/junit-xml/scalatest/TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml @@ -0,0 +1,105 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/testsuite-in-testsuite.annotations b/python/test/files/junit-xml/testsuite-in-testsuite.annotations new file mode 100644 index 0000000..7c5453e --- /dev/null +++ b/python/test/files/junit-xml/testsuite-in-testsuite.annotations @@ -0,0 +1,42 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 5 tests pass in 4s', + 'summary': + '5 tests\u2002\u2003\u20035 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '4s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '4 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 
'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMywqAIBQFf' + '0VctyioTT8TYkqXfMRVV9G/Z6aluzNzYE4qQQlHZzJ0hLoAPsEYYQ3IPFiTMR7+uaayFx' + 'c4b8UORxT9JyQD1QiBaDEbDKb0nlnnXv5riatY4rrFrdbgI+RF3MbodQOdcxe63QAAAA=' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 5 tests, see "Raw output" for the full list of tests.', + 'title': '5 tests found', + 'raw_details': + 'someName ‑ TestCase1\nsomeName ‑ TestCase2\nsomeName ‑ TestCase3\n' + 'someName ‑ TestCase4\nsomeName ‑ TestCase5' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/testsuite-in-testsuite.junit-xml b/python/test/files/junit-xml/testsuite-in-testsuite.junit-xml new file mode 100644 index 0000000..a67a292 --- /dev/null +++ b/python/test/files/junit-xml/testsuite-in-testsuite.junit-xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/testsuite-in-testsuite.results b/python/test/files/junit-xml/testsuite-in-testsuite.results new file mode 100644 index 0000000..4e029c4 --- /dev/null +++ b/python/test/files/junit-xml/testsuite-in-testsuite.results @@ -0,0 +1,115 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=4, + suite_tests=5, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=4, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='TestSuite1', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='TestSuite2.1', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='TestSuite2', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='Project Test Suite', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='testsuite-in-testsuite.xml', + test_file='/somepath', + line=34, + class_name='someName', + test_name='TestCase1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.32159 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-in-testsuite.xml', + test_file='/somepath', + line=65, + class_name='someName', + test_name='TestCase2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.321319 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-in-testsuite.xml', + test_file='/somepath', + line=40, + class_name='someName', + test_name='TestCase3', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.08817 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-in-testsuite.xml', + test_file='/somepath', + line=40, + class_name='someName', + test_name='TestCase4', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.98817 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-in-testsuite.xml', + test_file='/somepath', + line=40, + class_name='someName', + test_name='TestCase5', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.08817 + ) + ] +) \ No newline at end of file diff --git 
a/python/test/files/junit-xml/testsuite-in-testsuite.xml b/python/test/files/junit-xml/testsuite-in-testsuite.xml new file mode 100644 index 0000000..a5c4b69 --- /dev/null +++ b/python/test/files/junit-xml/testsuite-in-testsuite.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/testsuite-root.annotations b/python/test/files/junit-xml/testsuite-root.annotations new file mode 100644 index 0000000..7c5453e --- /dev/null +++ b/python/test/files/junit-xml/testsuite-root.annotations @@ -0,0 +1,42 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 5 tests pass in 4s', + 'summary': + '5 tests\u2002\u2003\u20035 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '4s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '4 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMywqAIBQFf' + '0VctyioTT8TYkqXfMRVV9G/Z6aluzNzYE4qQQlHZzJ0hLoAPsEYYQ3IPFiTMR7+uaayFx' + 'c4b8UORxT9JyQD1QiBaDEbDKb0nlnnXv5riatY4rrFrdbgI+RF3MbodQOdcxe63QAAAA=' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 5 tests, see "Raw output" for the full list of tests.', + 'title': '5 tests found', + 'raw_details': + 'someName ‑ TestCase1\nsomeName ‑ TestCase2\nsomeName ‑ TestCase3\n' + 'someName ‑ TestCase4\nsomeName ‑ TestCase5' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/testsuite-root.junit-xml b/python/test/files/junit-xml/testsuite-root.junit-xml new file mode 100644 index 0000000..b1ab523 --- /dev/null +++ b/python/test/files/junit-xml/testsuite-root.junit-xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/testsuite-root.results b/python/test/files/junit-xml/testsuite-root.results new file mode 100644 index 0000000..a259c63 --- /dev/null +++ b/python/test/files/junit-xml/testsuite-root.results @@ -0,0 +1,115 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=4, + suite_tests=5, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=4, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='TestSuite1', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='TestSuite2.1', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='TestSuite2', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='Project Test Suite', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='testsuite-root.xml', + test_file='/somepath', + line=34, + class_name='someName', 
+ test_name='TestCase1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.32159 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-root.xml', + test_file='/somepath', + line=65, + class_name='someName', + test_name='TestCase2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.321319 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-root.xml', + test_file='/somepath', + line=40, + class_name='someName', + test_name='TestCase3', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.08817 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-root.xml', + test_file='/somepath', + line=40, + class_name='someName', + test_name='TestCase4', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.98817 + ), + publish.unittestresults.UnitTestCase( + result_file='testsuite-root.xml', + test_file='/somepath', + line=40, + class_name='someName', + test_name='TestCase5', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.08817 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/testsuite-root.xml b/python/test/files/junit-xml/testsuite-root.xml new file mode 100644 index 0000000..b3beb1f --- /dev/null +++ b/python/test/files/junit-xml/testsuite-root.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/tst/disabled.annotations b/python/test/files/junit-xml/tst/disabled.annotations new file mode 100644 index 0000000..3f1f6ba --- /dev/null +++ b/python/test/files/junit-xml/tst/disabled.annotations @@ -0,0 +1,294 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors, 19 fail, 5 skipped, 6 pass in 0s', + 'summary': + '\u205f\u20041 files\u2004\u2003\u205f\u20042 suites\u2004\u2003\u2002' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '31 tests\u2003\u205f\u20046 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '5 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '19 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n31 runs\u2006\u2003' + '11 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '19 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02NQQqAIBBFr' + 
'yKuW2RRUJcJsaIhyxh1Fd29sdLczXsf3px8Bj1Z3jNRMG49uAcqgtGjdGB2wpKQBhemWk' + 'QYrFeKTPuLFQ4STRKzBB3aXTITosHvHfo9FcMdg+IXb7CMnPcekeeU2TZwBN/F7CL5dQP' + 'prhoZ4gAAAA==\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'factorial_of_value_from_fixture failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/main.cpp:72: error: ' + 'check_eq(3628800, 3628801)' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'positive_arguments_must_produce_expected_result failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/main.cpp:45: error: check_ne(6, ' + '6)hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'test_which_throws_unknown_exception with error', + 'raw_details': 'uncaught (anonymous namespace)::some_unknown_exception' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'positive_arguments_must_produce_expected_result[2] failed', + 'raw_details': '/home/ivan/prj/tst/tests/failed/main.cpp:85: error: check(false)' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'factorial_of_value_from_fixture[0] failed', + 'raw_details': '/home/ivan/prj/tst/tests/failed/main.cpp:109: error: expected 2' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'test_which_fails_check_eq_with_custom_message failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/main.cpp:62: error: check_eq(6, ' + '7)hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_ge_print failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:59: error: check_ge(2, ' + '3)failed!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_ge failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:55: error: check_ge(2, ' + '3)Hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_gt_print failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:43: error: check_gt(2, ' + '2)failed!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_lt_print failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:35: error: check_lt(2, ' + '2)failed!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_print failed', + 'raw_details': '/home/ivan/prj/tst/tests/failed/checks.cpp:11: error: failed!' 
+ }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_gt failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:39: error: check_gt(2, ' + '2)Hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check failed', + 'raw_details': '/home/ivan/prj/tst/tests/failed/checks.cpp:7: error: Hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_le_print failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:51: error: check_le(2, ' + '1)failed!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_eq failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:15: error: check_eq(1, ' + '2)Hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_eq_print failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:19: error: check_eq(1, ' + '2)failed!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_le failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:47: error: check_le(2, ' + '1)Hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_ne failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:23: error: check_ne(2, ' + '2)Hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_lt failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:31: error: check_lt(2, ' + '2)Hello world!' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tst/disabled.xml\u2003[took 0s]', + 'title': 'check_ne_print failed', + 'raw_details': + '/home/ivan/prj/tst/tests/failed/checks.cpp:27: error: check_ne(2, ' + '2)failed!' 
+ }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 5 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '5 skipped tests found', + 'raw_details': + 'disabled_param_test[0]\ndisabled_param_test[1]\n' + 'disabled_param_test[2]\ndisabled_param_test[3]\ndisabled_test' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 31 tests, see "Raw output" for the full list of tests.', + 'title': '31 tests found', + 'raw_details': + 'check\ncheck_eq\ncheck_eq_print\ncheck_ge\ncheck_ge_print\n' + 'check_gt\ncheck_gt_print\ncheck_le\ncheck_le_print\ncheck_lt\n' + 'check_lt_print\ncheck_ne\ncheck_ne_print\ncheck_print\n' + 'disabled_param_test[0]\ndisabled_param_test[1]\n' + 'disabled_param_test[2]\ndisabled_param_test[3]\ndisabled_test\n' + 'factorial_of_value_from_fixture\n' + 'factorial_of_value_from_fixture[0]\n' + 'factorial_of_value_from_fixture[1]\n' + 'factorial_of_value_from_fixture[2]\n' + 'factorial_of_value_from_fixture[3]\n' + 'positive_arguments_must_produce_expected_result\n' + 'positive_arguments_must_produce_expected_result[0]\n' + 'positive_arguments_must_produce_expected_result[1]\n' + 'positive_arguments_must_produce_expected_result[2]\n' + 'positive_arguments_must_produce_expected_result[3]\n' + 'test_which_fails_check_eq_with_custom_message\n' + 'test_which_throws_unknown_exception' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/tst/disabled.junit-xml b/python/test/files/junit-xml/tst/disabled.junit-xml new file mode 100644 index 0000000..f9bd94d --- /dev/null +++ b/python/test/files/junit-xml/tst/disabled.junit-xml @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/tst/disabled.results b/python/test/files/junit-xml/tst/disabled.results new file mode 100644 index 0000000..f65756b --- /dev/null +++ b/python/test/files/junit-xml/tst/disabled.results @@ -0,0 +1,450 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=31, + suite_skipped=0, + suite_failures=19, + suite_errors=1, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='factorial', + tests=17, + skipped=0, + failures=5, + errors=1, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='failing_checks', + tests=14, + skipped=0, + failures=14, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='positive_arguments_must_produce_expected_result[0]', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='factorial_of_value_from_fixture', + result='failure', + message='/home/ivan/prj/tst/tests/failed/main.cpp:72: error: ' + 'check_eq(3628800, 3628801)', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='factorial_of_value_from_fixture[3]', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='factorial_of_value_from_fixture[2]', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='disabled_test', + result='disabled', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='positive_arguments_must_produce_expected_result', + result='failure', + message='/home/ivan/prj/tst/tests/failed/main.cpp:45: error: check_ne(6, ' + '6)hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='test_which_throws_unknown_exception', + result='error', + message='uncaught (anonymous namespace)::some_unknown_exception', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='positive_arguments_must_produce_expected_result[2]', + result='failure', + message='/home/ivan/prj/tst/tests/failed/main.cpp:85: error: check(false)', + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='positive_arguments_must_produce_expected_result[3]', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='factorial_of_value_from_fixture[0]', + result='failure', + message='/home/ivan/prj/tst/tests/failed/main.cpp:109: error: expected 2', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='disabled_param_test[0]', + result='disabled', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='disabled_param_test[1]', + result='disabled', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='disabled_param_test[2]', + result='disabled', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='test_which_fails_check_eq_with_custom_message', + result='failure', + message='/home/ivan/prj/tst/tests/failed/main.cpp:62: error: check_eq(6, ' + '7)hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='disabled_param_test[3]', + 
result='disabled', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='positive_arguments_must_produce_expected_result[1]', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='factorial_of_value_from_fixture[1]', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_ge_print', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:59: error: check_ge(2, ' + '3)failed!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_ge', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:55: error: check_ge(2, ' + '3)Hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_gt_print', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:43: error: check_gt(2, ' + '2)failed!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_lt_print', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:35: error: check_lt(2, ' + '2)failed!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_print', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:11: error: failed!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_gt', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:39: error: check_gt(2, ' + '2)Hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:7: error: Hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_le_print', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:51: error: check_le(2, ' + '1)failed!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_eq', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:15: error: check_eq(1, ' + '2)Hello world!', + content=None, + stdout=None, + 
stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_eq_print', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:19: error: check_eq(1, ' + '2)failed!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_le', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:47: error: check_le(2, ' + '1)Hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_ne', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:23: error: check_ne(2, ' + '2)Hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_lt', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:31: error: check_lt(2, ' + '2)Hello world!', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tst/disabled.xml', + test_file=None, + line=None, + class_name=None, + test_name='check_ne_print', + result='failure', + message='/home/ivan/prj/tst/tests/failed/checks.cpp:27: error: check_ne(2, ' + '2)failed!', + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/tst/disabled.xml b/python/test/files/junit-xml/tst/disabled.xml new file mode 100644 index 0000000..a724e3e --- /dev/null +++ b/python/test/files/junit-xml/tst/disabled.xml @@ -0,0 +1,78 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/unsupported-unicode.annotations b/python/test/files/junit-xml/unsupported-unicode.annotations new file mode 100644 index 0000000..fa0a557 --- /dev/null +++ b/python/test/files/junit-xml/unsupported-unicode.annotations @@ -0,0 +1,99 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '2 errors, 2 fail, 2 skipped, 1 pass in 8s', + 'summary': + '7 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '8s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20032 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20032 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20032 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMMQ6AIAxFr' + 
'0KYXXTReBlDUGIjgikwGe9uQVDc/ntt3skV6MXxkbUN4y6Af2EOKDxYQzgQ0sHHU1/25I' + 'KU+TeLDQ4S3SuUAP0TC6LFbDCY0ouzzj381RJXscR1S9p9B0+QF3Or4NcNSlhwMN0AAAA' + '=\n', + 'annotations': [ + { + 'path': 'test/test-4.py', + 'start_line': 4, + 'end_line': 4, + 'annotation_level': 'warning', + 'message': 'unsupported-unicode.xml\u2003[took 1s]', + 'title': 'test 4 failed', + 'raw_details': + 'Some unsupported unicode characters: ' + '헴䜝헱홐㣇㿷䔭\\U0001237a\\U000214ff\\U00020109㦓\nfailed' + }, + { + 'path': 'test/test-5.py', + 'start_line': 5, + 'end_line': 5, + 'annotation_level': 'warning', + 'message': 'unsupported-unicode.xml\u2003[took 1s]', + 'title': 'test 5 failed', + 'raw_details': + 'message\nSome unsupported unicode characters: ' + '헴䜝헱홐㣇㿷䔭\\U0001237a\\U000214ff\\U00020109㦓' + }, + { + 'path': 'test/test-6.py', + 'start_line': 6, + 'end_line': 6, + 'annotation_level': 'failure', + 'message': 'unsupported-unicode.xml\u2003[took 1s]', + 'title': 'test 6 with error', + 'raw_details': + 'Some unsupported unicode characters: ' + '헴䜝헱홐㣇㿷䔭\\U0001237a\\U000214ff\\U00020109㦓\nerror' + }, + { + 'path': 'test/test-7.py', + 'start_line': 7, + 'end_line': 7, + 'annotation_level': 'failure', + 'message': 'unsupported-unicode.xml\u2003[took 1s]', + 'title': 'test 7 with error', + 'raw_details': + 'message\nSome unsupported unicode characters: ' + '헴䜝헱홐㣇㿷䔭\\U0001237a\\U000214ff\\U00020109㦓' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 2 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '2 skipped tests found', + 'raw_details': 'test 2\ntest 3' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 7 tests, see "Raw output" for the full list of tests.', + 'title': '7 tests found', + 'raw_details': + 'test 1 헴䜝헱홐㣇㿷䔭\\U0001237a\\U000214ff\\U00020109㦓\ntest 2\ntest 3\n' + 'test 4\ntest 5\ntest 6\ntest 7' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/unsupported-unicode.junit-xml b/python/test/files/junit-xml/unsupported-unicode.junit-xml new file mode 100644 index 0000000..d05c2df --- /dev/null +++ b/python/test/files/junit-xml/unsupported-unicode.junit-xml @@ -0,0 +1,30 @@ + + + + + + skipped + + + + Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓 + + + + failed + + + + Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓 + + + + error + + + + Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓 + + + + diff --git a/python/test/files/junit-xml/unsupported-unicode.results b/python/test/files/junit-xml/unsupported-unicode.results new file mode 100644 index 0000000..98e49b6 --- /dev/null +++ b/python/test/files/junit-xml/unsupported-unicode.results @@ -0,0 +1,114 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=7, + suite_skipped=2, + suite_failures=2, + suite_errors=2, + suite_time=8, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='pytest', + tests=7, + skipped=2, + failures=2, + errors=2, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='unsupported-unicode.xml', + test_file='test/test-1.py', + line=1, + class_name=None, + test_name='test 1 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.23 + ), + publish.unittestresults.UnitTestCase( + result_file='unsupported-unicode.xml', + test_file='test/test-2.py', + line=2, + 
class_name=None, + test_name='test 2', + result='skipped', + message='Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓', + content='skipped\n ', + stdout=None, + stderr=None, + time=1.23 + ), + publish.unittestresults.UnitTestCase( + result_file='unsupported-unicode.xml', + test_file='test/test-3.py', + line=3, + class_name=None, + test_name='test 3', + result='skipped', + message='message', + content='Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓\n ', + stdout=None, + stderr=None, + time=1.23 + ), + publish.unittestresults.UnitTestCase( + result_file='unsupported-unicode.xml', + test_file='test/test-4.py', + line=4, + class_name=None, + test_name='test 4', + result='failure', + message='Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓', + content='failed\n ', + stdout=None, + stderr=None, + time=1.23 + ), + publish.unittestresults.UnitTestCase( + result_file='unsupported-unicode.xml', + test_file='test/test-5.py', + line=5, + class_name=None, + test_name='test 5', + result='failure', + message='message', + content='Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓\n ', + stdout=None, + stderr=None, + time=1.23 + ), + publish.unittestresults.UnitTestCase( + result_file='unsupported-unicode.xml', + test_file='test/test-6.py', + line=6, + class_name=None, + test_name='test 6', + result='error', + message='Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓', + content='error\n ', + stdout=None, + stderr=None, + time=1.23 + ), + publish.unittestresults.UnitTestCase( + result_file='unsupported-unicode.xml', + test_file='test/test-7.py', + line=7, + class_name=None, + test_name='test 7', + result='error', + message='message', + content='Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓\n ', + stdout=None, + stderr=None, + time=1.23 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/unsupported-unicode.xml b/python/test/files/junit-xml/unsupported-unicode.xml new file mode 100644 index 0000000..cec4e5a --- /dev/null +++ b/python/test/files/junit-xml/unsupported-unicode.xml @@ -0,0 +1,30 @@ + + + + + + skipped + + + + Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓 + + + + failed + + + + Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓 + + + + error + + + + Some unsupported unicode characters: 헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓 + + + + diff --git a/python/test/files/junit-xml/with-xml-entities.annotations b/python/test/files/junit-xml/with-xml-entities.annotations new file mode 100644 index 0000000..b8a6577 --- /dev/null +++ b/python/test/files/junit-xml/with-xml-entities.annotations @@ -0,0 +1,76 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors, 1 fail, 2 skipped in 0s', + 'summary': + '4 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20032 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols 
"test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MQQ5AMBBFr' + '9J0bYFYuYw0RUyUyky7Enc3qmV2/72fvFPP4CbSvWoqpSlC+GCMaAL4nbFm5CM8V1f2QN' + 'FaeQ60wsGi/cRswOXaKyZEj9lg3EvvmTL38l9LLGKJZcv6bYPAkJeixejrBpBXIV3dAAA' + 'A\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'with-xml-entities.xml\u2003[took 0s]', + 'title': "Test with 'apostrophe' in the test name failed", + 'raw_details': "A message with 'apostrophes'\nContent with 'apostrophes'" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'with-xml-entities.xml\u2003[took 0s]', + 'title': 'Test with & in the test name with error', + 'raw_details': 'A message with &\nContent with &' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 2 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '2 skipped tests found', + 'raw_details': + 'Test with "quotes" in the test name\nTest with < and > in the test ' + 'name' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 4 tests, see "Raw output" for the full list of tests.', + 'title': '4 tests found', + 'raw_details': + 'Test with "quotes" in the test name\nTest with & in the test name\n' + 'Test with \'apostrophe\' in the test name\nTest with < and > in ' + 'the test name' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/with-xml-entities.junit-xml b/python/test/files/junit-xml/with-xml-entities.junit-xml new file mode 100644 index 0000000..0a38ad9 --- /dev/null +++ b/python/test/files/junit-xml/with-xml-entities.junit-xml @@ -0,0 +1,17 @@ + + + + + Content with "quotes" + + + Content with 'apostrophes' + + + Content with & + + + Content with < and > + + + diff --git a/python/test/files/junit-xml/with-xml-entities.results b/python/test/files/junit-xml/with-xml-entities.results new file mode 100644 index 0000000..1c8006d --- /dev/null +++ b/python/test/files/junit-xml/with-xml-entities.results @@ -0,0 +1,75 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=2, + suite_failures=1, + suite_errors=1, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name=None, + tests=4, + skipped=2, + failures=1, + errors=1, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='with-xml-entities.xml', + test_file=None, + line=None, + class_name=None, + test_name='Test with "quotes" in the test name', + result='skipped', + message='A message with "quotes"', + content='Content with "quotes"', + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='with-xml-entities.xml', + test_file=None, + line=None, + class_name=None, + test_name="Test with 'apostrophe' in the test name", + result='failure', + message="A message with 'apostrophes'", + content="Content with 'apostrophes'", + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='with-xml-entities.xml', + test_file=None, + line=None, + class_name=None, + test_name='Test with & in the test name', + result='error', + message='A message with &', + content='Content with &', + stdout=None, + stderr=None, + time=0.0 + ), + 
publish.unittestresults.UnitTestCase( + result_file='with-xml-entities.xml', + test_file=None, + line=None, + class_name=None, + test_name='Test with < and > in the test name', + result='skipped', + message='A message with < and >', + content='Content with < and >', + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/with-xml-entities.xml b/python/test/files/junit-xml/with-xml-entities.xml new file mode 100644 index 0000000..63f8c6d --- /dev/null +++ b/python/test/files/junit-xml/with-xml-entities.xml @@ -0,0 +1,17 @@ + + + + + Content with "quotes" + + + Content with 'apostrophes' + + + Content with & + + + Content with < and > + + + \ No newline at end of file diff --git a/python/test/files/junit-xml/xunit/xunit.annotations b/python/test/files/junit-xml/xunit/xunit.annotations new file mode 100644 index 0000000..82a3ca7 --- /dev/null +++ b/python/test/files/junit-xml/xunit/xunit.annotations @@ -0,0 +1,41 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 2 tests pass in 0s', + 'summary': + '2 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLdTSyxCCEDfyMQtUxrsLCgrdvJnJO6kCLT1dyDQQ6iOED9aIPICzCceEaQh5mmtmPg' + 'rRFzsc7ZspDrorJKLD0mC01Zdjq3v5tz3cyB5uXcIZAyFBScRvnF43yWbLod0AAAA=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 2 tests, see "Raw output" for the full list of tests.', + 'title': '2 tests found', + 'raw_details': + 'mytestapp.Tests.AttriubteTests.GetTestNoFeature\n' + 'mytestapp.Tests.AttriubteTests.SetTestNoFeature' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/junit-xml/xunit/xunit.junit-xml b/python/test/files/junit-xml/xunit/xunit.junit-xml new file mode 100644 index 0000000..4cbcee2 --- /dev/null +++ b/python/test/files/junit-xml/xunit/xunit.junit-xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/python/test/files/junit-xml/xunit/xunit.results b/python/test/files/junit-xml/xunit/xunit.results new file mode 100644 index 0000000..9088d0c --- /dev/null +++ b/python/test/files/junit-xml/xunit/xunit.results @@ -0,0 +1,49 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=2, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Rhino Collection', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='xunit/xunit.xml', + test_file=None, + line=None, + class_name=None, + 
test_name='mytestapp.Tests.AttriubteTests.SetTestNoFeature', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.4540354 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xunit.xml', + test_file=None, + line=None, + class_name=None, + test_name='mytestapp.Tests.AttriubteTests.GetTestNoFeature', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0039778 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/junit-xml/xunit/xunit.xml b/python/test/files/junit-xml/xunit/xunit.xml new file mode 100644 index 0000000..cf8c6a0 --- /dev/null +++ b/python/test/files/junit-xml/xunit/xunit.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + diff --git a/python/test/files/mocha/tests.annotations b/python/test/files/mocha/tests.annotations new file mode 100644 index 0000000..4f47301 --- /dev/null +++ b/python/test/files/mocha/tests.annotations @@ -0,0 +1,114 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors, 1 fail, 1 skipped, 2 pass in 12s', + 'summary': + '5 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '12s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KobSSx8TKGoMaNfMwClfHu8hXs3sxm56Y7yM3SmYwDodaD+2D1yB0YHZEFDhcXb1Pdi/' + 'VCBMGaOOEq31nsHORPbIgGi0Gvay/OPpe51RJ3scR9SxilwAUoi9iD0+cFI3viF94AAAA' + '=\n', + 'annotations': [ + { + 'path': '/home/runner/work/mocha/mocha/test/unit/runner.spec.js', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'tests.json', + 'title': + 'Runner instance method grep() should update the runner.total with ' + 'number of matched tests failed', + 'raw_details': + 'Required\nError: Required at Context. 
' + '(test/unit/runner.spec.js:43:15) at callFn ' + '(lib/runnable.js:366:21) at Test.Runnable.run ' + '(lib/runnable.js:354:5) at Runner.runTest ' + '(lib/runner.js:666:10) at ' + '/home/runner/work/mocha/mocha/lib/runner.js:789:12 at next ' + '(lib/runner.js:581:14) at ' + '/home/runner/work/mocha/mocha/lib/runner.js:591:7 at next ' + '(lib/runner.js:474:14) at Immediate._onImmediate ' + '(lib/runner.js:559:5) at processImmediate ' + '(internal/timers.js:464:21)' + }, + { + 'path': '/home/runner/work/mocha/mocha/test/unit/test.spec.js', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'tests.json\u2003[took 4s]', + 'title': 'Test .clone() should copy the title with error', + 'raw_details': + "[31m[1mexpected[22m[39m [36m'To be cloned'[39m [31m[1mto " + "be[22m[39m [36m'Not to be cloned'[39m[41m[30mTo[39m[49m[31m be " + "cloned[39m[42m[30mNot[39m[49m[32m [39m[42m[30mto [39m[49m[32mbe " + "cloned[39m\nUnexpectedError\n[31m[1mexpected[22m[39m [36m'To be " + "cloned'[39m [31m[1mto be[22m[39m [36m'Not to be " + "cloned'[39m[41m[30mTo[39m[49m[31m be " + "cloned[39m[42m[30mNot[39m[49m[32m [39m[42m[30mto [39m[49m[32mbe " + "cloned[39m\nUnexpectedError: [31m[1mexpected[22m[39m [36m'To be " + "cloned'[39m [31m[1mto be[22m[39m [36m'Not to be " + "cloned'[39m[41m[30mTo[39m[49m[31m be " + "cloned[39m[42m[30mNot[39m[49m[32m [39m[42m[30mto [39m[49m[32mbe " + "cloned[39m at Context. (test/unit/test.spec.js:26:7) " + " at callFn (lib/runnable.js:366:21) at Test.Runnable.run " + "(lib/runnable.js:354:5) at Runner.runTest " + "(lib/runner.js:666:10) at " + "/home/runner/work/mocha/mocha/lib/runner.js:789:12 at next " + "(lib/runner.js:581:14) at " + "/home/runner/work/mocha/mocha/lib/runner.js:591:7 at next " + "(lib/runner.js:474:14) at cbHookRun (lib/runner.js:539:7) at " + "done (lib/runnable.js:310:5) at callFn (lib/runnable.js:389:7) " + " at Hook.Runnable.run (lib/runnable.js:354:5) at next " + "(lib/runner.js:498:10) at Immediate._onImmediate " + "(lib/runner.js:559:5) at processImmediate " + "(internal/timers.js:464:21) set UNEXPECTED_FULL_TRACE=true to " + "see the full stack trace" + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'Mocha instance method run() should initialize the stats collector' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 5 tests, see "Raw output" for the full list of tests.', + 'title': '5 tests found', + 'raw_details': + 'Context Siblings sequestered sibling should work\nContext nested ' + 'should work\nMocha instance method run() should initialize the ' + 'stats collector\nRunner instance method grep() should update the ' + 'runner.total with number of matched tests\nTest .clone() should ' + 'copy the title' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/mocha/tests.json b/python/test/files/mocha/tests.json new file mode 100644 index 0000000..3435f7e --- /dev/null +++ b/python/test/files/mocha/tests.json @@ -0,0 +1,140 @@ +{ + "stats": { + "suites": 3, + "tests": 5, + "passes": 2, + "pending": 1, + "failures": 2, + "start": "2023-01-14T21:18:12.420Z", + "end": "2023-01-14T21:18:16.042Z", + "duration": 12 + }, + "tests": [ + { + "title": "should work", + "fullTitle": "Context nested should work", + "file": 
"/home/runner/work/mocha/mocha/test/unit/context.spec.js", + "duration": 3, + "currentRetry": 0, + "speed": "fast", + "err": {} + }, + { + "title": "should work", + "fullTitle": "Context Siblings sequestered sibling should work", + "file": "/home/runner/work/mocha/mocha/test/unit/context.spec.js", + "duration": 1, + "currentRetry": 0, + "speed": "fast", + "err": {} + }, + { + "title": "should initialize the stats collector", + "fullTitle": "Mocha instance method run() should initialize the stats collector", + "file": "/home/runner/work/mocha/mocha/test/unit/mocha.spec.js", + "currentRetry": 0, + "err": {} + }, + { + "title": "should update the runner.total with number of matched tests", + "fullTitle": "Runner instance method grep() should update the runner.total with number of matched tests", + "file": "/home/runner/work/mocha/mocha/test/unit/runner.spec.js", + "duration": 0, + "currentRetry": 0, + "err": { + "stack": "Error: Required\n at Context. (test/unit/runner.spec.js:43:15)\n at callFn (lib/runnable.js:366:21)\n at Test.Runnable.run (lib/runnable.js:354:5)\n at Runner.runTest (lib/runner.js:666:10)\n at /home/runner/work/mocha/mocha/lib/runner.js:789:12\n at next (lib/runner.js:581:14)\n at /home/runner/work/mocha/mocha/lib/runner.js:591:7\n at next (lib/runner.js:474:14)\n at Immediate._onImmediate (lib/runner.js:559:5)\n at processImmediate (internal/timers.js:464:21)", + "message": "Required" + } + }, + { + "title": "should copy the title", + "fullTitle": "Test .clone() should copy the title", + "file": "/home/runner/work/mocha/mocha/test/unit/test.spec.js", + "duration": 4, + "currentRetry": 0, + "err": { + "errorMode": "default", + "stack": "UnexpectedError: \n\u001b[31m\u001b[1mexpected\u001b[22m\u001b[39m \u001b[36m'To be cloned'\u001b[39m \u001b[31m\u001b[1mto be\u001b[22m\u001b[39m \u001b[36m'Not to be cloned'\u001b[39m\n\n\u001b[41m\u001b[30mTo\u001b[39m\u001b[49m\u001b[31m be cloned\u001b[39m\n\u001b[42m\u001b[30mNot\u001b[39m\u001b[49m\u001b[32m \u001b[39m\u001b[42m\u001b[30mto \u001b[39m\u001b[49m\u001b[32mbe cloned\u001b[39m\n\n at Context. 
(test/unit/test.spec.js:26:7)\n at callFn (lib/runnable.js:366:21)\n at Test.Runnable.run (lib/runnable.js:354:5)\n at Runner.runTest (lib/runner.js:666:10)\n at /home/runner/work/mocha/mocha/lib/runner.js:789:12\n at next (lib/runner.js:581:14)\n at /home/runner/work/mocha/mocha/lib/runner.js:591:7\n at next (lib/runner.js:474:14)\n at cbHookRun (lib/runner.js:539:7)\n at done (lib/runnable.js:310:5)\n at callFn (lib/runnable.js:389:7)\n at Hook.Runnable.run (lib/runnable.js:354:5)\n at next (lib/runner.js:498:10)\n at Immediate._onImmediate (lib/runner.js:559:5)\n at processImmediate (internal/timers.js:464:21)\n set UNEXPECTED_FULL_TRACE=true to see the full stack trace", + "parent": { + "errorMode": "default", + "parent": { + "errorMode": "default", + "parent": null, + "name": "UnexpectedError", + "label": "should equal" + }, + "name": "UnexpectedError" + }, + "name": "UnexpectedError", + "message": "\n\u001b[31m\u001b[1mexpected\u001b[22m\u001b[39m \u001b[36m'To be cloned'\u001b[39m \u001b[31m\u001b[1mto be\u001b[22m\u001b[39m \u001b[36m'Not to be cloned'\u001b[39m\n\n\u001b[41m\u001b[30mTo\u001b[39m\u001b[49m\u001b[31m be cloned\u001b[39m\n\u001b[42m\u001b[30mNot\u001b[39m\u001b[49m\u001b[32m \u001b[39m\u001b[42m\u001b[30mto \u001b[39m\u001b[49m\u001b[32mbe cloned\u001b[39m\n", + "_hasSerializedErrorMessage": true + } + } + ], + "pending": [ + { + "title": "should initialize the stats collector", + "fullTitle": "Mocha instance method run() should initialize the stats collector", + "file": "/home/runner/work/mocha/mocha/test/unit/mocha.spec.js", + "currentRetry": 0, + "err": {} + } + ], + "failures": [ + { + "title": "should update the runner.total with number of matched tests", + "fullTitle": "Runner instance method grep() should update the runner.total with number of matched tests", + "file": "/home/runner/work/mocha/mocha/test/unit/runner.spec.js", + "duration": 0, + "currentRetry": 0, + "err": { + "stack": "Error: Required\n at Context. (test/unit/runner.spec.js:43:15)\n at callFn (lib/runnable.js:366:21)\n at Test.Runnable.run (lib/runnable.js:354:5)\n at Runner.runTest (lib/runner.js:666:10)\n at /home/runner/work/mocha/mocha/lib/runner.js:789:12\n at next (lib/runner.js:581:14)\n at /home/runner/work/mocha/mocha/lib/runner.js:591:7\n at next (lib/runner.js:474:14)\n at Immediate._onImmediate (lib/runner.js:559:5)\n at processImmediate (internal/timers.js:464:21)", + "message": "Required" + } + }, + { + "title": "should copy the title", + "fullTitle": "Test .clone() should copy the title", + "file": "/home/runner/work/mocha/mocha/test/unit/test.spec.js", + "duration": 4, + "currentRetry": 0, + "err": { + "errorMode": "default", + "stack": "UnexpectedError: \n\u001b[31m\u001b[1mexpected\u001b[22m\u001b[39m \u001b[36m'To be cloned'\u001b[39m \u001b[31m\u001b[1mto be\u001b[22m\u001b[39m \u001b[36m'Not to be cloned'\u001b[39m\n\n\u001b[41m\u001b[30mTo\u001b[39m\u001b[49m\u001b[31m be cloned\u001b[39m\n\u001b[42m\u001b[30mNot\u001b[39m\u001b[49m\u001b[32m \u001b[39m\u001b[42m\u001b[30mto \u001b[39m\u001b[49m\u001b[32mbe cloned\u001b[39m\n\n at Context. 
(test/unit/test.spec.js:26:7)\n at callFn (lib/runnable.js:366:21)\n at Test.Runnable.run (lib/runnable.js:354:5)\n at Runner.runTest (lib/runner.js:666:10)\n at /home/runner/work/mocha/mocha/lib/runner.js:789:12\n at next (lib/runner.js:581:14)\n at /home/runner/work/mocha/mocha/lib/runner.js:591:7\n at next (lib/runner.js:474:14)\n at cbHookRun (lib/runner.js:539:7)\n at done (lib/runnable.js:310:5)\n at callFn (lib/runnable.js:389:7)\n at Hook.Runnable.run (lib/runnable.js:354:5)\n at next (lib/runner.js:498:10)\n at Immediate._onImmediate (lib/runner.js:559:5)\n at processImmediate (internal/timers.js:464:21)\n set UNEXPECTED_FULL_TRACE=true to see the full stack trace", + "parent": { + "errorMode": "default", + "parent": { + "errorMode": "default", + "parent": null, + "name": "UnexpectedError", + "label": "should equal" + }, + "name": "UnexpectedError" + }, + "name": "UnexpectedError", + "message": "\n\u001b[31m\u001b[1mexpected\u001b[22m\u001b[39m \u001b[36m'To be cloned'\u001b[39m \u001b[31m\u001b[1mto be\u001b[22m\u001b[39m \u001b[36m'Not to be cloned'\u001b[39m\n\n\u001b[41m\u001b[30mTo\u001b[39m\u001b[49m\u001b[31m be cloned\u001b[39m\n\u001b[42m\u001b[30mNot\u001b[39m\u001b[49m\u001b[32m \u001b[39m\u001b[42m\u001b[30mto \u001b[39m\u001b[49m\u001b[32mbe cloned\u001b[39m\n", + "_hasSerializedErrorMessage": true + } + } + ], + "passes": [ + { + "title": "should work", + "fullTitle": "Context nested should work", + "file": "/home/runner/work/mocha/mocha/test/unit/context.spec.js", + "duration": 3, + "currentRetry": 0, + "speed": "fast", + "err": {} + }, + { + "title": "should work", + "fullTitle": "Context Siblings sequestered sibling should work", + "file": "/home/runner/work/mocha/mocha/test/unit/context.spec.js", + "duration": 1, + "currentRetry": 0, + "speed": "fast", + "err": {} + } + ] +} diff --git a/python/test/files/mocha/tests.junit-xml b/python/test/files/mocha/tests.junit-xml new file mode 100644 index 0000000..8654609 --- /dev/null +++ b/python/test/files/mocha/tests.junit-xml @@ -0,0 +1,17 @@ + + + + + + + + + (test/unit/runner.spec.js:43:15) at callFn (lib/runnable.js:366:21) at Test.Runnable.run (lib/runnable.js:354:5) at Runner.runTest (lib/runner.js:666:10) at /home/runner/work/mocha/mocha/lib/runner.js:789:12 at next (lib/runner.js:581:14) at /home/runner/work/mocha/mocha/lib/runner.js:591:7 at next (lib/runner.js:474:14) at Immediate._onImmediate (lib/runner.js:559:5) at processImmediate (internal/timers.js:464:21)]]> + + + (test/unit/test.spec.js:26:7) at callFn (lib/runnable.js:366:21) at Test.Runnable.run (lib/runnable.js:354:5) at Runner.runTest (lib/runner.js:666:10) at /home/runner/work/mocha/mocha/lib/runner.js:789:12 at next (lib/runner.js:581:14) at /home/runner/work/mocha/mocha/lib/runner.js:591:7 at next (lib/runner.js:474:14) at cbHookRun (lib/runner.js:539:7) at done (lib/runnable.js:310:5) at callFn (lib/runnable.js:389:7) at Hook.Runnable.run (lib/runnable.js:354:5) at next (lib/runner.js:498:10) at Immediate._onImmediate (lib/runner.js:559:5) at processImmediate (internal/timers.js:464:21) set UNEXPECTED_FULL_TRACE=true to see the full stack trace]]> + + diff --git a/python/test/files/mocha/tests.results b/python/test/files/mocha/tests.results new file mode 100644 index 0000000..f60c1e3 --- /dev/null +++ b/python/test/files/mocha/tests.results @@ -0,0 +1,121 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=5, + suite_skipped=1, + suite_failures=1, + suite_errors=1, + suite_time=12, + 
suite_details=[ + publish.unittestresults.UnitTestSuite( + name=None, + tests=5, + skipped=1, + failures=1, + errors=1, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='tests.json', + test_file='/home/runner/work/mocha/mocha/test/unit/context.spec.js', + line=None, + class_name=None, + test_name='Context nested should work', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tests.json', + test_file='/home/runner/work/mocha/mocha/test/unit/context.spec.js', + line=None, + class_name=None, + test_name='Context Siblings sequestered sibling should work', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.0 + ), + publish.unittestresults.UnitTestCase( + result_file='tests.json', + test_file='/home/runner/work/mocha/mocha/test/unit/mocha.spec.js', + line=None, + class_name=None, + test_name='Mocha instance method run() should initialize the stats collector', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='tests.json', + test_file='/home/runner/work/mocha/mocha/test/unit/runner.spec.js', + line=None, + class_name=None, + test_name='Runner instance method grep() should update the runner.total with ' + 'number of matched tests', + result='failure', + message='Required', + content='Required\nError: Required at Context. ' + '(test/unit/runner.spec.js:43:15) at callFn ' + '(lib/runnable.js:366:21) at Test.Runnable.run ' + '(lib/runnable.js:354:5) at Runner.runTest (lib/runner.js:666:10) ' + ' at /home/runner/work/mocha/mocha/lib/runner.js:789:12 at next ' + '(lib/runner.js:581:14) at ' + '/home/runner/work/mocha/mocha/lib/runner.js:591:7 at next ' + '(lib/runner.js:474:14) at Immediate._onImmediate ' + '(lib/runner.js:559:5) at processImmediate ' + '(internal/timers.js:464:21)', + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='tests.json', + test_file='/home/runner/work/mocha/mocha/test/unit/test.spec.js', + line=None, + class_name=None, + test_name='Test .clone() should copy the title', + result='error', + message="[31m[1mexpected[22m[39m [36m'To be cloned'[39m [31m[1mto be[22m[39m " + "[36m'Not to be cloned'[39m[41m[30mTo[39m[49m[31m be " + "cloned[39m[42m[30mNot[39m[49m[32m [39m[42m[30mto [39m[49m[32mbe " + "cloned[39m", + content="UnexpectedError\n[31m[1mexpected[22m[39m [36m'To be cloned'[39m " + "[31m[1mto be[22m[39m [36m'Not to be " + "cloned'[39m[41m[30mTo[39m[49m[31m be " + "cloned[39m[42m[30mNot[39m[49m[32m [39m[42m[30mto [39m[49m[32mbe " + "cloned[39m\nUnexpectedError: [31m[1mexpected[22m[39m [36m'To be " + "cloned'[39m [31m[1mto be[22m[39m [36m'Not to be " + "cloned'[39m[41m[30mTo[39m[49m[31m be " + "cloned[39m[42m[30mNot[39m[49m[32m [39m[42m[30mto [39m[49m[32mbe " + "cloned[39m at Context. 
(test/unit/test.spec.js:26:7) " + " at callFn (lib/runnable.js:366:21) at Test.Runnable.run " + "(lib/runnable.js:354:5) at Runner.runTest (lib/runner.js:666:10) " + " at /home/runner/work/mocha/mocha/lib/runner.js:789:12 at next " + "(lib/runner.js:581:14) at " + "/home/runner/work/mocha/mocha/lib/runner.js:591:7 at next " + "(lib/runner.js:474:14) at cbHookRun (lib/runner.js:539:7) at " + "done (lib/runnable.js:310:5) at callFn (lib/runnable.js:389:7) " + "at Hook.Runnable.run (lib/runnable.js:354:5) at next " + "(lib/runner.js:498:10) at Immediate._onImmediate " + "(lib/runner.js:559:5) at processImmediate " + "(internal/timers.js:464:21) set UNEXPECTED_FULL_TRACE=true to see " + "the full stack trace", + stdout=None, + stderr=None, + time=4.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/mstest/clicketyclackety.annotations b/python/test/files/nunit/mstest/clicketyclackety.annotations new file mode 100644 index 0000000..17c31d8 --- /dev/null +++ b/python/test/files/nunit/mstest/clicketyclackety.annotations @@ -0,0 +1,189 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '10 fail, 12 pass in 0s', + 'summary': + '\u205f\u20041 files\u2004\u2003\u205f\u20048 suites\u2004\u2003\u2002' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '22 tests\u200312 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '10 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n23 runs\u2006\u2003' + '13 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '10 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MSw6AIAxEr' + '0JYu/CzMV7GENTYCGJKWRnvbiEo7Ppmpu+WG5jVy0l0jZA+ACUYGZaAisCdjC0jFxSrvv' + '9g9kHr+FklB1z1ft4UmDgpyYroMG8wnEk55Ps3lqAIE9e+FNQ67awFYsiX8LuSzwvzas/' + 'j4wAAAA==\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'BakeDrawings failed', + 'raw_details': + 'System.InvalidOperationException : Assert.Equals should not be ' + 'used. 
Use Assert.AreEqual instead.\n at ' + 'NUnit.Framework.Assert.Equals(Object a, Object b)\n at ' + 'MyProject.Tests.Real.UserInput.BakeDrawingCommandTests.BakeDrawings' + '()' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'SilentRun failed', + 'raw_details': + 'System.NullReferenceException : Object reference not set to an ' + 'instance of an object.\n at ' + 'MyProject.Tests.Real.UserInput.ProjectInitCommandTests.SilentRun()' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'DiscardDrawingTests failed', + 'raw_details': + "System.IO.DirectoryNotFoundException : Could not find a part of " + "the path " + "'C:\\Users\\USER\\actions-runner\\_work\\MyProject\\MyProject\\SC\\f4a8fa46" + "-245d-4cd5-88c1-80fcfbda6369'.\n at " + "System.IO.__Error.WinIOError(Int32 errorCode, String " + "maybeFullPath)\n at " + "System.IO.FileSystemEnumerableIterator`1.CommonInit()\n at " + "System.IO.FileSystemEnumerableIterator`1..ctor(String path, String " + "originalUserPath, String searchPattern, SearchOption searchOption, " + "SearchResultHandler`1 resultHandler, Boolean checkHost)\n at " + "System.IO.Directory.GetFiles(String path)\n at " + "MyProject.Tests.Real.FlagTests.DiscardDrawingTests()" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'LoadDrawingsEventFlagTests failed', + 'raw_details': + ' Expected: 3\n But was: 0\n at ' + 'MyProject.Tests.Real.FlagTests.LoadDrawingsEventFlagTests()' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'ResetProjectEventFlagTests failed', + 'raw_details': + 'System.NullReferenceException : Object reference not set to an ' + 'instance of an object.\n at ' + 'MyProject.Tests.Real.FlagTests.ResetProjectEventFlagTests()' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'SetupLayersEventFlagTests failed', + 'raw_details': + "om.Exceptions.DocumentException : Document should be initlised, " + "but isn't!\n at MyProject.Runtime.Events.SetupLayers.Execute()\n " + " at MyProject.Tests.Real.FlagTests.SetupLayersEventFlagTests()" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'SetupPipeEventFlagTests failed', + 'raw_details': + 'System.NullReferenceException : Object reference not set to an ' + 'instance of an object.\n at ' + 'MyProject.Tests.Real.FlagTests.SetupPipeEventFlagTests()' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'DrawingConstants failed', + 'raw_details': + 'System.NullReferenceException : Object reference not set to an ' + 'instance of an object.\n at ' + 'MyProject.Tests.Real.RuntimeTests.DrawingConstants()' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'FileConstants failed', + 'raw_details': + 'System.NullReferenceException : Object reference not set 
to an ' + 'instance of an object.\n at ' + 'MyProject.Tests.Real.RuntimeTests.FileConstants()' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/clicketyclackety.xml\u2003[took 0s]', + 'title': 'PluginConstants failed', + 'raw_details': + "System.MissingMethodException : Method not found: 'System.Object " + "MyProject.MyProjectPlugIn.get_Instance()'.\n at " + "MyProject.Tests.Real.RuntimeTests.PluginConstants()" + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 22 tests, see "Raw output" for the full list of tests.', + 'title': '22 tests found', + 'raw_details': + 'BakeDrawings\nDeleteMyProjectObjectEventFlagTests\n' + 'DiscardDrawingTests\nDisplayGraphicConstants\nDrawingConstants\n' + 'EventRegisterTests\nFileConstants\nLoadDrawingsEventFlagTests\n' + 'LoadedDrawings\nModifyNewObjectUniqueIdEventFlagTests\n' + 'MoveControlPointEventFlagTests\nObjectConstants\nPluginConstants\n' + 'ResetProjectEventFlagTests\nSetupLayersEventFlagTests\n' + 'SetupPipeEventFlagTests\nSilentRun\nTest\nUIPanelConstants\n' + 'UIPropertyConstants\nUpdateDrawingsPanelEventFlagTests\n' + 'UpdatePropertiesPanelEventFlagTests' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/mstest/clicketyclackety.junit-xml b/python/test/files/nunit/mstest/clicketyclackety.junit-xml new file mode 100644 index 0000000..e909256 --- /dev/null +++ b/python/test/files/nunit/mstest/clicketyclackety.junit-xml @@ -0,0 +1,96 @@ + + + + + + + + + + + + + + + + + + + + at NUnit.Framework.Assert.Equals(Object a, Object b) + at MyProject.Tests.Real.UserInput.BakeDrawingCommandTests.BakeDrawings() + + + + + + at MyProject.Tests.Real.UserInput.ProjectInitCommandTests.SilentRun() + + + + + + + + at System.IO.__Error.WinIOError(Int32 errorCode, String maybeFullPath) + at System.IO.FileSystemEnumerableIterator`1.CommonInit() + at System.IO.FileSystemEnumerableIterator`1..ctor(String path, String originalUserPath, String searchPattern, SearchOption searchOption, SearchResultHandler`1 resultHandler, Boolean checkHost) + at System.IO.Directory.GetFiles(String path) + at MyProject.Tests.Real.FlagTests.DiscardDrawingTests() + + + at MyProject.Tests.Real.FlagTests.LoadDrawingsEventFlagTests() + + + + + + + at MyProject.Tests.Real.FlagTests.ResetProjectEventFlagTests() + + + at MyProject.Runtime.Events.SetupLayers.Execute() + at MyProject.Tests.Real.FlagTests.SetupLayersEventFlagTests() + + + at MyProject.Tests.Real.FlagTests.SetupPipeEventFlagTests() + + + + + + + + + + + + + + + at MyProject.Tests.Real.RuntimeTests.DrawingConstants() + + + at MyProject.Tests.Real.RuntimeTests.FileConstants() + + + + at MyProject.Tests.Real.RuntimeTests.PluginConstants() + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/mstest/clicketyclackety.results b/python/test/files/nunit/mstest/clicketyclackety.results new file mode 100644 index 0000000..ebe15d2 --- /dev/null +++ b/python/test/files/nunit/mstest/clicketyclackety.results @@ -0,0 +1,406 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=8, + suite_tests=23, + suite_skipped=0, + suite_failures=10, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MyProject.Tests.Real.UserInput.BakeDrawingCommandTests', + tests=1, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + 
name='MyProject.Tests.Real.UserInput.ProjectInitCommandTests', + tests=1, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='MyProject.Tests.Real.FlagTests', + tests=10, + skipped=0, + failures=5, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='MyProject.Tests.Real.NewDocumentTests', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='MyProject.Tests.Real.PipeTests', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='MyProject.Tests.Real.RuntimeTests', + tests=7, + skipped=0, + failures=3, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='MyProject.Tests.Real.SwitchingTests', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='MyProject.Tests.Real.Tests.RuntimeConstants', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='BakeDrawings', + result='failure', + message='System.InvalidOperationException : Assert.Equals should not be used. ' + 'Use Assert.AreEqual instead.', + content=' at NUnit.Framework.Assert.Equals(Object a, Object b)\n at ' + 'MyProject.Tests.Real.UserInput.BakeDrawingCommandTests.BakeDrawings()', + stdout=None, + stderr=None, + time=0.135485 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='SilentRun', + result='failure', + message='System.NullReferenceException : Object reference not set to an ' + 'instance of an object.', + content=' at ' + 'MyProject.Tests.Real.UserInput.ProjectInitCommandTests.SilentRun()', + stdout=None, + stderr=None, + time=0.052338 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='DeleteMyProjectObjectEventFlagTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000201 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='DiscardDrawingTests', + result='failure', + message="System.IO.DirectoryNotFoundException : Could not find a part of the " + "path " + "'C:\\Users\\USER\\actions-runner\\_work\\MyProject\\MyProject\\SC\\f4a8fa46-2" + "45d-4cd5-88c1-80fcfbda6369'.", + content=' at System.IO.__Error.WinIOError(Int32 errorCode, String ' + 'maybeFullPath)\n at ' + 'System.IO.FileSystemEnumerableIterator`1.CommonInit()\n at ' + 'System.IO.FileSystemEnumerableIterator`1..ctor(String path, String ' + 'originalUserPath, String searchPattern, SearchOption searchOption, ' + 'SearchResultHandler`1 resultHandler, Boolean checkHost)\n at ' + 'System.IO.Directory.GetFiles(String path)\n at ' + 'MyProject.Tests.Real.FlagTests.DiscardDrawingTests()', + stdout=None, + stderr=None, + time=0.004832 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='LoadDrawingsEventFlagTests', + result='failure', + message=' Expected: 3\n But was: 0\n', + 
content=' at MyProject.Tests.Real.FlagTests.LoadDrawingsEventFlagTests()\n', + stdout=None, + stderr=None, + time=0.057537 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='ModifyNewObjectUniqueIdEventFlagTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000104 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='MoveControlPointEventFlagTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000112 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='ResetProjectEventFlagTests', + result='failure', + message='System.NullReferenceException : Object reference not set to an ' + 'instance of an object.', + content=' at MyProject.Tests.Real.FlagTests.ResetProjectEventFlagTests()', + stdout=None, + stderr=None, + time=0.025094 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='SetupLayersEventFlagTests', + result='failure', + message="om.Exceptions.DocumentException : Document should be initlised, but " + "isn't!", + content=' at MyProject.Runtime.Events.SetupLayers.Execute()\n at ' + 'MyProject.Tests.Real.FlagTests.SetupLayersEventFlagTests()', + stdout=None, + stderr=None, + time=0.00231 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='SetupPipeEventFlagTests', + result='failure', + message='System.NullReferenceException : Object reference not set to an ' + 'instance of an object.', + content=' at MyProject.Tests.Real.FlagTests.SetupPipeEventFlagTests()', + stdout=None, + stderr=None, + time=0.233069 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='UpdateDrawingsPanelEventFlagTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000363 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='UpdatePropertiesPanelEventFlagTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.8e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='EventRegisterTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005957 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='Test', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000102 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='DisplayGraphicConstants', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.6e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='DrawingConstants', + 
result='failure', + message='System.NullReferenceException : Object reference not set to an ' + 'instance of an object.', + content=' at MyProject.Tests.Real.RuntimeTests.DrawingConstants()', + stdout=None, + stderr=None, + time=0.001262 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='FileConstants', + result='failure', + message='System.NullReferenceException : Object reference not set to an ' + 'instance of an object.', + content=' at MyProject.Tests.Real.RuntimeTests.FileConstants()', + stdout=None, + stderr=None, + time=0.001455 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='ObjectConstants', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00029 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='PluginConstants', + result='failure', + message="System.MissingMethodException : Method not found: 'System.Object " + "MyProject.MyProjectPlugIn.get_Instance()'.", + content=' at MyProject.Tests.Real.RuntimeTests.PluginConstants()', + stdout=None, + stderr=None, + time=0.005593 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='UIPanelConstants', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007398 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='UIPropertyConstants', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000517 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='Test', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000254 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/clicketyclackety.xml', + test_file=None, + line=None, + class_name='', + test_name='LoadedDrawings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000986 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/mstest/clicketyclackety.xml b/python/test/files/nunit/mstest/clicketyclackety.xml new file mode 100644 index 0000000..bb3059b --- /dev/null +++ b/python/test/files/nunit/mstest/clicketyclackety.xml @@ -0,0 +1,160 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/mstest/pickles.annotations b/python/test/files/nunit/mstest/pickles.annotations new file mode 100644 index 0000000..b3ee25b --- /dev/null +++ b/python/test/files/nunit/mstest/pickles.annotations @@ -0,0 +1,79 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 3 pass in 0s', + 'summary': + '4 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 
'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '2 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBBEr' + '0KoLfxVXoYQxLiRj1mgMt7dFSHSzZuZvItvYHTgCxs6xkOCmGEkWBPKCN4R9oQ0xHeaax' + 'YhKUXF9BcHnO1bbBJMUX+FRvRYLphc9b2x1X382zI3ssytS3lrIRKUxMIu+f0AuKmg790' + 'AAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/pickles.xml\u2003[took 0s]', + 'title': 'Pickles.TestHarness.AdditionFeature.FailToAddTwoNumbers failed', + 'raw_details': + '\n at ' + 'Pickles.TestHarness.xUnit.Steps.ThenTheResultShouldBePass(Int32 ' + 'result) in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.NUnit\\Steps.cs:line 26\nat lambda_method(Closure , ' + 'IContextManager , Int32 )\nat ' + 'TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManag' + 'er contextManager, Object[] arguments, ITestTracer testTracer, ' + 'TimeSpan& duration)\nat ' + 'TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextMan' + 'ager contextManager, ITestTracer testTracer, Object[] arguments, ' + 'TimeSpan& duration)\nat ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMat' + 'ch(BindingMatch match, Object[] arguments)\nat ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(St' + 'epArgs stepArgs)\nat ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastSte' + 'p()\nat TechTalk.SpecFlow.TestRunner.CollectScenarioErrors()\nat ' + 'Pickles.TestHarness.AdditionFeature.ScenarioCleanup() in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.NUnit\\Addition.feature.cs:line 0\nat ' + 'Pickles.TestHarness.AdditionFeature.FailToAddTwoNumbers() in ' + 'c:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.NUnit\\Addition.feature:line 18' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 4 tests, see "Raw output" for the full list of tests.', + 'title': '4 tests found', + 'raw_details': + 'Pickles.TestHarness.AdditionFeature.AddTwoNumbers\n' + 'Pickles.TestHarness.AdditionFeature.AddingSeveralNumbers("40","50",' + '"90",System.String[])\n' + 'Pickles.TestHarness.AdditionFeature.AddingSeveralNumbers("60","70",' + '"130",System.String[])\n' + 'Pickles.TestHarness.AdditionFeature.FailToAddTwoNumbers' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/mstest/pickles.junit-xml b/python/test/files/nunit/mstest/pickles.junit-xml new file mode 100644 index 0000000..2e954f6 --- /dev/null +++ b/python/test/files/nunit/mstest/pickles.junit-xml @@ -0,0 +1,53 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + at Pickles.TestHarness.xUnit.Steps.ThenTheResultShouldBePass(Int32 result) in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.NUnit\Steps.cs:line 26 +at lambda_method(Closure , 
IContextManager , Int32 ) +at TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager contextManager, Object[] arguments, ITestTracer testTracer, TimeSpan& duration) +at TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManager contextManager, ITestTracer testTracer, Object[] arguments, TimeSpan& duration) +at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch(BindingMatch match, Object[] arguments) +at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(StepArgs stepArgs) +at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep() +at TechTalk.SpecFlow.TestRunner.CollectScenarioErrors() +at Pickles.TestHarness.AdditionFeature.ScenarioCleanup() in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.NUnit\Addition.feature.cs:line 0 +at Pickles.TestHarness.AdditionFeature.FailToAddTwoNumbers() in c:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.NUnit\Addition.feature:line 18 + + + + + + + + + + + + diff --git a/python/test/files/nunit/mstest/pickles.results b/python/test/files/nunit/mstest/pickles.results new file mode 100644 index 0000000..5e3f4d9 --- /dev/null +++ b/python/test/files/nunit/mstest/pickles.results @@ -0,0 +1,109 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=4, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Pickles.TestHarness.AddingSeveralNumbers', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='Pickles.TestHarness.AdditionFeature', + tests=4, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='', + test_name='Pickles.TestHarness.AdditionFeature.AddingSeveralNumbers("60","70","' + '130",System.String[])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.137 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='', + test_name='Pickles.TestHarness.AdditionFeature.AddingSeveralNumbers("40","50","' + '90",System.String[])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.009 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='', + test_name='Pickles.TestHarness.AdditionFeature.AddTwoNumbers', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='', + test_name='Pickles.TestHarness.AdditionFeature.FailToAddTwoNumbers', + result='failure', + message=None, + content='\n at ' + 'Pickles.TestHarness.xUnit.Steps.ThenTheResultShouldBePass(Int32 ' + 'result) in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.NUnit\\Steps.cs:line 26\nat lambda_method(Closure , ' + 'IContextManager , Int32 )\nat ' + 'TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager' + ' contextManager, Object[] arguments, ITestTracer testTracer, ' + 'TimeSpan& duration)\nat ' + 'TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManag' + 'er contextManager, 
ITestTracer testTracer, Object[] arguments, ' + 'TimeSpan& duration)\nat ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch' + '(BindingMatch match, Object[] arguments)\nat ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(Step' + 'Args stepArgs)\nat ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep(' + ')\nat TechTalk.SpecFlow.TestRunner.CollectScenarioErrors()\nat ' + 'Pickles.TestHarness.AdditionFeature.ScenarioCleanup() in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.NUnit\\Addition.feature.cs:line 0\nat ' + 'Pickles.TestHarness.AdditionFeature.FailToAddTwoNumbers() in ' + 'c:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.NUnit\\Addition.feature:line 18\n\n ', + stdout=None, + stderr=None, + time=0.028 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/mstest/pickles.xml b/python/test/files/nunit/mstest/pickles.xml new file mode 100644 index 0000000..8b245a7 --- /dev/null +++ b/python/test/files/nunit/mstest/pickles.xml @@ -0,0 +1,74 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/mstest/timewarpinc.annotations b/python/test/files/nunit/mstest/timewarpinc.annotations new file mode 100644 index 0000000..94bd876 --- /dev/null +++ b/python/test/files/nunit/mstest/timewarpinc.annotations @@ -0,0 +1,122 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail in 2s', + 'summary': + '1 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '2s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMQQ6AIAwEv' + '0I4e1CPfsYQhNiIYAqcjH+3IgjednbbObkGozyf2NAx7iOED5aIIoCzhCMhDaFMKc8+Sk' + 'lFX4sNjl+hBZjfi0J0mE8w2uJ7Yqt7udoSN7LErUu6fYdAkBPzq+DXDXGDl7HdAAAA\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/timewarpinc.xml\u2003[took 2s]', + 'title': 'ValidateSceneContainer("Assets/Scenes/Grid/GridTest.unity") failed', + 'raw_details': + 'Zenject.ZenjectException : Zenject Validation Failed! 
See errors ' + 'below for details.\n at ' + 'Zenject.Internal.ZenUnityEditorUtil.ValidateCurrentSceneSetup () ' + '[0x0009c] in ' + '/github/workspace/Assets/ThirdParty/Zenject/Source/Editor/ZenUnityE' + 'ditorUtil.cs:82\n at ' + 'MP.Tests.AssetValidatorTest.ValidateSceneContainer (System.String ' + 'scenePath) [0x00009] in ' + '/github/workspace/Assets/Tests/EditorMode/AssetValidatorTest.cs:58\n' + ' at (wrapper managed-to-native) ' + 'System.Reflection.RuntimeMethodInfo.InternalInvoke(System.Reflectio' + 'n.RuntimeMethodInfo,object,object[],System.Exception&)\n at ' + 'System.Reflection.RuntimeMethodInfo.Invoke (System.Object obj, ' + 'System.Reflection.BindingFlags invokeAttr, ' + 'System.Reflection.Binder binder, System.Object[] parameters, ' + 'System.Globalization.CultureInfo culture) [0x0006a] in ' + ':0\nAssertionException: Could ' + 'not find a tilemap tagged with LevelBounds.\nAssertion failure. ' + 'Value was Null\nExpected: Value was not Null\n' + 'UnityEngine.Assertions.Assert.Fail (System.String message, ' + 'System.String userMessage) (at ' + '/home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Asser' + 't/AssertBase.cs:29)\nUnityEngine.Assertions.Assert.IsNotNull ' + '(UnityEngine.Object value, System.String message) (at ' + '/home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Asser' + 't/AssertNull.cs:58)\nUnityEngine.Assertions.Assert.IsNotNull[T] (T ' + 'value, System.String message) (at ' + '/home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Asser' + 't/AssertNull.cs:46)\n' + 'MP.Gameplay.Level.LevelInstaller.InstallBindings () (at ' + 'Assets/Scripts/Gameplay/Level/LevelInstaller.cs:30)\n' + 'Zenject.CompositeMonoInstaller.InstallBindings () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/CompositeMonoInstaller.cs:' + '25)\nZenject.Context.InstallInstallers ' + '(System.Collections.Generic.List`1[T] normalInstallers, ' + 'System.Collections.Generic.List`1[T] normalInstallerTypes, ' + 'System.Collections.Generic.List`1[T] scriptableObjectInstallers, ' + 'System.Collections.Generic.List`1[T] installers, ' + 'System.Collections.Generic.List`1[T] installerPrefabs) (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/Context.cs:218)\n' + 'Zenject.Context.InstallInstallers () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/Context.cs:139)\n' + 'Zenject.SceneContext.InstallBindings ' + '(System.Collections.Generic.List`1[T] injectableMonoBehaviours) ' + '(at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:3' + '46)\nZenject.SceneContext.Install () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:2' + '65)\nZenject.SceneContext.Validate () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:1' + '21)\nZenject.Internal.ZenUnityEditorUtil.ValidateCurrentSceneSetup ' + '() (at ' + 'Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEditorUtil.cs:67)\n' + 'UnityEngine.Debug:LogException(Exception)\n' + 'ModestTree.Log:ErrorException(Exception) (at ' + 'Assets/ThirdParty/Zenject/Source/Internal/Log.cs:60)\n' + 'Zenject.Internal.ZenUnityEditorUtil:ValidateCurrentSceneSetup() ' + '(at ' + 'Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEditorUtil.cs:72)\n' + 'MP.Tests.AssetValidatorTest:ValidateSceneContainer(String) (at ' + 'Assets/Tests/EditorMode/AssetValidatorTest.cs:58)\n' + 'System.Reflection.MethodBase:Invoke(Object, Object[])\n' + 'NUnit.Framework.Internal.Reflect:InvokeMethod(MethodInfo, Object, ' + 
'Object[])\nNUnit.Framework.Internal.MethodWrapper:Invoke(Object, ' + 'Object[])\n' + 'NUnit.Framework.Internal.Commands.TestMethodCommand:RunNonAsyncTest' + 'Method(ITestExecutionContext)\n' + 'NUnit.Framework.Internal.Commands.TestMethodCommand:RunTestMethod(I' + 'TestExecutionContext)\n' + 'NUnit.Framework.Internal.Commands.TestMethodCommand:Execute(ITestEx' + 'ecutionContext)\n' + 'UnityEditor.EditorApplication:Internal_CallUpdateFunctions() (at ' + '/home/bokken/buildslave/unity/build/Editor/Mono/EditorApplication.c' + 's:359)' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There is 1 test, see "Raw output" for the name of the test.', + 'title': '1 test found', + 'raw_details': 'ValidateSceneContainer("Assets/Scenes/Grid/GridTest.unity")' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/mstest/timewarpinc.junit-xml b/python/test/files/nunit/mstest/timewarpinc.junit-xml new file mode 100644 index 0000000..81486b8 --- /dev/null +++ b/python/test/files/nunit/mstest/timewarpinc.junit-xml @@ -0,0 +1,62 @@ + + + + + + + + + + + + + + + + + + + + + + + + at Zenject.Internal.ZenUnityEditorUtil.ValidateCurrentSceneSetup () [0x0009c] in /github/workspace/Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEditorUtil.cs:82 + at MP.Tests.AssetValidatorTest.ValidateSceneContainer (System.String scenePath) [0x00009] in /github/workspace/Assets/Tests/EditorMode/AssetValidatorTest.cs:58 + at (wrapper managed-to-native) System.Reflection.RuntimeMethodInfo.InternalInvoke(System.Reflection.RuntimeMethodInfo,object,object[],System.Exception&) + at System.Reflection.RuntimeMethodInfo.Invoke (System.Object obj, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, System.Object[] parameters, System.Globalization.CultureInfo culture) [0x0006a] in <b67d2f60bf2548a58dc569b37fe71c3d>:0 + AssertionException: Could not find a tilemap tagged with LevelBounds. +Assertion failure. 
Value was Null +Expected: Value was not Null +UnityEngine.Assertions.Assert.Fail (System.String message, System.String userMessage) (at /home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Assert/AssertBase.cs:29) +UnityEngine.Assertions.Assert.IsNotNull (UnityEngine.Object value, System.String message) (at /home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Assert/AssertNull.cs:58) +UnityEngine.Assertions.Assert.IsNotNull[T] (T value, System.String message) (at /home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Assert/AssertNull.cs:46) +MP.Gameplay.Level.LevelInstaller.InstallBindings () (at Assets/Scripts/Gameplay/Level/LevelInstaller.cs:30) +Zenject.CompositeMonoInstaller.InstallBindings () (at Assets/ThirdParty/Zenject/Source/Install/CompositeMonoInstaller.cs:25) +Zenject.Context.InstallInstallers (System.Collections.Generic.List`1[T] normalInstallers, System.Collections.Generic.List`1[T] normalInstallerTypes, System.Collections.Generic.List`1[T] scriptableObjectInstallers, System.Collections.Generic.List`1[T] installers, System.Collections.Generic.List`1[T] installerPrefabs) (at Assets/ThirdParty/Zenject/Source/Install/Contexts/Context.cs:218) +Zenject.Context.InstallInstallers () (at Assets/ThirdParty/Zenject/Source/Install/Contexts/Context.cs:139) +Zenject.SceneContext.InstallBindings (System.Collections.Generic.List`1[T] injectableMonoBehaviours) (at Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:346) +Zenject.SceneContext.Install () (at Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:265) +Zenject.SceneContext.Validate () (at Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:121) +Zenject.Internal.ZenUnityEditorUtil.ValidateCurrentSceneSetup () (at Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEditorUtil.cs:67) +UnityEngine.Debug:LogException(Exception) +ModestTree.Log:ErrorException(Exception) (at Assets/ThirdParty/Zenject/Source/Internal/Log.cs:60) +Zenject.Internal.ZenUnityEditorUtil:ValidateCurrentSceneSetup() (at Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEditorUtil.cs:72) +MP.Tests.AssetValidatorTest:ValidateSceneContainer(String) (at Assets/Tests/EditorMode/AssetValidatorTest.cs:58) +System.Reflection.MethodBase:Invoke(Object, Object[]) +NUnit.Framework.Internal.Reflect:InvokeMethod(MethodInfo, Object, Object[]) +NUnit.Framework.Internal.MethodWrapper:Invoke(Object, Object[]) +NUnit.Framework.Internal.Commands.TestMethodCommand:RunNonAsyncTestMethod(ITestExecutionContext) +NUnit.Framework.Internal.Commands.TestMethodCommand:RunTestMethod(ITestExecutionContext) +NUnit.Framework.Internal.Commands.TestMethodCommand:Execute(ITestExecutionContext) +UnityEditor.EditorApplication:Internal_CallUpdateFunctions() (at /home/bokken/buildslave/unity/build/Editor/Mono/EditorApplication.cs:359) + + + + + + + + + + diff --git a/python/test/files/nunit/mstest/timewarpinc.results b/python/test/files/nunit/mstest/timewarpinc.results new file mode 100644 index 0000000..36df5f9 --- /dev/null +++ b/python/test/files/nunit/mstest/timewarpinc.results @@ -0,0 +1,103 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=1, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=2, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MP.Tests.AssetValidatorTest.ValidateSceneContainer', + tests=1, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + 
result_file='mstest/timewarpinc.xml', + test_file=None, + line=None, + class_name='', + test_name='ValidateSceneContainer("Assets/Scenes/Grid/GridTest.unity")', + result='failure', + message='Zenject.ZenjectException : Zenject Validation Failed! See errors ' + 'below for details.', + content=' at Zenject.Internal.ZenUnityEditorUtil.ValidateCurrentSceneSetup ' + '() [0x0009c] in ' + '/github/workspace/Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEdi' + 'torUtil.cs:82\n at ' + 'MP.Tests.AssetValidatorTest.ValidateSceneContainer (System.String ' + 'scenePath) [0x00009] in ' + '/github/workspace/Assets/Tests/EditorMode/AssetValidatorTest.cs:58\n ' + 'at (wrapper managed-to-native) ' + 'System.Reflection.RuntimeMethodInfo.InternalInvoke(System.Reflection.' + 'RuntimeMethodInfo,object,object[],System.Exception&)\n at ' + 'System.Reflection.RuntimeMethodInfo.Invoke (System.Object obj, ' + 'System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder ' + 'binder, System.Object[] parameters, System.Globalization.CultureInfo ' + 'culture) [0x0006a] in :0 ', + stdout='AssertionException: Could not find a tilemap tagged with ' + 'LevelBounds.\nAssertion failure. Value was Null\nExpected: Value was ' + 'not Null\nUnityEngine.Assertions.Assert.Fail (System.String message, ' + 'System.String userMessage) (at ' + '/home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Assert/' + 'AssertBase.cs:29)\nUnityEngine.Assertions.Assert.IsNotNull ' + '(UnityEngine.Object value, System.String message) (at ' + '/home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Assert/' + 'AssertNull.cs:58)\nUnityEngine.Assertions.Assert.IsNotNull[T] (T ' + 'value, System.String message) (at ' + '/home/bokken/buildslave/unity/build/Runtime/Export/Assertions/Assert/' + 'AssertNull.cs:46)\nMP.Gameplay.Level.LevelInstaller.InstallBindings ' + '() (at Assets/Scripts/Gameplay/Level/LevelInstaller.cs:30)\n' + 'Zenject.CompositeMonoInstaller.InstallBindings () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/CompositeMonoInstaller.cs:25' + ')\nZenject.Context.InstallInstallers ' + '(System.Collections.Generic.List`1[T] normalInstallers, ' + 'System.Collections.Generic.List`1[T] normalInstallerTypes, ' + 'System.Collections.Generic.List`1[T] scriptableObjectInstallers, ' + 'System.Collections.Generic.List`1[T] installers, ' + 'System.Collections.Generic.List`1[T] installerPrefabs) (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/Context.cs:218)\n' + 'Zenject.Context.InstallInstallers () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/Context.cs:139)\n' + 'Zenject.SceneContext.InstallBindings ' + '(System.Collections.Generic.List`1[T] injectableMonoBehaviours) (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:346' + ')\nZenject.SceneContext.Install () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:265' + ')\nZenject.SceneContext.Validate () (at ' + 'Assets/ThirdParty/Zenject/Source/Install/Contexts/SceneContext.cs:121' + ')\nZenject.Internal.ZenUnityEditorUtil.ValidateCurrentSceneSetup () ' + '(at ' + 'Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEditorUtil.cs:67)\n' + 'UnityEngine.Debug:LogException(Exception)\n' + 'ModestTree.Log:ErrorException(Exception) (at ' + 'Assets/ThirdParty/Zenject/Source/Internal/Log.cs:60)\n' + 'Zenject.Internal.ZenUnityEditorUtil:ValidateCurrentSceneSetup() (at ' + 'Assets/ThirdParty/Zenject/Source/Editor/ZenUnityEditorUtil.cs:72)\n' + 'MP.Tests.AssetValidatorTest:ValidateSceneContainer(String) (at ' + 
'Assets/Tests/EditorMode/AssetValidatorTest.cs:58)\n' + 'System.Reflection.MethodBase:Invoke(Object, Object[])\n' + 'NUnit.Framework.Internal.Reflect:InvokeMethod(MethodInfo, Object, ' + 'Object[])\nNUnit.Framework.Internal.MethodWrapper:Invoke(Object, ' + 'Object[])\n' + 'NUnit.Framework.Internal.Commands.TestMethodCommand:RunNonAsyncTestMe' + 'thod(ITestExecutionContext)\n' + 'NUnit.Framework.Internal.Commands.TestMethodCommand:RunTestMethod(ITe' + 'stExecutionContext)\n' + 'NUnit.Framework.Internal.Commands.TestMethodCommand:Execute(ITestExec' + 'utionContext)\n' + 'UnityEditor.EditorApplication:Internal_CallUpdateFunctions() (at ' + '/home/bokken/buildslave/unity/build/Editor/Mono/EditorApplication.cs:' + '359)\n\n', + stderr=None, + time=2.117365 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/mstest/timewarpinc.xml b/python/test/files/nunit/mstest/timewarpinc.xml new file mode 100644 index 0000000..ca256b6 --- /dev/null +++ b/python/test/files/nunit/mstest/timewarpinc.xml @@ -0,0 +1,86 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + :0 ]]> + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.annotations new file mode 100644 index 0000000..c396e30 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.annotations @@ -0,0 +1,115 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 errors, 1 fail, 8 skipped, 18 pass in 0s', + 'summary': + '28 tests\u2002\u2003\u200318 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '11 suites\u2003\u2003\u205f\u20048 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20041 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\u2003\u20031 ' + '[:fire:](https://github.com/step-security/publish-unit-test-result-ac' + 'tion/blob/VERSION/README.md#the-symbols "test errors")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMMQ6AIAxFr' + '0KYHdTJeBlDUGKjiGlhMt7dIqi49b3fvEMaWCeSvWgqISmATxBpDKg8uI25ZuTFx63tHh' + 'goaB2/C7PAzuYTRsGa60lMiA6zwbC9xXj/gkl8vZuL3M1lTTtrwTPkS9Cs5HkBSPFg+uI' + 'AAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-correct.xml\u2003[took 0s]', + 'title': 'NUnit.Tests.Assemblies.MockTestFixture.FailingTest failed', + 'raw_details': + 'Intentional failure\n\n ' + ' at NUnit.Tests.Assemblies.MockTestFixture.FailingTest () ' + '[0x00000] in ' + '/home/charlie/Dev/NUnit/nunit-2.5/work/src/tests/mock-assembly/Mock' + 'Assembly.cs:121' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'failure', + 'message': 'nunit3/jenkins/NUnit-correct.xml\u2003[took 0s]', + 'title': 'NUnit.Tests.Assemblies.MockTestFixture.TestWithException with error', + 'raw_details': + 'System.ApplicationException : 
Intentional Exception\n\n ' + ' at ' + 'NUnit.Tests.Assemblies.MockTestFixture.TestWithException () ' + '[0x00000] in ' + '/home/charlie/Dev/NUnit/nunit-2.5/work/src/tests/mock-assembly/Mock' + 'Assembly.cs:153' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 8 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '8 skipped tests found', + 'raw_details': + 'NUnit.Tests.Assemblies.MockTestFixture.InconclusiveTest\n' + 'NUnit.Tests.Assemblies.MockTestFixture.MockTest4\n' + 'NUnit.Tests.Assemblies.MockTestFixture.MockTest5\n' + 'NUnit.Tests.Assemblies.MockTestFixture.NotRunnableTest\n' + 'NUnit.Tests.BadFixture.SomeTest\nNUnit.Tests.IgnoredFixture.Test1\n' + 'NUnit.Tests.IgnoredFixture.Test2\nNUnit.Tests.IgnoredFixture.Test3' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 28 tests, see "Raw output" for the full list of tests.', + 'title': '28 tests found', + 'raw_details': + 'NUnit.Tests.Assemblies.MockTestFixture.FailingTest\n' + 'NUnit.Tests.Assemblies.MockTestFixture.InconclusiveTest\n' + 'NUnit.Tests.Assemblies.MockTestFixture.MockTest1\n' + 'NUnit.Tests.Assemblies.MockTestFixture.MockTest2\n' + 'NUnit.Tests.Assemblies.MockTestFixture.MockTest3\n' + 'NUnit.Tests.Assemblies.MockTestFixture.MockTest4\n' + 'NUnit.Tests.Assemblies.MockTestFixture.MockTest5\n' + 'NUnit.Tests.Assemblies.MockTestFixture.NotRunnableTest\n' + 'NUnit.Tests.Assemblies.MockTestFixture.TestWithException\n' + 'NUnit.Tests.Assemblies.MockTestFixture.TestWithManyProperties\n' + 'NUnit.Tests.BadFixture.SomeTest\n' + 'NUnit.Tests.FixtureWithTestCases.GenericMethod(9.2d,11.7d)\n' + 'NUnit.Tests.FixtureWithTestCases.GenericMethod(2,4)\n' + 'NUnit.Tests.FixtureWithTestCases.MethodWithParameters(2,2)\n' + 'NUnit.Tests.FixtureWithTestCases.MethodWithParameters(9,11)\n' + 'NUnit.Tests.GenericFixture(11.5d).Test1\n' + 'NUnit.Tests.GenericFixture(11.5d).Test2\n' + 'NUnit.Tests.GenericFixture(5).Test1\n' + 'NUnit.Tests.GenericFixture(5).Test2\n' + 'NUnit.Tests.IgnoredFixture.Test1\nNUnit.Tests.IgnoredFixture.Test2\n' + 'NUnit.Tests.IgnoredFixture.Test3\n' + 'NUnit.Tests.ParameterizedFixture(42).Test1\n' + 'NUnit.Tests.ParameterizedFixture(42).Test2\n' + 'NUnit.Tests.ParameterizedFixture(5).Test1\n' + 'NUnit.Tests.ParameterizedFixture(5).Test2\n' + 'NUnit.Tests.Singletons.OneTestCase.TestCase\n' + 'NUnit.Tests.TestAssembly.MockTestFixture.MyTest' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.junit-xml new file mode 100644 index 0000000..e586442 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.junit-xml @@ -0,0 +1,160 @@ + + + + + + + + + + + + + + + + + + + + at NUnit.Tests.Assemblies.MockTestFixture.FailingTest () [0x00000] in /home/charlie/Dev/NUnit/nunit-2.5/work/src/tests/mock-assembly/MockAssembly.cs:121 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + at NUnit.Tests.Assemblies.MockTestFixture.TestWithException () [0x00000] in /home/charlie/Dev/NUnit/nunit-2.5/work/src/tests/mock-assembly/MockAssembly.cs:153 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git 
a/python/test/files/nunit/nunit3/jenkins/NUnit-correct.results b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.results new file mode 100644 index 0000000..f19758e --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.results @@ -0,0 +1,484 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=11, + suite_tests=28, + suite_skipped=8, + suite_failures=1, + suite_errors=1, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.Assemblies.MockTestFixture', + tests=10, + skipped=4, + failures=1, + errors=1, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.BadFixture', + tests=1, + skipped=1, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.GenericMethod', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.MethodWithParameters', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.GenericFixture(11.5d)', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.GenericFixture(5)', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.IgnoredFixture', + tests=3, + skipped=3, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.ParameterizedFixture(42)', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.ParameterizedFixture(5)', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.Singletons.OneTestCase', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NUnit.Tests.TestAssembly.MockTestFixture', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.FailingTest', + result='failure', + message='Intentional failure', + content='\n at ' + 'NUnit.Tests.Assemblies.MockTestFixture.FailingTest () [0x00000] in ' + '/home/charlie/Dev/NUnit/nunit-2.5/work/src/tests/mock-assembly/MockAs' + 'sembly.cs:121\n\n ', + stdout=None, + stderr=None, + time=0.013 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.InconclusiveTest', + result='skipped', + message='No valid data', + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.MockTest1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + 
line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.MockTest2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.MockTest3', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.MockTest4', + result='skipped', + message='ignoring this test method for now', + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.MockTest5', + result='skipped', + message='Method is not public', + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.NotRunnableTest', + result='skipped', + message='No arguments were provided', + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.TestWithException', + result='error', + message='System.ApplicationException : Intentional Exception', + content='\n at ' + 'NUnit.Tests.Assemblies.MockTestFixture.TestWithException () ' + '[0x00000] in ' + '/home/charlie/Dev/NUnit/nunit-2.5/work/src/tests/mock-assembly/MockAs' + 'sembly.cs:153\n\n ', + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Assemblies.MockTestFixture.TestWithManyProperties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.BadFixture.SomeTest', + result='skipped', + message='No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.FixtureWithTestCases.GenericMethod(9.2d,11.7d)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.FixtureWithTestCases.GenericMethod(2,4)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.FixtureWithTestCases.MethodWithParameters(9,11)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, 
+ time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.FixtureWithTestCases.MethodWithParameters(2,2)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.GenericFixture(11.5d).Test1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.GenericFixture(11.5d).Test2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.GenericFixture(5).Test1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.GenericFixture(5).Test2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.IgnoredFixture.Test1', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.IgnoredFixture.Test2', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.IgnoredFixture.Test3', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.ParameterizedFixture(42).Test1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.ParameterizedFixture(42).Test2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.ParameterizedFixture(5).Test1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.ParameterizedFixture(5).Test2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.Singletons.OneTestCase.TestCase', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct.xml', + test_file=None, + line=None, + class_name='', + test_name='NUnit.Tests.TestAssembly.MockTestFixture.MyTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.xml new file mode 100644 index 0000000..abd12b0 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct.xml @@ -0,0 +1,194 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.annotations new file mode 100644 index 0000000..4369109 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.annotations @@ -0,0 +1,486 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 183 tests pass in 0s', + 'summary': + '\u205f\u2004\u205f\u20041 files\u2004\u2003102 suites\u2004\u2003\u2002' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '183 tests\u2003183 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '0 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n218 runs\u2006\u2003' + '218 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '0 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/12MSw6AIAwFr' + '0JYu/CzMV7GEITY+MGUsjLe3YoY0V1nXjq7tDAbLztRFUL6AHRDWTMOARWBW1mUjDxRHN' + 'vmod4Hrf9qgi3/6K2C+SMMosNkMKxXs67aBE8yN28xchaMnPe0WxYghnQJPyp5nNtosNP' + 'nAAAA\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 183 tests, see "Raw output" for the full list of tests.', + 'title': '183 tests found', + 'raw_details': + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Clone.should' + '_return_different_object\n' + 
'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Clone.should' + '_return_same_value\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+MutableCompo' + 'nents.should_return_empty\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.should' + '_change_value_in_range(100,-10,1)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.should' + '_change_value_in_range(100,-100,1)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.should' + '_change_value_in_range(100,-2,8)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.should' + '_change_value_in_range(100,2,12)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.should' + '_change_value_in_range(1000,1000,321)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AmountTests+RandomizeVal' + 'ues.should_randomize_value_in_range\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Clone.should_' + 'return_different_object\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Clone.should_' + 'return_same_value\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+MutableCompon' + 'ents.should_return_empty\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.should_' + 'wrap_by_a_value_between_1_and_max_value_in_both_directions(10,355,T' + 'rue,5)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.should_' + 'wrap_by_a_value_between_1_and_max_value_in_both_directions(10,40,Fa' + 'lse,330)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.should_' + 'wrap_by_a_value_between_1_and_max_value_in_both_directions(10,40,Tr' + 'ue,50)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.should_' + 'wrap_by_a_value_between_1_and_max_value_in_both_directions(10,6,Fal' + 'se,4)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.AngleTests+RandomizeValu' + 'e.should_select_a_value_between_0_and_360\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Clone.should_' + 'clone_component\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Clone.should_' + 'copy_values\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Constructor.s' + 'hould_initialize_data\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+ImplicitConve' + 'rsionToDrawingColor.should_convert_to_drawing_color\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+MutableCompon' + 'ents.should_not_contain_anything\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeAlph' + 'a.should_randomize_alpha_in_range\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeBlue' + '.should_randomize_alpha_in_range\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeGree' + 'n.should_randomize_alpha_in_range\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeRed.' 
+ 'should_randomize_alpha_in_range\n' + 'imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeValu' + 'es.should_randomize_colors_in_the_range\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Clone.shoul' + 'd_return_different_object\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Clone.shoul' + 'd_return_same_value\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+MutableComp' + 'onents.should_return_empty\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.shoul' + 'd_change_value_in_range(100,-10,1)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.shoul' + 'd_change_value_in_range(100,-100,1)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.shoul' + 'd_change_value_in_range(100,-2,8)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.shoul' + 'd_change_value_in_range(100,10,16)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.shoul' + 'd_change_value_in_range(100,2,12)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+RandomizeVa' + 'lues.should_randomize_value_in_range\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Clone.shou' + 'ld_return_different_object\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Clone.shou' + 'ld_return_same_value\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+MutableCom' + 'ponents.should_return_empty\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.shou' + 'ld_change_value_in_range(10,10,1,False,9,9)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.shou' + 'ld_change_value_in_range(10,10,1,True,11,11)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.shou' + 'ld_change_value_in_range(10,10,100,False,0,0)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.shou' + 'ld_change_value_in_range(10,10,1000,True,321,654)\n' + 'imG.Approx.Tests.Components.BuildingBlocks.PositionTests+RandomizeV' + 'alues.should_return_value_inside_target_limits\n' + 'imG.Approx.Tests.Components.DrawingTest+Clone.should_clone_all_shap' + 'es\n' + 'imG.Approx.Tests.Components.DrawingTest+Clone.should_clone_inner_co' + 'mponents\n' + 'imG.Approx.Tests.Components.DrawingTest+Clone.should_copy_propertie' + 's\n' + 'imG.Approx.Tests.Components.DrawingTest+Clone.should_create_clone_o' + 'f_target\n' + 'imG.Approx.Tests.Components.DrawingTest+Constructor.should_keep_dat' + 'a\n' + 'imG.Approx.Tests.Components.DrawingTest+Draw.should_draw_all_shapes' + '\n' + 'imG.Approx.Tests.Components.DrawingTest+Draw.should_fill_image_with' + '_background_color\n' + 'imG.Approx.Tests.Components.DrawingTest+Draw.should_return_correct_' + 'size_image\n' + 'imG.Approx.Tests.Components.DrawingTest+MutableComponents.should_co' + 'ntain_all_shapes\n' + 'imG.Approx.Tests.Components.DrawingTest+MutableComponents.should_co' + 'ntain_color\n' + 'imG.Approx.Tests.Components.Shapes.AreaTests+Clone.should_return_di' + 'fferent_object(System.Func`2[imG.Approx.Components.Shapes.Area,Syst' + 'em.Object])\n' + 'imG.Approx.Tests.Components.Shapes.AreaTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.AreaTests+InitializeComponents.s' + 'hould_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.AreaTests+MutableComponents.shou' + 'ld_return_components(imG.Approx.Components.Shapes.Area,imG.Approx.C' + 'omponents.BuildingBlocks.Angle)\n' + 'imG.Approx.Tests.Components.Shapes.AreaTests+MutableComponents.shou' + 
'ld_return_components(imG.Approx.Components.Shapes.Area,imG.Approx.C' + 'omponents.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.AreaTests+MutableComponents.shou' + 'ld_return_components(imG.Approx.Components.Shapes.Area,imG.Approx.C' + 'omponents.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Components.Shapes.BezierTests+Clone.should_return_' + 'different_object(System.Func`2[imG.Approx.Components.Shapes.Bezier,' + 'System.Object])\n' + 'imG.Approx.Tests.Components.Shapes.BezierTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.BezierTests+InitializeComponents' + '.should_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.sh' + 'ould_return_components(imG.Approx.Components.Shapes.Bezier,imG.Appr' + 'ox.Components.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.sh' + 'ould_return_components(imG.Approx.Components.Shapes.Bezier,imG.Appr' + 'ox.Components.BuildingBlocks.PenSize)\n' + 'imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.sh' + 'ould_return_components(imG.Approx.Components.Shapes.Bezier,imG.Appr' + 'ox.Components.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Components.Shapes.BlobTests+Clone.should_return_di' + 'fferent_object(System.Func`2[imG.Approx.Components.Shapes.Blob,Syst' + 'em.Object])\n' + 'imG.Approx.Tests.Components.Shapes.BlobTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.BlobTests+InitializeComponents.s' + 'hould_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.BlobTests+MutableComponents.shou' + 'ld_return_components(imG.Approx.Components.Shapes.Blob,imG.Approx.C' + 'omponents.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.BlobTests+MutableComponents.shou' + 'ld_return_components(imG.Approx.Components.Shapes.Blob,imG.Approx.C' + 'omponents.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Components.Shapes.CircleTests+Clone.should_return_' + 'different_object(System.Func`2[imG.Approx.Components.Shapes.Circle,' + 'System.Object])\n' + 'imG.Approx.Tests.Components.Shapes.CircleTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.CircleTests+InitializeComponents' + '.should_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.CircleTests+MutableComponents.sh' + 'ould_return_components(imG.Approx.Components.Shapes.Circle,imG.Appr' + 'ox.Components.BuildingBlocks.Amount)\n' + 'imG.Approx.Tests.Components.Shapes.CircleTests+MutableComponents.sh' + 'ould_return_components(imG.Approx.Components.Shapes.Circle,imG.Appr' + 'ox.Components.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.CircleTests+MutableComponents.sh' + 'ould_return_components(imG.Approx.Components.Shapes.Circle,imG.Appr' + 'ox.Components.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ConcreteFactory.ShapeF' + 'actoryTests+GetShape.should_return_shape\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ConcreteFactory.ShapeF' + 'actoryTests+Name.should_return_name_by_default\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ShapeFactoryCatalogTes' + 'ts+ActiveFactories.should_return_only_active_factories\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ShapeFactoryCatalogTes' + 'ts+Disable.should_enable_factories_named\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ShapeFactoryCatalogTes' + 'ts+DisableAll.should_disable_all_factories\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ShapeFactoryCatalogTes' + 'ts+Enable.should_enable_factories_named\n' + 
'imG.Approx.Tests.Components.Shapes.Factories.ShapeFactoryCatalogTes' + 'ts+EnableAll.should_enable_all_factories\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ShapeFactoryCatalogTes' + 'ts+Register.should_add_factory\n' + 'imG.Approx.Tests.Components.Shapes.Factories.ShapeFactoryCatalogTes' + 'ts+RegisterAllFactories.should_register_all_factories\n' + 'imG.Approx.Tests.Components.Shapes.LineTests+Clone.should_return_di' + 'fferent_object(System.Func`2[imG.Approx.Components.Shapes.Line,Syst' + 'em.Object])\n' + 'imG.Approx.Tests.Components.Shapes.LineTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.LineTests+InitializeComponents.s' + 'hould_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.LineTests+MutableComponents.shou' + 'ld_return_components(imG.Approx.Components.Shapes.Line,imG.Approx.C' + 'omponents.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.LineTests+MutableComponents.shou' + 'ld_return_components(imG.Approx.Components.Shapes.Line,imG.Approx.C' + 'omponents.BuildingBlocks.PenSize)\n' + 'imG.Approx.Tests.Components.Shapes.LineTests+MutableComponents.shou' + 'ld_return_components(imG.Approx.Components.Shapes.Line,imG.Approx.C' + 'omponents.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Components.Shapes.PolygonTests+Clone.should_return' + '_different_object(System.Func`2[imG.Approx.Components.Shapes.Polygo' + 'n,System.Object])\n' + 'imG.Approx.Tests.Components.Shapes.PolygonTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.PolygonTests+InitializeComponent' + 's.should_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.PolygonTests+MutableComponents.s' + 'hould_return_components(imG.Approx.Components.Shapes.Polygon,imG.Ap' + 'prox.Components.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.PolygonTests+MutableComponents.s' + 'hould_return_components(imG.Approx.Components.Shapes.Polygon,imG.Ap' + 'prox.Components.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Components.Shapes.RectangleTests+Clone.should_retu' + 'rn_different_object(System.Func`2[imG.Approx.Components.Shapes.Rect' + 'angle,System.Object])\n' + 'imG.Approx.Tests.Components.Shapes.RectangleTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.RectangleTests+InitializeCompone' + 'nts.should_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.RectangleTests+MutableComponents' + '.should_return_components(imG.Approx.Components.Shapes.Rectangle,im' + 'G.Approx.Components.BuildingBlocks.Amount)\n' + 'imG.Approx.Tests.Components.Shapes.RectangleTests+MutableComponents' + '.should_return_components(imG.Approx.Components.Shapes.Rectangle,im' + 'G.Approx.Components.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.RectangleTests+MutableComponents' + '.should_return_components(imG.Approx.Components.Shapes.Rectangle,im' + 'G.Approx.Components.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Components.Shapes.TriangleTests+Clone.should_retur' + 'n_different_object(System.Func`2[imG.Approx.Components.Shapes.Trian' + 'gle,System.Object])\n' + 'imG.Approx.Tests.Components.Shapes.TriangleTests+Draw.should_draw\n' + 'imG.Approx.Tests.Components.Shapes.TriangleTests+InitializeComponen' + 'ts.should_randomize_elements\n' + 'imG.Approx.Tests.Components.Shapes.TriangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Triangle,imG.' + 'Approx.Components.BuildingBlocks.Color)\n' + 'imG.Approx.Tests.Components.Shapes.TriangleTests+MutableComponents.' 
+ 'should_return_components(imG.Approx.Components.Shapes.Triangle,imG.' + 'Approx.Components.BuildingBlocks.Position)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.should_return' + '_mutation_description_determined_by_random_provider(System.Collecti' + 'ons.Generic.Dictionary`2[imG.Approx.Mutation.IMutationDescription,S' + 'ystem.Collections.Generic.List`1[imG.Approx.Mutation.IMutable]],Cas' + 'tle.Proxies.IMutationDescriptionProxy,Castle.Proxies.IMutableProxy,' + '0,0)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.should_return' + '_mutation_description_determined_by_random_provider(System.Collecti' + 'ons.Generic.Dictionary`2[imG.Approx.Mutation.IMutationDescription,S' + 'ystem.Collections.Generic.List`1[imG.Approx.Mutation.IMutable]],Cas' + 'tle.Proxies.IMutationDescriptionProxy,Castle.Proxies.IMutableProxy,' + '0,1)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.should_return' + '_mutation_description_determined_by_random_provider(System.Collecti' + 'ons.Generic.Dictionary`2[imG.Approx.Mutation.IMutationDescription,S' + 'ystem.Collections.Generic.List`1[imG.Approx.Mutation.IMutable]],Cas' + 'tle.Proxies.IMutationDescriptionProxy,Castle.Proxies.IMutableProxy,' + '6,0)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.should_return' + '_mutation_description_determined_by_random_provider(System.Collecti' + 'ons.Generic.Dictionary`2[imG.Approx.Mutation.IMutationDescription,S' + 'ystem.Collections.Generic.List`1[imG.Approx.Mutation.IMutable]],Cas' + 'tle.Proxies.IMutationDescriptionProxy,Castle.Proxies.IMutableProxy,' + '6,1)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.should_return' + '_mutation_description_determined_by_random_provider(System.Collecti' + 'ons.Generic.Dictionary`2[imG.Approx.Mutation.IMutationDescription,S' + 'ystem.Collections.Generic.List`1[imG.Approx.Mutation.IMutable]],Cas' + 'tle.Proxies.IMutationDescriptionProxy,Castle.Proxies.IMutableProxy,' + '7,0)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.should_return' + '_mutation_description_determined_by_random_provider(System.Collecti' + 'ons.Generic.Dictionary`2[imG.Approx.Mutation.IMutationDescription,S' + 'ystem.Collections.Generic.List`1[imG.Approx.Mutation.IMutable]],Cas' + 'tle.Proxies.IMutationDescriptionProxy,Castle.Proxies.IMutableProxy,' + '7,1)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.should_return' + '_null_if_no_mutation_exists\n' + 'imG.Approx.Tests.Mutation.MutagenTests+GetMutationsFor.should_retur' + 'n_active_and_applicable_and_selectable_mutations\n' + 'imG.Approx.Tests.Mutation.MutagenTests+GetMutationsFor.should_retur' + 'n_empty_if_mutable_is_unknown\n' + 'imG.Approx.Tests.Mutation.MutagenTests+GetMutationsFor.should_retur' + 'n_mutations_recursively\n' + 'imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.should_alway' + 's_have_occasions_to_mutate\n' + 'imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.should_alway' + 's_mutate_without_doing_anything_to_the_target\n' + 'imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.should_alway' + 's_target_IMutableType\n' + 'imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.should_be_al' + 'ways_able_to_mutate\n' + 'imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.should_be_al' + 'ways_active\n' + 'imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.should_return' + '_a_mutation\n' + 'imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.should_return' + '_matching_selected_mutation\n' + 
'imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.should_return' + '_the_default_mutation_if_no_mutation_exists\n' + 'imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.should_throw_' + 'if_any_component_is_null(imG.Approx.Mutation.Process,null)\n' + 'imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.should_throw_' + 'if_any_component_is_null(null,Castle.Proxies.IMutableProxy)\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+DeclareMut' + 'ation.should_add_description_to_catalog\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+DeclareMut' + 'ation.should_throw_when_the_same_description_is_declared_twice\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.should' + '_return_empty_list_for_unknown_mutable_type\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.should' + '_return_list_of_descriptions_for_type(imG.Approx.Tests.Mutation.Mut' + 'ableAndDescription.Mutable1,imG.Approx.Mutation.MutationDescription' + '`1[imG.Approx.Tests.Mutation.MutableAndDescription.Mutable1],imG.Ap' + 'prox.Mutation.MutationDescription`1[imG.Approx.Tests.Mutation.Mutab' + 'leAndDescription.Mutable2])\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.should' + '_return_list_of_descriptions_for_type(imG.Approx.Tests.Mutation.Mut' + 'ableAndDescription.Mutable2,imG.Approx.Mutation.MutationDescription' + '`1[imG.Approx.Tests.Mutation.MutableAndDescription.Mutable2],imG.Ap' + 'prox.Mutation.MutationDescription`1[imG.Approx.Tests.Mutation.Mutab' + 'leAndDescription.Mutable3])\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.should' + '_return_list_of_descriptions_for_type(imG.Approx.Tests.Mutation.Mut' + 'ableAndDescription.Mutable3,imG.Approx.Mutation.MutationDescription' + '`1[imG.Approx.Tests.Mutation.MutableAndDescription.Mutable3],imG.Ap' + 'prox.Mutation.MutationDescription`1[imG.Approx.Tests.Mutation.Mutab' + 'leAndDescription.Mutable1])\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+RegisterAl' + 'lMutations.should_register_all_mutations_declared_by_registrars\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionTests+CanMutate.lambda' + '_is_called_when_checking\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionTests+Constructor.shou' + 'ld_refuse_odds_that_are_not_positive(-1)\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionTests+Constructor.shou' + 'ld_refuse_odds_that_are_not_positive(-1000)\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionTests+Constructor.shou' + 'ld_refuse_odds_that_are_not_positive(0)\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionTests+GetMutationTarge' + 'tType.should_return_type_of_generic\n' + 'imG.Approx.Tests.Mutation.MutationDescriptionTests+Mutate.lambda_is' + '_called_when_mutating\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Constructor.should_throw_if_' + 'any_argument_is_null(Castle.Proxies.IRandomizationProviderProxy,Cas' + 'tle.Proxies.IMutationDescriptionCatalogProxy,Castle.Proxies.ITarget' + 'Proxy,null)\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Constructor.should_throw_if_' + 'any_argument_is_null(Castle.Proxies.IRandomizationProviderProxy,Cas' + 'tle.Proxies.IMutationDescriptionCatalogProxy,null,Castle.Proxies.IS' + 'hapeFactoryCatalogProxy)\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Constructor.should_throw_if_' + 'any_argument_is_null(Castle.Proxies.IRandomizationProviderProxy,nul' + 'l,Castle.Proxies.ITargetProxy,Castle.Proxies.IShapeFactoryCatalogPr' + 'oxy)\n' + 
'imG.Approx.Tests.Mutation.ProcessTests+Constructor.should_throw_if_' + 'any_argument_is_null(null,Castle.Proxies.IMutationDescriptionCatalo' + 'gProxy,Castle.Proxies.ITargetProxy,Castle.Proxies.IShapeFactoryCata' + 'logProxy)\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Mutate.should_always_keep_be' + 'st_drawing_according_to_distance(False)\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Mutate.should_always_keep_be' + 'st_drawing_according_to_distance(True)\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Mutate.should_increase_evolu' + 'tions_when_drawing_is_better\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Mutate.should_increase_gener' + 'ation_number\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Mutate.should_trigger_event_' + 'when_drawing_is_better\n' + 'imG.Approx.Tests.Mutation.ProcessTests+Mutate.should_trigger_event_' + 'when_drawing_is_worse\n' + 'imG.Approx.Tests.Mutation.ProcessTests+SetupDrawing.should_compute_' + 'the_distance_only_the_first_time\n' + 'imG.Approx.Tests.Mutation.ProcessTests+SetupDrawing.should_create_d' + 'rawing_based_on_target\n' + 'imG.Approx.Tests.Mutation.ProcessTests+SetupDrawing.should_create_t' + 'he_drawing_only_the_first_time\n' + 'imG.Approx.Tests.Mutation.RandomizationProviderTests+Constructor.sh' + 'ould_keep_the_seed\n' + 'imG.Approx.Tests.Mutation.RandomizationProviderTests+Next.should_re' + 'turn_integer\n' + 'imG.Approx.Tests.Mutation.TargetTests+Constructor.should_keep_initi' + 'alized_data(System.Func`2[imG.Approx.Mutation.Target,System.Object]' + ',"data\\\\red.png")\n' + 'imG.Approx.Tests.Mutation.TargetTests+Constructor.should_keep_initi' + 'alized_data(System.Func`2[imG.Approx.Mutation.Target,System.Object]' + ',25)\n' + 'imG.Approx.Tests.Mutation.TargetTests+DistanceTo.should_not_throw_i' + 'f_dimensions_are_identical\n' + 'imG.Approx.Tests.Mutation.TargetTests+DistanceTo.should_throw_if_di' + 'mensions_are_different(imG.Approx.Components.Drawing)\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_load_dim' + 'ensions_from_image\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_load_ima' + 'ge_data\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_not_resi' + 'ze_if_image_dimensions_are_over_or_equal_to_maxDimension(100)\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_not_resi' + 'ze_if_image_dimensions_are_over_or_equal_to_maxDimension(50)\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_resize_i' + 'f_image_dimensions_are_over_maxDimension\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_set_rati' + 'o_to_correct_value_when_loading(10,0.2f)\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_set_rati' + 'o_to_correct_value_when_loading(25,0.5f)\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_set_rati' + 'o_to_correct_value_when_loading(50,1.0f)\n' + 'imG.Approx.Tests.Mutation.TargetTests+LoadImageData.should_set_rati' + 'o_to_correct_value_when_loading(99,1.0f)\n' + 'imG.Approx.Tests.Mutation.TargetTests+Name.should_return_filename\n' + 'imG.Approx.Tests.Tools.TestValues+Clamp.should_return_max_value_bet' + 'ween_original_and_min_value(1,0,1)\n' + 'imG.Approx.Tests.Tools.TestValues+Clamp.should_return_max_value_bet' + 'ween_original_and_min_value(1,1,1)\n' + 'imG.Approx.Tests.Tools.TestValues+Clamp.should_return_max_value_bet' + 'ween_original_and_min_value(1,10,10)\n' + 'imG.Approx.Tests.Tools.TestValues+Clamp.should_return_min_value_bet' + 'ween_original_and_max_value(1,0,0)\n' + 
'imG.Approx.Tests.Tools.TestValues+Clamp.should_return_min_value_bet' + 'ween_original_and_max_value(1,1,1)\n' + 'imG.Approx.Tests.Tools.TestValues+Clamp.should_return_min_value_bet' + 'ween_original_and_max_value(1,10,1)\n' + 'imG.Approx.Tests.Tools.TestValues+Clamp.should_throw_if_min_is_abov' + 'e_max\n' + 'imG.Approx.Tests.Tools.TestValues+Wrap.should_throw_if_min_is_above' + '_max\n' + 'imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(-1' + '01,10,20,19)\n' + 'imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(10' + ',10,25,10)\n' + 'imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(10' + '1,10,20,11)\n' + 'imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(16' + ',10,25,16)\n' + 'imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(25' + ',10,25,10)' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.junit-xml new file mode 100644 index 0000000..031729c --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.junit-xml @@ -0,0 +1,767 @@ [767 lines of NUnit 3 JUnit-XML report markup omitted] diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.results b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.results new file mode 100644 index 0000000..1c398ea --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.results @@ -0,0 +1,4131 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=102, + suite_tests=218, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Clone', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+' + 'MutableComponents', + tests=1, + skipped=0, + failures=0, + 
errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.' + 'should_change_value_in_range', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+' + 'RandomizeValues', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Clone', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+' + 'MutableComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.' + 'should_wrap_by_a_value_between_1_and_max_value_in_both_directions', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+RandomizeValue', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Clone', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Constructor', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+' + 'ImplicitConversionToDrawingColor', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+' + 'MutableComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeAlpha', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeBlue', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeGreen', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeRed', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeValues', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Clone', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+' + 'MutableComponents', + tests=1, + skipped=0, + failures=0, + 
errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.' + 'should_change_value_in_range', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+' + 'RandomizeValues', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Clone', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+' + 'MutableComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.' + 'should_change_value_in_range', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+' + 'RandomizeValues', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.DrawingTest+Clone', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.DrawingTest+Constructor', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.DrawingTest+Draw', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.DrawingTest+MutableComponents', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.AreaTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.AreaTests+InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=6, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.BezierTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.BezierTests+InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=6, + skipped=0, 
+ failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.BlobTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.BlobTests+InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.CircleTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.CircleTests+InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.ConcreteFactory.' + 'ShapeFactoryTests+GetShape', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.ConcreteFactory.' + 'ShapeFactoryTests+Name', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+ActiveFactories', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+Disable', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+DisableAll', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+Enable', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+EnableAll', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+Register', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.Factories.' 
+ 'ShapeFactoryCatalogTests+RegisterAllFactories', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.LineTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.LineTests+InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.PolygonTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.PolygonTests+InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.RectangleTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.RectangleTests+' + 'InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_different_object', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.TriangleTests+Draw', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.TriangleTests+InitializeComponents', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Components.Shapes.should_return_components', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.' 
+ 'should_return_mutation_description_determined_by_random_provider', + tests=6, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation', + tests=7, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutagenTests+GetMutationsFor', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.should_throw_if_any_component_is_null', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutagenTests+SelectMutation', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+' + 'DeclareMutation', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.should_return_list_of_descriptions_for_type', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+' + 'RegisterAllMutations', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutationDescriptionTests+CanMutate', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.should_refuse_odds_that_are_not_positive', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutationDescriptionTests+' + 'GetMutationTargetType', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.MutationDescriptionTests+Mutate', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.should_throw_if_any_argument_is_null', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.' 
+ 'should_always_keep_best_drawing_according_to_distance', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.ProcessTests+Mutate', + tests=6, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.ProcessTests+SetupDrawing', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.RandomizationProviderTests+Constructor', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.RandomizationProviderTests+Next', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.should_keep_initialized_data', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.should_throw_if_dimensions_are_different', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.TargetTests+DistanceTo', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.' + 'should_not_resize_if_image_dimensions_are_over_or_equal_to_maxDimensi' + 'on', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.' + 'should_set_ratio_to_correct_value_when_loading', + tests=4, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData', + tests=9, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Mutation.TargetTests+Name', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Tools.' + 'should_return_max_value_between_original_and_min_value', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Tools.' + 'should_return_min_value_between_original_and_max_value', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Tools.TestValues+Clamp', + tests=7, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Tools.should_wrap_back_to_range', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='imG.Approx.Tests.Tools.TestValues+Wrap', + tests=6, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Clone.' 
+ 'should_return_different_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.012 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Clone.' + 'should_return_same_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+' + 'MutableComponents.should_return_empty', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.' + 'should_change_value_in_range(100,-2,8)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.' + 'should_change_value_in_range(100,-100,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.' + 'should_change_value_in_range(100,-10,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.' + 'should_change_value_in_range(1000,1000,321)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+Nudge.' + 'should_change_value_in_range(100,2,12)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AmountTests+' + 'RandomizeValues.should_randomize_value_in_range', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Clone.' + 'should_return_different_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Clone.' 
+ 'should_return_same_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+' + 'MutableComponents.should_return_empty', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.' + 'should_wrap_by_a_value_between_1_and_max_value_in_both_directions(10,' + '40,False,330)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.' + 'should_wrap_by_a_value_between_1_and_max_value_in_both_directions(10,' + '355,True,5)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.' + 'should_wrap_by_a_value_between_1_and_max_value_in_both_directions(10,' + '6,False,4)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+Nudge.' + 'should_wrap_by_a_value_between_1_and_max_value_in_both_directions(10,' + '40,True,50)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.AngleTests+RandomizeValue.' + 'should_select_a_value_between_0_and_360', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Clone.' + 'should_clone_component', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Clone.' + 'should_copy_values', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+Constructor.' 
+ 'should_initialize_data', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+' + 'ImplicitConversionToDrawingColor.should_convert_to_drawing_color', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+' + 'MutableComponents.should_not_contain_anything', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeAlpha.' + 'should_randomize_alpha_in_range', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeBlue.' + 'should_randomize_alpha_in_range', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeGreen.' + 'should_randomize_alpha_in_range', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+RandomizeRed.' + 'should_randomize_alpha_in_range', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.ColorTests+' + 'RandomizeValues.should_randomize_colors_in_the_range', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Clone.' + 'should_return_different_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Clone.' 
+ 'should_return_same_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+' + 'MutableComponents.should_return_empty', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.' + 'should_change_value_in_range(100,-100,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.' + 'should_change_value_in_range(100,10,16)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.' + 'should_change_value_in_range(100,2,12)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.' + 'should_change_value_in_range(100,-2,8)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+Nudge.' + 'should_change_value_in_range(100,-10,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PenSizeTests+' + 'RandomizeValues.should_randomize_value_in_range', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Clone.' + 'should_return_different_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Clone.' 
+ 'should_return_same_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+' + 'MutableComponents.should_return_empty', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.' + 'should_change_value_in_range(10,10,1,True,11,11)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.' + 'should_change_value_in_range(10,10,100,False,0,0)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.' + 'should_change_value_in_range(10,10,1,False,9,9)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+Nudge.' + 'should_change_value_in_range(10,10,1000,True,321,654)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.BuildingBlocks.PositionTests+' + 'RandomizeValues.should_return_value_inside_target_limits', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Clone.should_clone_all_shapes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Clone.' + 'should_clone_inner_components', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Clone.should_copy_properties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Clone.' 
+ 'should_create_clone_of_target', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Constructor.should_keep_data', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Draw.should_draw_all_shapes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.009 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Draw.' + 'should_fill_image_with_background_color', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+Draw.' + 'should_return_correct_size_image', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+MutableComponents.' + 'should_contain_all_shapes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.DrawingTest+MutableComponents.' + 'should_contain_color', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Area,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Area,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' 
+ 'Shapes.Area,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+InitializeComponents.' + 'should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Area,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Area,imG.' + 'Approx.Components.BuildingBlocks.Angle)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.AreaTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Area,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Bezier,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Bezier,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Bezier,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+Clone.' 
+ 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Bezier,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Bezier,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Bezier,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+InitializeComponents.' + 'should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Bezier,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Bezier,imG.' + 'Approx.Components.BuildingBlocks.PenSize)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Bezier,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Bezier,imG.' 
+ 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Bezier,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BezierTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Bezier,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Blob,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Blob,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+InitializeComponents.' + 'should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Blob,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Blob,imG.' 
+ 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Blob,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Blob,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.BlobTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Blob,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Circle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Circle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Circle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+InitializeComponents.' 
+ 'should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Circle,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Circle,imG.' + 'Approx.Components.BuildingBlocks.Amount)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.CircleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Circle,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.ConcreteFactory.' + 'ShapeFactoryTests+GetShape.should_return_shape', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.ConcreteFactory.' + 'ShapeFactoryTests+Name.should_return_name_by_default', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+ActiveFactories.' + 'should_return_only_active_factories', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+Disable.should_enable_factories_named', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.011 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+DisableAll.should_disable_all_factories', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.' 
+ 'ShapeFactoryCatalogTests+Enable.should_enable_factories_named', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+EnableAll.should_enable_all_factories', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+Register.should_add_factory', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.Factories.' + 'ShapeFactoryCatalogTests+RegisterAllFactories.' + 'should_register_all_factories', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.504 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Line,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Line,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Line,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Line,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+InitializeComponents.' 
+ 'should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Line,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Line,imG.' + 'Approx.Components.BuildingBlocks.PenSize)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Line,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.LineTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Line,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Polygon,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Polygon,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+InitializeComponents.' 
+ 'should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Polygon,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Polygon,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Polygon,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Polygon,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.PolygonTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Polygon,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Rectangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Rectangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' 
+ 'Shapes.Rectangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Rectangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+' + 'InitializeComponents.should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Rectangle,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Rectangle,imG.' + 'Approx.Components.BuildingBlocks.Amount)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Rectangle,imG.' + 'Approx.Components.BuildingBlocks.Amount)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.RectangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Rectangle,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' 
+ 'Shapes.Triangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Triangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Triangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+Clone.' + 'should_return_different_object(System.Func`2[imG.Approx.Components.' + 'Shapes.Triangle,System.Object])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+Draw.should_draw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+' + 'InitializeComponents.should_randomize_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Triangle,imG.' + 'Approx.Components.BuildingBlocks.Color)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Triangle,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Triangle,imG.' 
+ 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Components.Shapes.TriangleTests+MutableComponents.' + 'should_return_components(imG.Approx.Components.Shapes.Triangle,imG.' + 'Approx.Components.BuildingBlocks.Position)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.' + 'should_return_mutation_description_determined_by_random_provider(' + 'System.Collections.Generic.Dictionary`2[imG.Approx.Mutation.' + 'IMutationDescription,System.Collections.Generic.List`1[imG.Approx.' + 'Mutation.IMutable]],Castle.Proxies.IMutationDescriptionProxy,Castle.' + 'Proxies.IMutableProxy,0,0)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.009 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.' + 'should_return_mutation_description_determined_by_random_provider(' + 'System.Collections.Generic.Dictionary`2[imG.Approx.Mutation.' + 'IMutationDescription,System.Collections.Generic.List`1[imG.Approx.' + 'Mutation.IMutable]],Castle.Proxies.IMutationDescriptionProxy,Castle.' + 'Proxies.IMutableProxy,6,0)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.' + 'should_return_mutation_description_determined_by_random_provider(' + 'System.Collections.Generic.Dictionary`2[imG.Approx.Mutation.' + 'IMutationDescription,System.Collections.Generic.List`1[imG.Approx.' + 'Mutation.IMutable]],Castle.Proxies.IMutationDescriptionProxy,Castle.' + 'Proxies.IMutableProxy,0,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.' + 'should_return_mutation_description_determined_by_random_provider(' + 'System.Collections.Generic.Dictionary`2[imG.Approx.Mutation.' + 'IMutationDescription,System.Collections.Generic.List`1[imG.Approx.' + 'Mutation.IMutable]],Castle.Proxies.IMutationDescriptionProxy,Castle.' + 'Proxies.IMutableProxy,6,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.' + 'should_return_mutation_description_determined_by_random_provider(' + 'System.Collections.Generic.Dictionary`2[imG.Approx.Mutation.' + 'IMutationDescription,System.Collections.Generic.List`1[imG.Approx.' + 'Mutation.IMutable]],Castle.Proxies.IMutationDescriptionProxy,Castle.' 
+ 'Proxies.IMutableProxy,7,0)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.' + 'should_return_mutation_description_determined_by_random_provider(' + 'System.Collections.Generic.Dictionary`2[imG.Approx.Mutation.' + 'IMutationDescription,System.Collections.Generic.List`1[imG.Approx.' + 'Mutation.IMutable]],Castle.Proxies.IMutationDescriptionProxy,Castle.' + 'Proxies.IMutableProxy,7,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+ChooseMutation.' + 'should_return_null_if_no_mutation_exists', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+GetMutationsFor.' + 'should_return_active_and_applicable_and_selectable_mutations', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+GetMutationsFor.' + 'should_return_empty_if_mutable_is_unknown', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+GetMutationsFor.' + 'should_return_mutations_recursively', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.' + 'should_always_have_occasions_to_mutate', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.' + 'should_always_mutate_without_doing_anything_to_the_target', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.' + 'should_always_target_IMutableType', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.' 
+ 'should_be_always_able_to_mutate', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+NoOpDescription.' + 'should_be_always_active', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.' + 'should_throw_if_any_component_is_null(null,Castle.Proxies.' + 'IMutableProxy)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.' + 'should_throw_if_any_component_is_null(imG.Approx.Mutation.Process,' + 'null)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.' + 'should_return_a_mutation', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.' + 'should_return_matching_selected_mutation', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutagenTests+SelectMutation.' + 'should_return_the_default_mutation_if_no_mutation_exists', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+' + 'DeclareMutation.should_add_description_to_catalog', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+' + 'DeclareMutation.' + 'should_throw_when_the_same_description_is_declared_twice', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.' + 'should_return_list_of_descriptions_for_type(imG.Approx.Tests.' + 'Mutation.MutableAndDescription.Mutable1,imG.Approx.Mutation.' + 'MutationDescription`1[imG.Approx.Tests.Mutation.' + 'MutableAndDescription.Mutable1],imG.Approx.Mutation.' 
+ 'MutationDescription`1[imG.Approx.Tests.Mutation.' + 'MutableAndDescription.Mutable2])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.' + 'should_return_list_of_descriptions_for_type(imG.Approx.Tests.' + 'Mutation.MutableAndDescription.Mutable2,imG.Approx.Mutation.' + 'MutationDescription`1[imG.Approx.Tests.Mutation.' + 'MutableAndDescription.Mutable2],imG.Approx.Mutation.' + 'MutationDescription`1[imG.Approx.Tests.Mutation.' + 'MutableAndDescription.Mutable3])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.' + 'should_return_list_of_descriptions_for_type(imG.Approx.Tests.' + 'Mutation.MutableAndDescription.Mutable3,imG.Approx.Mutation.' + 'MutationDescription`1[imG.Approx.Tests.Mutation.' + 'MutableAndDescription.Mutable3],imG.Approx.Mutation.' + 'MutationDescription`1[imG.Approx.Tests.Mutation.' + 'MutableAndDescription.Mutable1])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+For.' + 'should_return_empty_list_for_unknown_mutable_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionCatalogTest+' + 'RegisterAllMutations.' + 'should_register_all_mutations_declared_by_registrars', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.204 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionTests+CanMutate.' + 'lambda_is_called_when_checking', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionTests+Constructor.' + 'should_refuse_odds_that_are_not_positive(-1000)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionTests+Constructor.' + 'should_refuse_odds_that_are_not_positive(0)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionTests+Constructor.' 
+ 'should_refuse_odds_that_are_not_positive(-1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionTests+' + 'GetMutationTargetType.should_return_type_of_generic', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.MutationDescriptionTests+Mutate.' + 'lambda_is_called_when_mutating', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Constructor.' + 'should_throw_if_any_argument_is_null(null,Castle.Proxies.' + 'IMutationDescriptionCatalogProxy,Castle.Proxies.ITargetProxy,Castle.' + 'Proxies.IShapeFactoryCatalogProxy)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Constructor.' + 'should_throw_if_any_argument_is_null(Castle.Proxies.' + 'IRandomizationProviderProxy,null,Castle.Proxies.ITargetProxy,Castle.' + 'Proxies.IShapeFactoryCatalogProxy)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Constructor.' + 'should_throw_if_any_argument_is_null(Castle.Proxies.' + 'IRandomizationProviderProxy,Castle.Proxies.' + 'IMutationDescriptionCatalogProxy,null,Castle.Proxies.' + 'IShapeFactoryCatalogProxy)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Constructor.' + 'should_throw_if_any_argument_is_null(Castle.Proxies.' + 'IRandomizationProviderProxy,Castle.Proxies.' + 'IMutationDescriptionCatalogProxy,Castle.Proxies.ITargetProxy,null)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Mutate.' + 'should_always_keep_best_drawing_according_to_distance(False)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Mutate.' 
+ 'should_always_keep_best_drawing_according_to_distance(True)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Mutate.' + 'should_increase_evolutions_when_drawing_is_better', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Mutate.' + 'should_increase_generation_number', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Mutate.' + 'should_trigger_event_when_drawing_is_better', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+Mutate.' + 'should_trigger_event_when_drawing_is_worse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+SetupDrawing.' + 'should_compute_the_distance_only_the_first_time', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+SetupDrawing.' + 'should_create_drawing_based_on_target', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.ProcessTests+SetupDrawing.' + 'should_create_the_drawing_only_the_first_time', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.RandomizationProviderTests+Constructor.' + 'should_keep_the_seed', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.RandomizationProviderTests+Next.' + 'should_return_integer', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+Constructor.' 
+ 'should_keep_initialized_data(System.Func`2[imG.Approx.Mutation.' + 'Target,System.Object],25)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.009 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+Constructor.' + 'should_keep_initialized_data(System.Func`2[imG.Approx.Mutation.' + 'Target,System.Object],"data\\\\red.png")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+DistanceTo.' + 'should_throw_if_dimensions_are_different(imG.Approx.Components.' + 'Drawing)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+DistanceTo.' + 'should_throw_if_dimensions_are_different(imG.Approx.Components.' + 'Drawing)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+DistanceTo.' + 'should_throw_if_dimensions_are_different(imG.Approx.Components.' + 'Drawing)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+DistanceTo.' + 'should_not_throw_if_dimensions_are_identical', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_not_resize_if_image_dimensions_are_over_or_equal_to_maxDimensi' + 'on(100)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_not_resize_if_image_dimensions_are_over_or_equal_to_maxDimensi' + 'on(50)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_set_ratio_to_correct_value_when_loading(10,0.2f)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' 
+ 'should_set_ratio_to_correct_value_when_loading(25,0.5f)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_set_ratio_to_correct_value_when_loading(99,1.0f)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_set_ratio_to_correct_value_when_loading(50,1.0f)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_load_dimensions_from_image', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_load_image_data', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+LoadImageData.' + 'should_resize_if_image_dimensions_are_over_maxDimension', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Mutation.TargetTests+Name.should_return_filename', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Clamp.' + 'should_return_max_value_between_original_and_min_value(1,1,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Clamp.' + 'should_return_max_value_between_original_and_min_value(1,0,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Clamp.' + 'should_return_max_value_between_original_and_min_value(1,10,10)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Clamp.' 
+ 'should_return_min_value_between_original_and_max_value(1,10,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Clamp.' + 'should_return_min_value_between_original_and_max_value(1,1,1)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Clamp.' + 'should_return_min_value_between_original_and_max_value(1,0,0)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Clamp.' + 'should_throw_if_min_is_above_max', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(-' + '101,10,20,19)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(16,' + '10,25,16)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(10,' + '10,25,10)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(25,' + '10,25,10)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Wrap.should_wrap_back_to_range(101,' + '10,20,11)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct2.xml', + test_file=None, + line=None, + class_name='', + test_name='imG.Approx.Tests.Tools.TestValues+Wrap.' 
+ 'should_throw_if_min_is_above_max', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.xml new file mode 100644 index 0000000..7e70f8b --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct2.xml @@ -0,0 +1,768 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.annotations new file mode 100644 index 0000000..7984814 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.annotations @@ -0,0 +1,67 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 22 tests pass in 4m 24s', + 'summary': + '22 tests\u2002\u2003\u200322 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '4m 24s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '13 suites\u2003\u2003\u205f\u20040 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/12MOw6AIBAFr' + '0KoLRSNhZcxBDVuFDELVMa7uwh+uzfzktn4AHNvecOKjHHrwUUoiTqP0oFZiEVdkaDPhV' + 'eIC1rrlfqZCVYy+S0GCfNH9IgGk0G/3MWwP8Eont7Jr9zJ75oyWoMjSIvZUfL9APCIHb/' + 'kAAAA\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 
0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 22 tests, see "Raw output" for the full list of tests.', + 'title': '22 tests found', + 'raw_details': + 'BlogEngine.Tests.Account.Login.InvalidLoginShouldFail\n' + 'BlogEngine.Tests.Account.Login.ValidLoginShouldPass\n' + 'BlogEngine.Tests.Comments.Comment.CanAddUpdateAndDeleteComment\n' + 'BlogEngine.Tests.FileSystem.Crud.CanWriteAndReadAppCodeDirectory\n' + 'BlogEngine.Tests.FileSystem.Crud.CanWriteAndReadAppDataDirectory\n' + 'BlogEngine.Tests.Navigation.CustomPages.CanNavigateToCustomAspxPage' + '\n' + 'BlogEngine.Tests.Navigation.CustomPages.CanNavigateToDefaultAspxPag' + 'eInSubDiretory\nBlogEngine.Tests.Navigation.SubBlog.MyTest\n' + 'BlogEngine.Tests.Navigation.SubBlogAggregation.MyTest\n' + 'BlogEngine.Tests.Packaging.Installer.CanInstallAndUninstallTheme\n' + 'BlogEngine.Tests.Posts.Post.CanCreateAndDeletePost\n' + 'BlogEngine.Tests.QuickNotes.Crud.ShouldBeAbleToCreateUpdateAndDelet' + 'eNote\n' + 'BlogEngine.Tests.QuickNotes.Navigation.AdminShouldSeeQuickNotesPane' + 'l\n' + 'BlogEngine.Tests.QuickNotes.Navigation.AnonymousUserShouldNotSeeQui' + 'ckNotesPanel\n' + 'BlogEngine.Tests.QuickNotes.Navigation.ShouldBeAbleBrowseThroughTab' + 's\nBlogEngine.Tests.QuickNotes.Posting.PublishQuickNoteAsPost\n' + 'BlogEngine.Tests.Quixote.Runner.RunAvatarTests\n' + 'BlogEngine.Tests.Quixote.Runner.RunPackagingTests\n' + 'BlogEngine.Tests.Quixote.Runner.RunPagerTests\n' + 'BlogEngine.Tests.Quixote.Runner.RunUrlRewriteNoExtensionsTests\n' + 'BlogEngine.Tests.Quixote.Runner.RunUrlRewriteTests\n' + 'BlogEngine.Tests.Users.AuthorProfile.CanAddUpdateAndDeleteUserProfi' + 'le' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.junit-xml new file mode 100644 index 0000000..a069bcd --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.junit-xml @@ -0,0 +1,170 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.results b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.results new file mode 100644 index 0000000..6a9c1f3 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.results @@ -0,0 +1,420 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=13, + suite_tests=22, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=264, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Account.Login', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Comments.Comment', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.FileSystem.Crud', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Navigation.CustomPages', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + 
name='BlogEngine.Tests.Navigation.SubBlog', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Navigation.SubBlogAggregation', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Packaging.Installer', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Posts.Post', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.QuickNotes.Crud', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.QuickNotes.Navigation', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.QuickNotes.Posting', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Quixote.Runner', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='BlogEngine.Tests.Users.AuthorProfile', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Account.Login.InvalidLoginShouldFail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.219 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Account.Login.ValidLoginShouldPass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.047 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Comments.Comment.CanAddUpdateAndDeleteComment', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=16.392 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.FileSystem.Crud.CanWriteAndReadAppCodeDirectory', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=60.395 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.FileSystem.Crud.CanWriteAndReadAppDataDirectory', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.016 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Navigation.CustomPages.CanNavigateToCustomAspxPage', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.578 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + 
test_name='BlogEngine.Tests.Navigation.CustomPages.' + 'CanNavigateToDefaultAspxPageInSubDiretory', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.375 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Navigation.SubBlog.MyTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Navigation.SubBlogAggregation.MyTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Packaging.Installer.CanInstallAndUninstallTheme', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=51.051 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Posts.Post.CanCreateAndDeletePost', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=33.346 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.QuickNotes.Crud.' + 'ShouldBeAbleToCreateUpdateAndDeleteNote', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.203 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.QuickNotes.Navigation.AdminShouldSeeQuickNotesPanel', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.719 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.QuickNotes.Navigation.' 
+ 'AnonymousUserShouldNotSeeQuickNotesPanel', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.063 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.QuickNotes.Navigation.ShouldBeAbleBrowseThroughTabs', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.032 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.QuickNotes.Posting.PublishQuickNoteAsPost', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=10.219 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Quixote.Runner.RunAvatarTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.813 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Quixote.Runner.RunPackagingTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=16.204 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Quixote.Runner.RunPagerTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=23.095 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Quixote.Runner.RunUrlRewriteNoExtensionsTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.188 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Quixote.Runner.RunUrlRewriteTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-correct3.xml', + test_file=None, + line=None, + class_name='', + test_name='BlogEngine.Tests.Users.AuthorProfile.CanAddUpdateAndDeleteUserProfile', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=22.049 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.xml new file mode 100644 index 0000000..8208041 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-correct3.xml @@ -0,0 +1,171 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-failure.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.annotations new file mode 100644 index 0000000..ab57926 --- /dev/null +++ 
b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.annotations @@ -0,0 +1,60 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 2 pass in 0s', + 'summary': + '3 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBBEr' + '0KoLfx0XoYQ1LhRwCxQGe/uykfo5s1M3s03OFfHZzZ0jLsA/ocloPRgDWFPSIP/pqlk4Y' + 'JSVIy1OOBq32KTcGZbKlZEi/mCwRTfF1td4mqL3Mgity5ltQZPkBNzu+TPC/n9SCLdAAA' + 'A\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-failure.xml\u2003[took 0s]', + 'title': 'UnitTests.MainClassTest.TestFailure failed', + 'raw_details': + ' Expected failure\n Expected: 30\n But was: 20\n at ' + 'UnitTests.MainClassTest.TestFailure () [0x00000] \n at <0x00000> ' + '\n at (wrapper managed-to-native) ' + 'System.Reflection.MonoMethod:InternalInvoke (object,object[])\n ' + 'at System.Reflection.MonoMethod.Invoke (System.Object obj, ' + 'BindingFlags invokeAttr, System.Reflection.Binder binder, ' + 'System.Object[] parameters, System.Globalization.CultureInfo ' + 'culture) [0x00000]' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': + 'UnitTests.MainClassTest.TestFailure\n' + 'UnitTests.MainClassTest.TestMethodUpdateValue\n' + 'UnitTests.MainClassTest.TestPropertyValue' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-failure.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.junit-xml new file mode 100644 index 0000000..3edf7e7 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.junit-xml @@ -0,0 +1,26 @@ + + + + + + + + + + + + + + at UnitTests.MainClassTest.TestFailure () [0x00000] + at <0x00000> <unknown method> + at (wrapper managed-to-native) System.Reflection.MonoMethod:InternalInvoke (object,object[]) + at System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags invokeAttr, System.Reflection.Binder binder, System.Object[] parameters, System.Globalization.CultureInfo culture) [0x00000] + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-failure.results b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.results new file mode 100644 index 0000000..b9a0357 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.results @@ -0,0 +1,67 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=3, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + 
name='UnitTests.MainClassTest', + tests=3, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-failure.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-failure.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestMethodUpdateValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-failure.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestFailure', + result='failure', + message=' Expected failure\n Expected: 30\n But was: 20\n', + content=' at UnitTests.MainClassTest.TestFailure () [0x00000] \n at ' + '<0x00000> \n at (wrapper managed-to-native) ' + 'System.Reflection.MonoMethod:InternalInvoke (object,object[])\n at ' + 'System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags ' + 'invokeAttr, System.Reflection.Binder binder, System.Object[] ' + 'parameters, System.Globalization.CultureInfo culture) [0x00000] \n', + stdout=None, + stderr=None, + time=0.092 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-failure.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.xml new file mode 100644 index 0000000..d9c5be8 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-failure.xml @@ -0,0 +1,32 @@ + + + + + + + + + + + + + + + + + at (wrapper managed-to-native) System.Reflection.MonoMethod:InternalInvoke (object,object[]) + at System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags invokeAttr, System.Reflection.Binder binder, System.Object[] parameters, System.Globalization.CultureInfo culture) [0x00000] +]]> + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.annotations new file mode 100644 index 0000000..e135167 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.annotations @@ -0,0 +1,66 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 9 pass in 1s', + 'summary': + '10 tests\u2002\u2003\u20039 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '1s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MQQ6AIAwEv' + '0I4e9CjfoYQhNgoYAqcjH8XEaG3zu52Lm7g0IEvbBoYDwligzWhjOBdxVzEUo0/iJCUys' + 
'ncgx3OHPSFkXDQf6ERPdYJJteE7019H3ddYWIrTGXKWwsxQ71Y2CS/HxbYkAffAAAA\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-healthReport.xml\u2003[took 0s]', + 'title': 'UnitTests.MainClassTest.TestFailure failed', + 'raw_details': + ' Expected failure\n Expected: 30\n But was: 20\n at ' + 'UnitTests.MainClassTest.TestFailure () [0x00000]\n at <0x00000> ' + '\n at (wrapper managed-to-native) ' + 'System.Reflection.MonoMethod:InternalInvoke (object,object[])\n ' + 'at System.Reflection.MonoMethod.Invoke (System.Object obj, ' + 'BindingFlags invokeAttr, System.Reflection.Binder binder, ' + 'System.Object[] parameters, System.Globalization.CultureInfo ' + 'culture) [0x00000]' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 10 tests, see "Raw output" for the full list of tests.', + 'title': '10 tests found', + 'raw_details': + 'UnitTests.MainClassTest.TestFailure\n' + 'UnitTests.MainClassTest.TestMethodUpdateValue\n' + 'UnitTests.MainClassTest.TestPropertyValue\n' + 'UnitTests.MainClassTest.TestPropertyValue1\n' + 'UnitTests.MainClassTest.TestPropertyValue2\n' + 'UnitTests.MainClassTest.TestPropertyValue3\n' + 'UnitTests.MainClassTest.TestPropertyValue4\n' + 'UnitTests.MainClassTest.TestPropertyValue5\n' + 'UnitTests.MainClassTest.TestPropertyValue6\n' + 'UnitTests.MainClassTest.TestPropertyValue7' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.junit-xml new file mode 100644 index 0000000..d80dd90 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.junit-xml @@ -0,0 +1,33 @@ + + + + + + + + + + + + + + at UnitTests.MainClassTest.TestFailure () [0x00000] + at <0x00000> <unknown method> + at (wrapper managed-to-native) System.Reflection.MonoMethod:InternalInvoke (object,object[]) + at System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags invokeAttr, System.Reflection.Binder binder, System.Object[] parameters, System.Globalization.CultureInfo culture) [0x00000] + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.results b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.results new file mode 100644 index 0000000..4aca78f --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.results @@ -0,0 +1,158 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=10, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=1, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.MainClassTest', + tests=10, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestMethodUpdateValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestFailure', + result='failure', + message=' Expected failure\n Expected: 30\n But was: 20\n', + content=' at UnitTests.MainClassTest.TestFailure () [0x00000]\n at ' + '<0x00000> \n at (wrapper managed-to-native) ' + 'System.Reflection.MonoMethod:InternalInvoke (object,object[])\n at ' + 'System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags ' + 'invokeAttr, System.Reflection.Binder binder, System.Object[] ' + 'parameters, System.Globalization.CultureInfo culture) [0x00000]\n', + stdout=None, + stderr=None, + time=0.092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue2', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue3', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue4', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue5', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue6', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-healthReport.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue7', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.xml new file mode 100644 index 0000000..4d0f8e9 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-healthReport.xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + at (wrapper managed-to-native) System.Reflection.MonoMethod:InternalInvoke (object,object[]) + at System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags invokeAttr, System.Reflection.Binder binder, System.Object[] parameters, System.Globalization.CultureInfo culture) [0x00000] +]]> + + + + + + + + + + + + + + + + \ No newline at end of file diff --git 
a/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.annotations new file mode 100644 index 0000000..1ae3d02 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.annotations @@ -0,0 +1,56 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 1 tests pass, 2 skipped in 0s', + 'summary': + '3 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20032 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MSw6AIAxEr' + '0JYu/Cz8zKGIMRGBdPCynh3KyKymzfTvlNa2AzJUXSNkBQhFJgjqgDeMbaMPIRnGr48Ud' + 'Q63+ZihYOLvhRWwVa/TwbRY24wus/3xFr38m9LXMkS1y7t9x0CQ06CFiWvGx5uWF7dAAA' + 'A\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 2 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '2 skipped tests found', + 'raw_details': + 'UnitTests.OtherMainClassTest.TestIgnored\n' + 'UnitTests.OtherMainClassTest.TestIgnoredWithText' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': + 'UnitTests.OtherMainClassTest.TestIgnored\n' + 'UnitTests.OtherMainClassTest.TestIgnoredWithText\n' + 'UnitTests.OtherMainClassTest.TestPropertyValue' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.junit-xml new file mode 100644 index 0000000..c7f446e --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.junit-xml @@ -0,0 +1,24 @@ + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.results b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.results new file mode 100644 index 0000000..2ac5fad --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.results @@ -0,0 +1,62 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=3, + suite_skipped=2, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='OtherMainClassTest', + tests=3, + skipped=2, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-ignored.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.OtherMainClassTest.TestIgnored', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit3/jenkins/NUnit-ignored.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.OtherMainClassTest.TestIgnoredWithText', + result='skipped', + message='Dont do this', + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-ignored.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.OtherMainClassTest.TestPropertyValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.xml new file mode 100644 index 0000000..60690e7 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-ignored.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.annotations new file mode 100644 index 0000000..8859c1d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.annotations @@ -0,0 +1,45 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 6 tests pass in 35s', + 'summary': + '6 tests\u2002\u2003\u20036 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '35s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMwQqAIBAFf' + '0U8dyiiDv1MiBktpcaqp+jf20pLb2/mwRx8hk05PrCmYtwF8B9MAYUHawjbjpgef3992q' + 'MLUpZihZ1E/YlZwFYIhWgxGgwm9e6Z517+aw9nsYfzlrRagyeIi7lF8PMC7eTeEN4AAAA' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 6 tests, see "Raw output" for the full list of tests.', + 'title': '6 tests found', + 'raw_details': + 'testChangePassword\ntestChangePasswordFailEmptyForm\n' + 'testChangePasswordFailNewPasswordKO\n' + 'testChangePasswordFailNewPasswordNotRepeated\n' + 'testChangePasswordFailNewPasswordTooShort\n' + 'testChangePasswordFailOldPasswordKO' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.junit-xml new file mode 100644 index 0000000..7c4df81 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.junit-xml @@ -0,0 +1,13 @@ + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.results b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.results new file mode 100644 index 0000000..d9946bb --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.results @@ -0,0 +1,101 @@ 
+publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=6, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=35, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='AdministrationPasswordTest', + tests=6, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue1077.xml', + test_file=None, + line=None, + class_name='', + test_name='testChangePasswordFailEmptyForm', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.065 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue1077.xml', + test_file=None, + line=None, + class_name='', + test_name='testChangePasswordFailOldPasswordKO', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.066 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue1077.xml', + test_file=None, + line=None, + class_name='', + test_name='testChangePasswordFailNewPasswordTooShort', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.049 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue1077.xml', + test_file=None, + line=None, + class_name='', + test_name='testChangePasswordFailNewPasswordNotRepeated', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue1077.xml', + test_file=None, + line=None, + class_name='', + test_name='testChangePasswordFailNewPasswordKO', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.066 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue1077.xml', + test_file=None, + line=None, + class_name='', + test_name='testChangePassword', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=10.1 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.xml new file mode 100644 index 0000000..ef13506 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue1077.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue17521.exception b/python/test/files/nunit/nunit3/jenkins/NUnit-issue17521.exception new file mode 100644 index 0000000..9ef72ea --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue17521.exception @@ -0,0 +1 @@ +ParseError: file='files/nunit/nunit3/jenkins/NUnit-issue17521.xml', message='Char 0x0 out of allowed range, line 33, column 16 (NUnit-issue17521.xml, line 33)', line=None, column=None, exception=XMLSyntaxError('Char 0x0 out of allowed range, line 33, column 16') \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue17521.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue17521.xml new file mode 100644 index 0000000..a91fd60 Binary files /dev/null and b/python/test/files/nunit/nunit3/jenkins/NUnit-issue17521.xml differ diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.annotations new file mode 100644 index 0000000..7a7fadd --- /dev/null +++ 
b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.annotations @@ -0,0 +1,62 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 1 tests pass, 1 skipped in 6s', + 'summary': + '1 files\u2004\u20032 suites\u2004\u2003\u20026s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '2 tests\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '1 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '0 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n2 runs\u2006\u2003' + '2 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '0 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMMQ6AIBAEv' + '0KoLdTCws8YghAvIpgDKuPfPREUu505mINrMMrzkXUN4z5CSNATzBFFAGcJB0I6hHJKe/' + 'JRyvwxixX2n9ACDIn2FQrRYTYYbends+Q+fmrlaR1LXLek2zYIBHkxvwh+XlEX1VPdAAA' + 'A\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'AddTwoNumbers' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 2 tests, see "Raw output" for the full list of tests.', + 'title': '2 tests found', + 'raw_details': + 'AddTwoNumbers\n' + 'SubSmokeTest("Geo","Geonw","dev1234567",System.String[])' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.junit-xml new file mode 100644 index 0000000..95adb0c --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.junit-xml @@ -0,0 +1,70 @@ + + + + + + + + + + + + + + + Given I have entered 50 into the calculator +No matching step definition found for the step. Use the following code to create one: + [Given(@"I have entered (.*) into the calculator")] +public void GivenIHaveEnteredIntoTheCalculator(int p0) +{ + ScenarioContext.Current.Pending(); +} + +And I have entered 70 into the calculator +No matching step definition found for the step. Use the following code to create one: + [Given(@"I have entered (.*) into the calculator")] +public void GivenIHaveEnteredIntoTheCalculator(int p0) +{ + ScenarioContext.Current.Pending(); +} + +When I press add +No matching step definition found for the step. Use the following code to create one: + [When(@"I press add")] +public void WhenIPressAdd() +{ + ScenarioContext.Current.Pending(); +} + +Then the result should be 120 on the screen +No matching step definition found for the step. 
Use the following code to create one: + [Then(@"the result should be (.*) on the screen")] +public void ThenTheResultShouldBeOnTheScreen(int p0) +{ + ScenarioContext.Current.Pending(); +} + + + + + + + + + + Given I have an active Sub user Geo with username Geonw and password dev1234567 +done: LoginSteps.GivenIHaveAnActiveSubUserWithUsernameAndPassword("Geo", "Geonw", "dev1234567") (0.0s) +And he is on Sub login page +done: LoginSteps.GivenHeIsOnSubLoginPage() (0.6s) +When he logs in using his credentials +done: LoginSteps.WhenHeLogsInUsingHisCredentials() (1.8s) +Then he should land on the Accounts homepage +done: LoginSteps.ThenHeShouldLandOnTheAccountsHomePage() (0.3s) + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.results b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.results new file mode 100644 index 0000000..f61b5ee --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.results @@ -0,0 +1,92 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=2, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=6, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Automation.Features.CMFeature', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='Automation.Features.SubFeature.SubSmokeTest', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue33493.xml', + test_file=None, + line=None, + class_name='', + test_name='AddTwoNumbers', + result='skipped', + message='No matching step definition found for one or more steps.\nusing ' + 'System;\nusing TechTalk.SpecFlow;\n\nnamespace MyNamespace\n{\n ' + '[Binding]\n public class StepDefinitions\n {\n ' + '[Given(@"I have entered (.*) into the calculator")]\npublic void ' + 'GivenIHaveEnteredIntoTheCalculator(int p0)\n{\n ' + 'ScenarioContext.Current.Pending();\n}\n\n [When(@"I press ' + 'add")]\npublic void WhenIPressAdd()\n{\n ' + 'ScenarioContext.Current.Pending();\n}\n\n [Then(@"the result ' + 'should be (.*) on the screen")]\npublic void ' + 'ThenTheResultShouldBeOnTheScreen(int p0)\n{\n ' + 'ScenarioContext.Current.Pending();\n}\n }\n}\n', + content=None, + stdout='Given I have entered 50 into the calculator\nNo matching step ' + 'definition found for the step. Use the following code to create one:\n' + ' [Given(@"I have entered (.*) into the calculator")]\npublic ' + 'void GivenIHaveEnteredIntoTheCalculator(int p0)\n{\n ' + 'ScenarioContext.Current.Pending();\n}\n\nAnd I have entered 70 into ' + 'the calculator\nNo matching step definition found for the step. Use ' + 'the following code to create one:\n [Given(@"I have entered ' + '(.*) into the calculator")]\npublic void ' + 'GivenIHaveEnteredIntoTheCalculator(int p0)\n{\n ' + 'ScenarioContext.Current.Pending();\n}\n\nWhen I press add\nNo ' + 'matching step definition found for the step. Use the following code ' + 'to create one:\n [When(@"I press add")]\npublic void ' + 'WhenIPressAdd()\n{\n ScenarioContext.Current.Pending();\n}\n\n' + 'Then the result should be 120 on the screen\nNo matching step ' + 'definition found for the step. 
Use the following code to create one:\n' + ' [Then(@"the result should be (.*) on the screen")]\npublic ' + 'void ThenTheResultShouldBeOnTheScreen(int p0)\n{\n ' + 'ScenarioContext.Current.Pending();\n}\n\n', + stderr=None, + time=0.186579 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue33493.xml', + test_file=None, + line=None, + class_name='', + test_name='SubSmokeTest("Geo","Geonw","dev1234567",System.String[])', + result='success', + message=None, + content=None, + stdout='Given I have an active Sub user Geo with username Geonw and password ' + 'dev1234567\ndone: ' + 'LoginSteps.GivenIHaveAnActiveSubUserWithUsernameAndPassword("Geo", ' + '"Geonw", "dev1234567") (0.0s)\nAnd he is on Sub login page\ndone: ' + 'LoginSteps.GivenHeIsOnSubLoginPage() (0.6s)\nWhen he logs in using ' + 'his credentials\ndone: LoginSteps.WhenHeLogsInUsingHisCredentials() ' + '(1.8s)\nThen he should land on the Accounts homepage\ndone: ' + 'LoginSteps.ThenHeShouldLandOnTheAccountsHomePage() (0.3s)\n', + stderr=None, + time=6.40203 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.xml new file mode 100644 index 0000000..27a88c6 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue33493.xml @@ -0,0 +1,121 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue44527.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-issue44527.annotations new file mode 100644 index 0000000..ec2363d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue44527.annotations @@ -0,0 +1,5591 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '140 fail, 6 pass in 14m 11s', + 'summary': + '\u205f\u2004\u205f\u20041 files\u2004\u2003155 suites\u2004\u2003\u2002' + '14m 11s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '146 tests\u2003\u205f\u2004\u205f\u20046 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '140 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n150 runs\u2006\u2003\u205f\u2004\u205f\u2004' + '6 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '144 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02NSw6AIAwFr' + '0JYu9BEjPEyhqDExg+mwMp4dysfZdeZ175eXMM2Wz6wpmLcenARhCCcPEoH5iDRizen0I' + 'W47TKN1itFqhArnCTqT2gJWzj61YxoMC2hP+LLDGVl5L8x8FfYZlP2KbPv4AjSxOwi+f0' + 'AEAq2iOkAAAA=\n', + 'annotations': [ + { + 'path': 
'/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 31s]', + 'title': + 'UI_MyTask_MR_Grid_GridViewValidation(True,"chrome","/#/tasks/access' + '-certification/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir4620_214" + "43}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "92de3e3859b5e9d2cb692461ba367ced\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks._My_Requests.Grid.GridValidation.UI" + "_MyTask_MR_Grid_GridViewValidation(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\My Requests\\Grid\\GridValidation.cs:line 29\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'All 2 
runs failed: ' + 'UI_MyTask_MR_Grid_Paging(True,"chrome","/#/tasks/access-certificati' + 'on/overview")', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks._My_Requests.Grid.GridValidation.UI' + '_MyTask_MR_Grid_Paging(Boolean excute, String browserName, String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\My Requests\\Grid\\GridValidation.cs:line 65' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTasks_MR_Paging_ShowPerPage(True,"chrome","/#/tasks/access-req' + 'uest/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir10904_14" + "349}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "8d83262a43a60462d9eaed6fd8eec81c\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + 
"OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks._My_Requests.Paging.PagingValidatio" + "n.UI_MyTasks_MR_Paging_ShowPerPage(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\My Requests\\Paging\\PagingValidation.cs:line 30\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_OE_Grid_GridViewValidation(True,"chrome","/#/tasks/access' + '-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks._Owner_Election.Grid.GridValidation' + '.UI_MyTask_OE_Grid_GridViewValidation(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Owner Election\\Grid\\GridValidation.cs:line 28' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTasks_OE_ViewOwnersElection("/#/tasks/owners-election/overview' + '") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at ' + 
'MyCompanyUiSettings.Bl.OwnersElectionPage.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Owne' + 'rsElectionPage.cs:line 175\n at ' + 'MyCompanyUiSettings.Bl.OwnersElectionPage..ctor(IWebDriver driver) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Owne' + 'rsElectionPage.cs:line 145\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks._Owner_Election.Paging.PagingValida' + 'tion.UI_MyTasks_OE_ViewOwnersElection(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Owner Election\\Paging\\PagingValidation.cs:line 40' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'All 2 runs failed: ' + 'UI_MyTask_MR_Progress_Approve(True,"chrome","/#/tasks/access-certif' + 'ication/overview")', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir12612_29" + "006}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "d099a8dab51ddac1ad57f17fd01208dc\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks._Owner_Election.Progress.ProgressVa" + "lidation.UI_MyTask_MR_Progress_Approve(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Owner Election\\Progress\\ProgressValidation.cs:line 26\n" + "--NoSuchElementException\n at " + 
"OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'All 2 runs failed: ' + 'UI_MyTask_MR_Progress_Reject(True,"chrome","/#/tasks/access-certifi' + 'cation/overview")', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks._Owner_Election.Progress.ProgressVa' + 'lidation.UI_MyTask_MR_Progress_Reject(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Owner Election\\Progress\\ProgressValidation.cs:line 74' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ApproveAll_AddCommentYes_TC2689("/#/t' + 'asks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Approve_All.ApproveAll.UI_MyTask_AC' + '_ACIS_BulkActions_ApproveAll_AddCommentYes_TC2689(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Approve 
All\\ApproveAll.cs:line 29' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ApproveAll_CommittedRecoredNotAffecte' + 'd_TC2691("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Approve_All.ApproveAll.UI_MyTask_AC' + '_ACIS_BulkActions_ApproveAll_CommittedRecoredNotAffected_TC2691(Str' + 'ing url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Approve All\\ApproveAll.cs:line 75' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ApproveAll_WithExistingSaved_TC2690("' + '/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Approve_All.ApproveAll.UI_MyTask_AC' + '_ACIS_BulkActions_ApproveAll_WithExistingSaved_TC2690(String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Approve All\\ApproveAll.cs:line 47' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 11s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_Browsers_Chrome_TC2692("/#/tasks/acce' + 'ss-certification/overview") failed', + 'raw_details': + ' Expected: True\n But was: 
False\nat ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Browsers.Browsers.UI_MyTask_AC_ACIS' + '_BulkActions_Browsers_Chrome_TC2692(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Browsers\\Browsers.cs:line 41' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 6s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ClearAll_ApproveAllNo_TC2707("/#/task' + 's/access-certification/overview") failed', + 'raw_details': + 'System.Exception : Base Class - Click(string xpath) method threw ' + 'an exception : \nunknown error: Element is not ' + 'clickable at point (80, 241). Other element would receive the ' + 'click: \n (Session info: chrome=58.0.3029.110)\n ' + '(Driver info: chromedriver=2.29.461591 ' + '(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT ' + '6.3.9600 x86_64) (WARNING: The server did not provide any ' + 'stacktrace information)\nCommand duration or timeout: 50 ' + 'milliseconds\nBuild info: version: \'3.1.0\', revision: ' + '\'86a5d70\', time: \'2017-02-16 07:57:44 -0800\'\nSystem info: ' + 'host: \'BRC-JENKINS2-AU\', ip: \'172.16.61.17\', os.name: ' + '\'Windows Server 2012 R2\', os.arch: \'x86\', os.version: \'6.3\', ' + 'java.version: \'1.8.0_66\'\nDriver info: ' + 'org.openqa.selenium.chrome.ChromeDriver\nCapabilities ' + '[{applicationCacheEnabled=false, rotatable=false, ' + 'mobileEmulationEnabled=false, networkConnectionEnabled=false, ' + 'chrome={chromedriverVersion=2.29.461591 ' + '(62ebf098771772160f391d75e589dc567915b233), ' + 'userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2476_115' + '8}, takesHeapSnapshot=true, pageLoadStrategy=normal, ' + 'databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, ' + 'version=58.0.3029.110, platform=WIN8_1, ' + 'browserConnectionEnabled=false, nativeEvents=true, ' + 'acceptSslCerts=true, locationContextEnabled=true, ' + 'webStorageEnabled=true, browserName=chrome, takesScreenshot=true, ' + 'javascriptEnabled=true, cssSelectorsEnabled=true, ' + 'unexpectedAlertBehaviour=}]\nSession ID: ' + '5cb1002259d4ed7ed523ba2e9e0cea02\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Bl.Base.Click(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 323\n at MyCompanyUiSettings.Bl.Base.Click(String ' + 'xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 330\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACI' + 'S_BulkActions_ClearAll_ApproveAllNo_TC2707(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 90' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 
'UI_MyTask_AC_ACIS_BulkActions_ClearAll_CommittedRecoredNotAffected_' + 'TC2708("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACI' + 'S_BulkActions_ClearAll_CommittedRecoredNotAffected_TC2708(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 102' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentNo_TC2705("/#/tas' + 'ks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACI' + 'S_BulkActions_RejectAll_AddCommentNo_TC2705(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 13' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentYes_TC2706("/#/ta' + 'sks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se 
errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACI' + 'S_BulkActions_RejectAll_AddCommentYes_TC2706(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 32' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_ClearAllSelectio' + 'n_TC2712("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Examine_Text_and_Layout.ExaminTextA' + 'ndLayout.UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_ClearAl' + 'lSelection_TC2712(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Examine Text and ' + 'Layout\\ExaminTextAndLayout.cs:line 67' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMenu_TC2709(' + '"/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 
'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Examine_Text_and_Layout.ExaminTextA' + 'ndLayout.UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMen' + 'u_TC2709(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Examine Text and ' + 'Layout\\ExaminTextAndLayout.cs:line 15' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_RejectAllSelecti' + 'on_TC2711("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Examine_Text_and_Layout.ExaminTextA' + 'ndLayout.UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_RejectA' + 'llSelection_TC2711(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Examine Text and ' + 'Layout\\ExaminTextAndLayout.cs:line 50' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 35s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMenu_TC2713(' + '"/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.Exception : Base Class - FindElementsOnPage(string xpath) - ' + '1 parameter - method threw an exception : \nTimed out after 30 ' + 'seconds\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri' + 'ng exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at ' + 'MyCompanyUiSettings.Bl.Base.WaitForVisibleElement(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 297\n at ' + 'MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 247\n at ' + 'MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 253\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Load.Load.UI_MyTask_AC_ACIS_BulkAct' + 'ions_ExamineTextAndLayout_MainMenu_TC2713(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Load\\Load.cs:line 15' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMenu_TC2714(' + '"/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Load.Load.UI_MyTask_AC_ACIS_BulkAct' + 'ions_ExamineTextAndLayout_MainMenu_TC2714(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Load\\Load.cs:line 34' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 35s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentNo_TC2715("/#/tas' + 'ks/access-certification/overview") failed', + 'raw_details': + 'System.Exception : Base Class - FindElementsOnPage(string xpath) - ' + '1 parameter - method threw an exception : \nTimed out after 30 ' + 'seconds\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri' + 'ng exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at ' + 'MyCompanyUiSettings.Bl.Base.WaitForVisibleElement(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 297\n at ' + 'MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 247\n at ' + 'MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 253\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_' + 'ACIS_BulkActions_RejectAll_AddCommentNo_TC2715(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' 
Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 14' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentYes_TC2716("/#/ta' + 'sks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_' + 'ACIS_BulkActions_RejectAll_AddCommentYes_TC2716(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 29' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_RejectAll_CommittedRecoredNotAffected' + '_TC2718("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_' + 'ACIS_BulkActions_RejectAll_CommittedRecoredNotAffected_TC2718(Strin' + 'g url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 75' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_RejectAll_WithExistingSaved_TC2717("/' + 
'#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_' + 'ACIS_BulkActions_RejectAll_WithExistingSaved_TC2717(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 47' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_BulkActions_Saving_Saving_IsSynchronous_NoOtherAc' + 'tionCanBeTaken_2722("/#/tasks/access-certification/overview") ' + 'failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Bulk_Acions.Saving.Saving.UI_MyTask_AC_ACIS_Bul' + 'kActions_Saving_Saving_IsSynchronous_NoOtherActionCanBeTaken_2722(S' + 'tring url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Saving\\Saving.cs:line 27' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 4s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumApproveAll_T' + 'C2741("/#/tasks/access-certification/overview") failed', + 'raw_details': + "System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: 
The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 65 " + "milliseconds\nBuild info: version: '3.1.0', revision: '86a5d70', " + "time: '2017-02-16 07:57:44 -0800'\nSystem info: host: " + "'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server " + "2012 R2', os.arch: 'x86', os.version: '6.3', java.version: " + "'1.8.0_66'\nDriver info: org.openqa.selenium.chrome.ChromeDriver\n" + "Capabilities [{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir4700_142" + "37}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "0501eda8a3e393ab97da9ab3839ea770\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi" + "cation_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.Approve" + "AllGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumApp" + "roveAll_TC2741(String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Certification\\Access Certification Inner " + "Screen\\Chart View\\Chart Menu\\Approve " + "All\\Graph\\ApproveAllGraph.cs:line 15" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedApproveA' + 'll_TC11159("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.Approve' + 'AllGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprove' + 'dApproveAll_TC11159(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification 
Inner ' + 'Screen\\Chart View\\Chart Menu\\Approve ' + 'All\\Graph\\ApproveAllGraph.cs:line 65' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionApprove' + 'All_TC2744("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.Approve' + 'AllGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActi' + 'onApproveAll_TC2744(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Approve ' + 'All\\Graph\\ApproveAllGraph.cs:line 39' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectApproveAll' + '_TC11160("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.Approve' + 'AllGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectA' + 'pproveAll_TC11160(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Approve ' + 'All\\Graph\\ApproveAllGraph.cs:line 93' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 
'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 4s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumClearAll_TC2' + '749("/#/tasks/access-certification/overview") failed', + 'raw_details': + "System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 66 " + "milliseconds\nBuild info: version: '3.1.0', revision: '86a5d70', " + "time: '2017-02-16 07:57:44 -0800'\nSystem info: host: " + "'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server " + "2012 R2', os.arch: 'x86', os.version: '6.3', java.version: " + "'1.8.0_66'\nDriver info: org.openqa.selenium.chrome.ChromeDriver\n" + "Capabilities [{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir6552_284" + "03}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "5646c3ae0ba7663483cda0a3894fe2a9\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi" + "cation_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllG" + "raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumClearAl" + "l_TC2749(String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Certification\\Access Certification Inner " + "Screen\\Chart View\\Chart Menu\\Clear " + "All\\Graph\\ClearAllGraph.cs:line 15" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedClearAll' + '_TC2750("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllG' + 'raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedCle' + 'arAll_TC2750(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Clear ' + 'All\\Graph\\ClearAllGraph.cs:line 46' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionClearAl' + 'l_TC2752("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllG' + 'raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionCl' + 'earAll_TC2752(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Clear ' + 'All\\Graph\\ClearAllGraph.cs:line 112' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectedClearAll' + '_TC2751("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllG' + 
'raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectedCle' + 'arAll_TC2751(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Clear ' + 'All\\Graph\\ClearAllGraph.cs:line 79' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 4s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_ColumnFiltering_FilterFurther' + 'By_FilterBy_AllPossibleFields_TC2771("/#/tasks/access-certification' + '/overview") failed', + 'raw_details': + "System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 61 " + "milliseconds\nBuild info: version: '3.1.0', revision: '86a5d70', " + "time: '2017-02-16 07:57:44 -0800'\nSystem info: host: " + "'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server " + "2012 R2', os.arch: 'x86', os.version: '6.3', java.version: " + "'1.8.0_66'\nDriver info: org.openqa.selenium.chrome.ChromeDriver\n" + "Capabilities [{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir32_9833}" + ", takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "258bbe17298009e5e47efcf485ebccd3\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi" + "cation_Inner_Screen.Chart_View.Chart_Menu.Column_Filtering.Filter_f" + "urther_by.Filter_By.FilterBy.UI_MyTask_AC_ACIS_ChartView_ChartMenu_" + "ColumnFiltering_FilterFurtherBy_FilterBy_AllPossibleFields_TC2771(S" + "tring url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Certification\\Access Certification Inner " + "Screen\\Chart View\\Chart Menu\\Column Filtering\\Filter further " + "by\\Filter By\\FilterBy.cs:line 106" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_ColumnFiltering_FilterFurther' + 'By_FilterBy_OneColumnOutOfManyWithAlreadyExistingFilters_TC2768("/#' + '/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated 
sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Column_Filtering.Filter_f' + 'urther_by.Filter_By.FilterBy.UI_MyTask_AC_ACIS_ChartView_ChartMenu_' + 'ColumnFiltering_FilterFurtherBy_FilterBy_OneColumnOutOfManyWithAlre' + 'adyExistingFilters_TC2768(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Column Filtering\\Filter further ' + 'by\\Filter By\\FilterBy.cs:line 54' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_ColumnFiltering_FilterFurther' + 'By_FilterBy_OneColumnOutOfManyWithNoExistingFilters_TC2767("/#/task' + 's/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Column_Filtering.Filter_f' + 'urther_by.Filter_By.FilterBy.UI_MyTask_AC_ACIS_ChartView_ChartMenu_' + 'ColumnFiltering_FilterFurtherBy_FilterBy_OneColumnOutOfManyWithNoEx' + 'istingFilters_TC2767(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Column Filtering\\Filter further ' + 'by\\Filter By\\FilterBy.cs:line 13' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 4s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithOnlyPen' + 'dingForAction_TC2753("/#/tasks/access-certification/overview") ' + 'failed', + 'raw_details': + "System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n 
(Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 65 " + "milliseconds\nBuild info: version: '3.1.0', revision: '86a5d70', " + "time: '2017-02-16 07:57:44 -0800'\nSystem info: host: " + "'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server " + "2012 R2', os.arch: 'x86', os.version: '6.3', java.version: " + "'1.8.0_66'\nDriver info: org.openqa.selenium.chrome.ChromeDriver\n" + "Capabilities [{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2992_316" + "86}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "8397ed2522698ddccb6b0aa573d920e9\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi" + "cation_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColum" + "n.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithOnlyP" + "endingForAction_TC2753(String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Certification\\Access Certification Inner " + "Screen\\Chart View\\Chart Menu\\Entire " + "Column\\EntireColumn.cs:line 16" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPending' + 'ForActionAndUncommittedApproved_TC2754("/#/tasks/access-certificati' + 'on/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColum' + 
'n.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPendi' + 'ngForActionAndUncommittedApproved_TC2754(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire ' + 'Column\\EntireColumn.cs:line 57' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPending' + 'ForActionAndUncommittedReject_TC2755("/#/tasks/access-certification' + '/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColum' + 'n.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPendi' + 'ngForActionAndUncommittedReject_TC2755(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire ' + 'Column\\EntireColumn.cs:line 83' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithPendingForActionApp' + 'rovedCommittedAndRejectedCommitted_TC2758("/#/tasks/access-certific' + 'ation/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColum' + 'n.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithPendingForActionA' + 'pprovedCommittedAndRejectedCommitted_TC2758(String url) in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire ' + 'Column\\EntireColumn.cs:line 148' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithUncommittedApproved' + 'AndUncommittedReject_TC2756("/#/tasks/access-certification/overview' + '") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColum' + 'n.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithUncommittedApprov' + 'edAndUncommittedReject_TC2756(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire ' + 'Column\\EntireColumn.cs:line 111' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 4s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_ColumnEntireColumn_TC79' + '37_TC7927("/#/tasks/access-certification/overview") failed', + 'raw_details': + "System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 67 " + "milliseconds\nBuild info: version: '3.1.0', revision: '86a5d70', " + "time: '2017-02-16 07:57:44 -0800'\nSystem info: host: " + "'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server " + "2012 R2', os.arch: 'x86', os.version: '6.3', java.version: " + "'1.8.0_66'\nDriver info: org.openqa.selenium.chrome.ChromeDriver\n" + "Capabilities [{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2696_148" + "36}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + 
"webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "6a683eff25d0c058e04394158f5d2245\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi" + "cation_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_" + "ACIS_ChartView_ChartMenu_Menus_ColumnEntireColumn_TC7937_TC7927(Str" + "ing url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Certification\\Access Certification Inner " + "Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 58" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_ColumnPendingAcions_TC7' + '938("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_' + 'ACIS_ChartView_ChartMenu_Menus_ColumnPendingAcions_TC7938(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 100' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_FilterFurtherBy_TC7939(' + '"/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_' + 'ACIS_ChartView_ChartMenu_Menus_FilterFurtherBy_TC7939(String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 140' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_TakeActionOn_TC7936("/#' + '/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_' + 'ACIS_ChartView_ChartMenu_Menus_TakeActionOn_TC7936(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 13' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 4s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumRejectAll_TC' + '2763("/#/tasks/access-certification/overview") failed', + 'raw_details': + "System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 60 " + "milliseconds\nBuild info: version: '3.1.0', revision: '86a5d70', " + "time: '2017-02-16 07:57:44 -0800'\nSystem info: host: " + "'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server " + "2012 R2', os.arch: 'x86', os.version: '6.3', java.version: " + "'1.8.0_66'\nDriver info: org.openqa.selenium.chrome.ChromeDriver\n" + "Capabilities [{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir10404_20" + "818}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + 
"databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "50f2dfc6d36fd64051d143d025dc8e53\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi" + "cation_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAl" + "lGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumRejec" + "tAll_TC2763(String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Certification\\Access Certification Inner " + "Screen\\Chart View\\Chart Menu\\Reject " + "All\\Graph\\RejectAllGraph.cs:line 15" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedRejectAl' + 'l_TC2765("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAl' + 'lGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedR' + 'ejectAll_TC2765(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Reject ' + 'All\\Graph\\RejectAllGraph.cs:line 65' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionRejectA' + 'll_TC2764("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' 
+ 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAl' + 'lGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForAction' + 'RejectAll_TC2764(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Reject ' + 'All\\Graph\\RejectAllGraph.cs:line 39' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectRejectAll_' + 'TC2766("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAl' + 'lGraph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectRej' + 'ectAll_TC2766(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Reject ' + 'All\\Graph\\RejectAllGraph.cs:line 93' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 4s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_AllColumnsAvailab' + 'le_TC2793("/#/tasks/access-certification/overview") failed', + 'raw_details': + "System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 61 " + "milliseconds\nBuild info: version: '3.1.0', revision: '86a5d70', " + "time: '2017-02-16 07:57:44 -0800'\nSystem info: host: " + "'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server " + "2012 R2', os.arch: 'x86', os.version: '6.3', java.version: " + 
"'1.8.0_66'\nDriver info: org.openqa.selenium.chrome.ChromeDriver\n" + "Capabilities [{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir3796_318" + "36}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "840df673591317f43b8304ab9db74078\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi" + "cation_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_" + "AC_ACIS_ChartView_ChartMenu_LookAndFeel_AllColumnsAvailable_TC2793(" + "String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Certification\\Access Certification Inner " + "Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 16" + } + ] + } + }, + { + 'output': { + 'title': '140 fail, 6 pass in 14m 11s', + 'summary': + '\u205f\u2004\u205f\u20041 files\u2004\u2003155 suites\u2004\u2003\u2002' + '14m 11s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '146 tests\u2003\u205f\u2004\u205f\u20046 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '140 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n150 runs\u2006\u2003\u205f\u2004\u205f\u2004' + '6 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '144 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02NSw6AIAwFr' + '0JYu9BEjPEyhqDExg+mwMp4dysfZdeZ175eXMM2Wz6wpmLcenARhCCcPEoH5iDRizen0I' + 'W47TKN1itFqhArnCTqT2gJWzj61YxoMC2hP+LLDGVl5L8x8FfYZlP2KbPv4AjSxOwi+f0' + 'AEAq2iOkAAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_LongNameGetsTC279' + '5("/#/tasks/access-certification/overview") failed', + 'raw_details': + 
'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_' + 'AC_ACIS_ChartView_ChartMenu_LookAndFeel_LongNameGetsTC2795(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 55' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_TableViewTC2799("' + '/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_' + 'AC_ACIS_ChartView_ChartMenu_LookAndFeel_TableViewTC2799(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 99' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_TextAndColorsTC27' + '94("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 
parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_' + 'AC_ACIS_ChartView_ChartMenu_LookAndFeel_TextAndColorsTC2794(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 34' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC2796_TC2' + '772("/#/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_' + 'AC_ACIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC2796_TC2772(String' + ' url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 75' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC7926("/#' + '/tasks/access-certification/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifi' + 'cation_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_' + 'AC_ACIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC7926(String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 121' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_FiltersValidation(True,"chrome","/#/tasks/access-certi' + 'fication/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_DataGrid_DataDisplay_TC2101(True,"chrome","/#/tasks/ac' + 'cess-certification/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_DataGrid_Header_TC2100(True,"chrome","/#/tasks/access-' + 'certification/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_DataGrid_Navigation_TC2099(True,"chrome","/#/tasks/acc' + 'ess-certification/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_DataGrid_Paging_TC2102(True,"chrome","/#/tasks/access-' + 'certification/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_Grid_Grid_Validation(True,"chrome","#/tasks/access-cer' + 'tification/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_TwoUsersTwoApplicationsValidation(True,"chrome","/#/ta' + 'sks/access-certification/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 31s]', + 'title': + 'All 2 runs failed: ' + 'UI_MyTasks_AR_Paging_ShowPerPage(True,"chrome","/#/tasks/access-req' + 'uest/overview")', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 
'UI_MyTask_AC_Progress_Approve(True,"chrome","/#/tasks/access-certif' + 'ication/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AC_Progress_Reject(True,"chrome","/#/tasks/access-certifi' + 'cation/overview") failed', + 'raw_details': 'OneTimeSetUp: No suitable constructor was found' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTask_AR_Filters_FilterLayout(True,"chrome","/#/tasks/access-ce' + 'rtification/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 15 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir11804_16" + "895}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "29b4b9836d0675d3828a94e2f11cf9d7\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Filters.FiltersValid" + "ation.UI_MyTask_AR_Filters_FilterLayout(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Request\\Filters\\FiltersValidation.cs:line 29\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + 
"driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AR_Filters_FiltersFunctionality(True,"chrome","/#/tasks/a' + 'ccess-request/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Filters.FiltersValid' + 'ation.UI_MyTask_AR_Filters_FiltersFunctionality(Boolean excute, ' + 'String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Filters\\FiltersValidation.cs:line 83' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTask_AR_Grid_FilterVAlidates(True,"chrome","/#/tasks/access-ce' + 'rtification/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 15 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir12972_27" + "801}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + 
"version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "fca88dd0490c464a5ded2f16849929d8\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Grid.GridValidation." + "UI_MyTask_AR_Grid_FilterVAlidates(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Request\\Grid\\GridValidation.cs:line 29\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AR_Grid_Paging(True,"chrome","/#/tasks/access-certificati' + 'on/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Grid.GridValidation.' 
+ 'UI_MyTask_AR_Grid_Paging(Boolean excute, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Grid\\GridValidation.cs:line 65' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTask_AR_Progress_Approve(True,"chrome","/#/tasks/access-certif' + 'ication/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir3688_215" + "57}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "fc2e027b336637b143a0098139997621\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Progress.ProgressVal" + "idation.UI_MyTask_AR_Progress_Approve(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Access Request\\Progress\\ProgressValidation.cs:line 32\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + 
"OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_AR_Progress_Reject(True,"chrome","/#/tasks/access-certifi' + 'cation/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Progress.ProgressVal' + 'idation.UI_MyTask_AR_Progress_Reject(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Progress\\ProgressValidation.cs:line 80' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 31s]', + 'title': + 'UI_MyTask_CC_Filters_FilterVAlidates(True,"chrome","/#/tasks/access' + '-certification/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir13304_30" + "088}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + 
"unexpectedAlertBehaviour=}]\nSession ID: " + "e6e1a454eceffe04daec2df3121843c6\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Grid.GridValidat" + "ion.UI_MyTask_CC_Filters_FilterVAlidates(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Compliance Control\\Grid\\GridValidation.cs:line 30\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_CC_Grid_Paging(True,"chrome","/#/tasks/access-certificati' + 'on/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Grid.GridValidat' + 'ion.UI_MyTask_CC_Grid_Paging(Boolean excute, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Grid\\GridValidation.cs:line 66' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTasks_CC_Paging_ShowPerPage(True,"chrome","/#/tasks/access-req' + 'uest/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 
x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir6532_293" + "46}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "b5311e179a7c4fac0e8285b86e566664\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Paging.PagingVal" + "idation.UI_MyTasks_CC_Paging_ShowPerPage(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Compliance Control\\Paging\\PagingValidation.cs:line 24\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTask_CC_Progress_Approve(True,"chrome","/#/tasks/access-certif' + 'ication/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this 
error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir12668_24" + "175}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "1a60859e82be5a9504866d8d9e6b21ba\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Progress.Progres" + "sValidation.UI_MyTask_CC_Progress_Approve(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Compliance Control\\Progress\\ProgressValidation.cs:line " + "27\n--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_CC_Progress_Reject(True,"chrome","/#/tasks/access-certifi' + 'cation/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Progress.Progres' + 'sValidation.UI_MyTask_CC_Progress_Reject(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Progress\\ProgressValidation.cs:line ' + '76' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTask_CC_Filters_FilterLayout(True,"chrome","/#/tasks/access-ce' + 'rtification/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 16 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir10360_63" + "06}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "68b0320c39a561808d45f7b1bd2ce18e\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks_Compliance_Control.Filters.FiltersV" + "alidation.UI_MyTask_CC_Filters_FilterLayout(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\Compliance Control\\Filters\\FiltersValidation.cs:line 30\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, 
Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_CC_Filters_FiltersFunctionality(True,"chrome","/#/tasks/a' + 'ccess-request/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks_Compliance_Control.Filters.FiltersV' + 'alidation.UI_MyTask_CC_Filters_FiltersFunctionality(Boolean ' + 'excute, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Filters\\FiltersValidation.cs:line 69' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UI_MyTask_MR_Filters_FilterLayout(True,"chrome","/#/tasks/access-ce' + 'rtification/overview") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2736_229" + "08}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, 
platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "52ab857fbeb80383ec0a4311504f7b8e\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.My_Tasks_My_Requests.Filters.FiltersValidati" + "on.UI_MyTask_MR_Filters_FilterLayout(Boolean excute, String " + "browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My" + " Tasks\\My Requests\\Filters\\FiltersValidation.cs:line 22\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UI_MyTask_MR_Filters_FiltersFunctionality(True,"chrome","/#/tasks/a' + 'ccess-request/overview") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks_My_Requests.Filters.FiltersValidati' + 'on.UI_MyTask_MR_Filters_FiltersFunctionality(Boolean excute, ' + 'String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\My Requests\\Filters\\FiltersValidation.cs:line 78' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UIAlertExclusionAddAccountsFromSearch_TC7466(True,"en","1","chrome"' + ',"/#/settings/general-settings") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : 
no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir3016_202" + "27}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "c9411ed622920bbdad53147bc36fd09b\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI" + "AlertExclusionAddAccountsFromSearch_TC7466(Boolean excute, String " + "language, String itteration, String browserName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett" + "ings\\Alert Exclusions\\AlertExclusions.cs:line 76\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionBulkActionsCoverage_TC7465(True,"en","1","chrome","' + '/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' 
+ 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionBulkActionsCoverage_TC7465(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 111' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionBulkUploadCoverage_TC7467_TC7468(True,"en","1","chr' + 'ome","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionBulkUploadCoverage_TC7467_TC7468(Boolean excute, ' + 'String language, String itteration, String browserName, String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 575' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionBulkUploadDownloadSampleFile_TC7464(True,"en","1","' + 'chrome","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionBulkUploadDownloadSampleFile_TC7464(Boolean excute, ' + 'String language, String itteration, String browserName, String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 155' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionColumns_TC7474(True,"en","1","chrome","/#/settings/' + 'general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionColumns_TC7474(Boolean excute, String language, ' + 'String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 204' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionGridCoverage_TC7465(True,"en","1","chrome","/#/sett' + 'ings/general-settings","u0g793,u1g1,u1g792,u1g802,u2g399,u2g8...") ' + 'failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionGridCoverage_TC7465(Boolean excute, String language, ' + 'String itteration, String browserName, String url, String names) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 532' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 
'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionLoadSameAccountTwice_TC7473(True,"en","1","chrome",' + '"/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionLoadSameAccountTwice_TC7473(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 301' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionNonCsvFormat_TC7472(True,"en","1","chrome","/#/sett' + 'ings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionNonCsvFormat_TC7472(Boolean excute, String language, ' + 'String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 349' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionPaginationCoverage_TC7471(True,"en","1","chrome","/' + '#/settings/general-settings","u0g791,u0g801,u1g791,u1g801,u2g791,u2' + '...") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 
'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionPaginationCoverage_TC7471(Boolean excute, String ' + 'language, String itteration, String browserName, String url, ' + 'String names) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 32' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionRemoveAccounts_TC7470(True,"en","1","chrome","/#/se' + 'ttings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionRemoveAccounts_TC7470(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 397' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionScreenOverviewLook_TC7465(True,"en","1","chrome","/' + '#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + 
'.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionScreenOverviewLook_TC7465(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 248' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionSearchCurrentExcludedAccounts_TC7475(True,"en","1",' + '"chrome","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionSearchCurrentExcludedAccounts_TC7475(Boolean excute, ' + 'String language, String itteration, String browserName, String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 488' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIAlertExclusionShowPerPageCoverage_TC7465(True,"en","1","chrome","' + '/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UI' + 'AlertExclusionShowPerPageCoverage_TC7465(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Alert Exclusions\\AlertExclusions.cs:line 447' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 
'title': + 'UIDataOwnerExclusionAddAccountsFromSearch_TC3411(True,"en","1","chr' + 'ome","/#/settings/general-settings") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir9916_128" + "85}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "d3eacb9d6fac9a67fa47aa82158da43c\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl" + "usions.UIDataOwnerExclusionAddAccountsFromSearch_TC3411(Boolean " + "excute, String language, String itteration, String browserName, " + "String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett" + "ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 142\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionBulkActionsCoverage_TC7554_TC3415(True,"en","1"' + 
',"chrome","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionBulkActionsCoverage_TC7554_TC3415(Boolea' + 'n excute, String language, String itteration, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 180' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionBulkUploadCoverage_TC3412_TC3413(True,"en","1",' + '"chrome","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionBulkUploadCoverage_TC3412_TC3413(Boolean' + ' excute, String language, String itteration, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 78' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionColumns_TC3419(True,"en","1","chrome","/#/setti' + 'ngs/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 
'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionColumns_TC3419(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 223' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionGridCoverage_TC7554(True,"en","1","chrome","/#/' + 'settings/general-settings","u0g793,u1g1,u1g792,u1g802,u2g399,u2g8..' + '.") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionGridCoverage_TC7554(Boolean excute, ' + 'String language, String itteration, String browserName, String ' + 'url, String names) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 267' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionLoadSameAccountTwice_TC3418(True,"en","1","chro' + 'me","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 
'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionLoadSameAccountTwice_TC3418(Boolean ' + 'excute, String language, String itteration, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 309' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionNonCsvFormat_TC3417(True,"en","1","chrome","/#/' + 'settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionNonCsvFormat_TC3417(Boolean excute, ' + 'String language, String itteration, String browserName, String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 31' + } + ] + } + }, + { + 'output': { + 'title': '140 fail, 6 pass in 14m 11s', + 'summary': + '\u205f\u2004\u205f\u20041 files\u2004\u2003155 suites\u2004\u2003\u2002' + '14m 11s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '146 tests\u2003\u205f\u2004\u205f\u20046 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '140 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n150 runs\u2006\u2003\u205f\u2004\u205f\u2004' + '6 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '144 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02NSw6AIAwFr' + '0JYu9BEjPEyhqDExg+mwMp4dysfZdeZ175eXMM2Wz6wpmLcenARhCCcPEoH5iDRizen0I' + 'W47TKN1itFqhArnCTqT2gJWzj61YxoMC2hP+LLDGVl5L8x8FfYZlP2KbPv4AjSxOwi+f0' + 'AEAq2iOkAAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, 
+ 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionPaginationCoverage_TC7554_TC3415(True,"en","1",' + '"chrome","/#/settings/general-settings","u0g106,u0g115,u0g124,u0g13' + '3,u0g142,u0...") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionPaginationCoverage_TC7554_TC3415(Boolean' + ' excute, String language, String itteration, String browserName, ' + 'String url, String names) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 355' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionSearchCurrentExcludedAccounts_TC3420(True,"en",' + '"1","chrome","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionSearchCurrentExcludedAccounts_TC3420(Boo' + 'lean excute, String language, String itteration, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 398' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIDataOwnerExclusionShowPerPageCoverage_TC7554(True,"en","1","chrom' + 'e","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated 
sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExcl' + 'usions.UIDataOwnerExclusionShowPerPageCoverage_TC7554(Boolean ' + 'excute, String language, String itteration, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 438' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 30s]', + 'title': + 'UIGeneralSettingsAllowPhysicalPath_TC10766(True,"1","abcd","chrome"' + ',"en","user,ra_user","crowdSource,whiteops","user","is_administrato' + 'r","/#/settings/general-settings") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 0 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir7348_165" + "22}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "a9460966896b2f67901d0c200c612026\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settin" + "gs.UIGeneralSettingsAllowPhysicalPath_TC10766(Boolean excute, " + "String itteration, String account, String browserName, String " + "language, String dbTables, String dbSchema, String tableName, " + "String columnName, String url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett" + "ings\\Logical Mapped Path\\Settings\\Settings.cs:line 266\n" + "--NoSuchElementException\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIGeneralSettingsDataDisplay_TC10898(True,"1","abcd","chrome","en",' + '"user,ra_user","crowdSource,whiteops","user","is_administrator","/#' + '/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settin' + 'gs.UIGeneralSettingsDataDisplay_TC10898(Boolean excute, String ' + 'itteration, String account, String browserName, String language, ' + 'String dbTables, String dbSchema, String tableName, String ' + 'columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Logical Mapped Path\\Settings\\Settings.cs:line 75' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIGeneralSettingsExcludeAdministrator_TC10765(True,"1","abcd","chro' + 'me","en","user,ra_user","crowdSource,whiteops","user","is_administr' + 'ator","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 
'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settin' + 'gs.UIGeneralSettingsExcludeAdministrator_TC10765(Boolean excute, ' + 'String itteration, String account, String browserName, String ' + 'language, String dbTables, String dbSchema, String tableName, ' + 'String columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Logical Mapped Path\\Settings\\Settings.cs:line 192' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIGeneralSettingsNavigation_TC10897(True,"1","abcd","chrome","en","' + 'user,ra_user","crowdSource,whiteops","user","is_administrator","/#/' + 'settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settin' + 'gs.UIGeneralSettingsNavigation_TC10897(Boolean excute, String ' + 'itteration, String account, String browserName, String language, ' + 'String dbTables, String dbSchema, String tableName, String ' + 'columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Logical Mapped Path\\Settings\\Settings.cs:line 36' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIGeneralSettingsTranslatePhysicalPath_TC10764(True,"1","abcd","chr' + 'ome","en","user,ra_user","crowdSource,whiteops","user","is_administ' + 'rator","/#/settings/general-settings") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 
'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settin' + 'gs.UIGeneralSettingsTranslatePhysicalPath_TC10764(Boolean excute, ' + 'String itteration, String account, String browserName, String ' + 'language, String dbTables, String dbSchema, String tableName, ' + 'String columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Logical Mapped Path\\Settings\\Settings.cs:line 119' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 31s]', + 'title': + 'UIMessageTemplatesCompanyInformationCoverage_TC3422_TC7534(True,"en' + '","1","chrome","/#/settings/crowd-messages/welcome-me...") failed', + 'raw_details': + "OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any " + "stacktrace information)\nCommand duration or timeout: 16 " + "milliseconds\nFor documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2232_223" + "98}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "882c55bf9c675e183d7269fae3076ce9\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(Stri" + "ng exceptionMessage, Exception lastException)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base" + ".cs:line 537\n at " + "MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTem" + "platesCompanyInformationCoverage_TC3422_TC7534(Boolean excute, " + "String language, String 
itteration, String browserName, String " + "url) in " + "C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett" + "ings\\Messages\\MessagesTests.cs:line 33\n--NoSuchElementException\n" + " at " + "OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon" + "se errorResponse)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String " + "driverCommandToExecute, Dictionary`2 parameters)\n at " + "OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String " + "mechanism, String value)\n at " + "OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<" + "ElementIsVisible>b__12(IWebDriver driver)\n at " + "OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 " + "condition)" + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIMessageTemplatesCompanyInformationFunctionality_TC3422_TC7534(Tru' + 'e,"en","1","chrome","/#/settings/crowd-messages/welcome-me...","goo' + 'gle") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTem' + 'platesCompanyInformationFunctionality_TC3422_TC7534(Boolean ' + 'excute, String language, String itteration, String browserName, ' + 'String url, String companyName) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Messages\\MessagesTests.cs:line 79' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIMessageTemplatesCumulativeReminderScheduling_TC3426(True,"en","1"' + ',"chrome","/#/settings/crowd-messages/welcome-me...") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 
'MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTem' + 'platesCumulativeReminderScheduling_TC3426(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Messages\\MessagesTests.cs:line 116' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIMessageTemplatesDiscardChanges_TC3425(True,"en","1","chrome","/#/' + 'settings/crowd-messages/welcome-me...") failed', + 'raw_details': + 'System.InvalidOperationException : Session [(null externalkey)] ' + 'not available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Respon' + 'se errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String ' + 'mechanism, String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.<' + 'ElementIsVisible>b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base' + '.cs:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTem' + 'platesDiscardChanges_TC3425(Boolean excute, String language, ' + 'String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Sett' + 'ings\\Messages\\MessagesTests.cs:line 172' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue44527.xml\u2003[took 0s]', + 'title': + 'UIMessageTemplatesHtmlEditor_TC3424(True,"en","1","chrome","/#/sett' + 'ings/crowd-messages/welcome-me..."," is not clickable at point (80, ' + '241). 
Other element would receive the click: \n (Session info: chrome=58.0.3029.110)\n ' + '(Driver info: chromedriver=2.29.461591 ' + '(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT ' + '6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace ' + 'information)\nCommand duration or timeout: 50 milliseconds\nBuild ' + 'info: version: \'3.1.0\', revision: \'86a5d70\', time: \'2017-02-16 ' + '07:57:44 -0800\'\nSystem info: host: \'BRC-JENKINS2-AU\', ip: ' + '\'172.16.61.17\', os.name: \'Windows Server 2012 R2\', os.arch: ' + '\'x86\', os.version: \'6.3\', java.version: \'1.8.0_66\'\nDriver ' + 'info: org.openqa.selenium.chrome.ChromeDriver\nCapabilities ' + '[{applicationCacheEnabled=false, rotatable=false, ' + 'mobileEmulationEnabled=false, networkConnectionEnabled=false, ' + 'chrome={chromedriverVersion=2.29.461591 ' + '(62ebf098771772160f391d75e589dc567915b233), ' + 'userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2476_1158}' + ', takesHeapSnapshot=true, pageLoadStrategy=normal, ' + 'databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, ' + 'version=58.0.3029.110, platform=WIN8_1, ' + 'browserConnectionEnabled=false, nativeEvents=true, ' + 'acceptSslCerts=true, locationContextEnabled=true, ' + 'webStorageEnabled=true, browserName=chrome, takesScreenshot=true, ' + 'javascriptEnabled=true, cssSelectorsEnabled=true, ' + 'unexpectedAlertBehaviour=}]\nSession ID: ' + '5cb1002259d4ed7ed523ba2e9e0cea02\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Bl.Base.Click(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 323', + content=' at MyCompanyUiSettings.Bl.Base.Click(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 330\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACIS_Bu' + 'lkActions_ClearAll_ApproveAllNo_TC2707(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 90', + stdout=None, + stderr=None, + time=6.804192 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_ClearAll_CommittedRecoredNotAffected_TC' + '2708("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACIS_Bu' + 'lkActions_ClearAll_CommittedRecoredNotAffected_TC2708(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 102', + stdout=None, + stderr=None, + time=0.009398 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentNo_TC2705("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACIS_Bu' + 'lkActions_RejectAll_AddCommentNo_TC2705(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 13', + stdout=None, + stderr=None, + time=0.008016 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentYes_TC2706("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Clear_All.ClearAll.UI_MyTask_AC_ACIS_Bu' + 'lkActions_RejectAll_AddCommentYes_TC2706(String url) in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Clear All\\ClearAll.cs:line 32', + stdout=None, + stderr=None, + time=0.006259 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_ApproveAllSelectio' + 'n_TC2710("/#/tasks/access-certification/overview")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.921707 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_ClearAllSelection_' + 'TC2712("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Examine_Text_and_Layout.ExaminTextAndLa' + 'yout.UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_ClearAllSelec' + 'tion_TC2712(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Examine Text and ' + 'Layout\\ExaminTextAndLayout.cs:line 67', + stdout=None, + stderr=None, + time=0.00639 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMenu_TC2709("/' + '#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 
'tion_Inner_Screen.Bulk_Acions.Examine_Text_and_Layout.ExaminTextAndLa' + 'yout.UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMenu_TC27' + '09(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Examine Text and ' + 'Layout\\ExaminTextAndLayout.cs:line 15', + stdout=None, + stderr=None, + time=0.006646 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_RejectAllSelection' + '_TC2711("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Examine_Text_and_Layout.ExaminTextAndLa' + 'yout.UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_RejectAllSele' + 'ction_TC2711(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Examine Text and ' + 'Layout\\ExaminTextAndLayout.cs:line 50', + stdout=None, + stderr=None, + time=0.005534 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMenu_TC2713("/' + '#/tasks/access-certification/overview")', + result='failure', + message='System.Exception : Base Class - FindElementsOnPage(string xpath) - 1 ' + 'parameter - method threw an exception : \nTimed out after 30 seconds\n' + ' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at ' + 'MyCompanyUiSettings.Bl.Base.WaitForVisibleElement(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 297\n at ' + 'MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 247', + content=' at MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 253\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Load.Load.UI_MyTask_AC_ACIS_BulkActions' + '_ExamineTextAndLayout_MainMenu_TC2713(String url) in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Load\\Load.cs:line 15', + stdout=None, + stderr=None, + time=35.390525 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_ExamineTextAndLayout_MainMenu_TC2714("/' + '#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Load.Load.UI_MyTask_AC_ACIS_BulkActions' + '_ExamineTextAndLayout_MainMenu_TC2714(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Load\\Load.cs:line 34', + stdout=None, + stderr=None, + time=0.007019 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentNo_TC2715("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.Exception : Base Class - FindElementsOnPage(string xpath) - 1 ' + 'parameter - method threw an exception : \nTimed out after 30 seconds\n' + ' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at ' + 'MyCompanyUiSettings.Bl.Base.WaitForVisibleElement(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 297\n at ' + 'MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 247', + content=' at MyCompanyUiSettings.Bl.Base.FindElementsOnPage(String xpath) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 253\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_ACIS' + '_BulkActions_RejectAll_AddCommentNo_TC2715(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 14', + stdout=None, + stderr=None, + time=35.643121 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_RejectAll_AddCommentYes_TC2716("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_ACIS' + '_BulkActions_RejectAll_AddCommentYes_TC2716(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 29', + stdout=None, + stderr=None, + time=0.006562 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_RejectAll_CommittedRecoredNotAffected_T' + 'C2718("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_ACIS' + '_BulkActions_RejectAll_CommittedRecoredNotAffected_TC2718(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 75', + stdout=None, + stderr=None, + time=0.007306 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_RejectAll_WithExistingSaved_TC2717("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 
'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Reject_All.ApproveAll.UI_MyTask_AC_ACIS' + '_BulkActions_RejectAll_WithExistingSaved_TC2717(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk Acions\\Reject All\\RejectAll.cs:line 47', + stdout=None, + stderr=None, + time=0.005689 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_Saving_FailesToSaveDueToDatabaseTimeout' + '_TC2720("/#/tasks/access-certification/overview")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.670159 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_Saving_FailesToSaveDueToNetworkDisconne' + 'ct_2721("/#/tasks/access-certification/overview")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001774 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_BulkActions_Saving_Saving_IsSynchronous_NoOtherActi' + 'onCanBeTaken_2722("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Bulk_Acions.Saving.Saving.UI_MyTask_AC_ACIS_BulkAct' + 'ions_Saving_Saving_IsSynchronous_NoOtherActionCanBeTaken_2722(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Bulk 
Acions\\Saving\\Saving.cs:line 27', + stdout=None, + stderr=None, + time=0.008969 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumApproveAll_TC2' + '741("/#/tasks/access-certification/overview")', + result='failure', + message="System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 65 milliseconds\nBuild " + "info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 " + "07:57:44 -0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: " + "'172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', " + "os.version: '6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir4700_14237" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "0501eda8a3e393ab97da9ab3839ea770", + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.ApproveAllG' + 'raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumApproveAl' + 'l_TC2741(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Approve ' + 'All\\Graph\\ApproveAllGraph.cs:line 15', + stdout=None, + stderr=None, + time=4.765661 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedApproveAll' + '_TC11159("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 
'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.ApproveAllG' + 'raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedAppro' + 'veAll_TC11159(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Approve ' + 'All\\Graph\\ApproveAllGraph.cs:line 65', + stdout=None, + stderr=None, + time=0.009578 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionApproveAl' + 'l_TC2744("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.ApproveAllG' + 'raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionAppr' + 'oveAll_TC2744(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Approve ' + 'All\\Graph\\ApproveAllGraph.cs:line 39', + stdout=None, + stderr=None, + time=0.006023 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectApproveAll_T' + 'C11160("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Approve_All.Graph.ApproveAllG' + 'raph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectApprove' + 'All_TC11160(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Approve ' + 'All\\Graph\\ApproveAllGraph.cs:line 93', + stdout=None, + stderr=None, + time=0.005994 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumClearAll_TC274' + '9("/#/tasks/access-certification/overview")', + result='failure', + message="System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 66 milliseconds\nBuild " + "info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 " + "07:57:44 -0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: " + "'172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', " + "os.version: '6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir6552_28403" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "5646c3ae0ba7663483cda0a3894fe2a9", + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllGraph' + '.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumClearAll_TC27' + '49(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Clear ' + 'All\\Graph\\ClearAllGraph.cs:line 15', + stdout=None, + stderr=None, + time=4.814514 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedClearAll_T' + 
'C2750("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllGraph' + '.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedClearAll_' + 'TC2750(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Clear ' + 'All\\Graph\\ClearAllGraph.cs:line 46', + stdout=None, + stderr=None, + time=0.007531 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionClearAll_' + 'TC2752("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllGraph' + '.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionClearAll' + '_TC2752(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Clear ' + 'All\\Graph\\ClearAllGraph.cs:line 112', + stdout=None, + stderr=None, + time=0.006818 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectedClearAll_T' + 'C2751("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated 
sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Clear_All.Graph.ClearAllGraph' + '.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectedClearAll_' + 'TC2751(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Clear ' + 'All\\Graph\\ClearAllGraph.cs:line 79', + stdout=None, + stderr=None, + time=0.007179 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_ColumnFiltering_FilterFurtherBy' + '_FilterBy_AllPossibleFields_TC2771("/#/tasks/access-certification/' + 'overview")', + result='failure', + message="System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 61 milliseconds\nBuild " + "info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 " + "07:57:44 -0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: " + "'172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', " + "os.version: '6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir32_9833}," + " takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "258bbe17298009e5e47efcf485ebccd3", + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Column_Filtering.Filter_furth' + 
'er_by.Filter_By.FilterBy.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Column' + 'Filtering_FilterFurtherBy_FilterBy_AllPossibleFields_TC2771(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Column Filtering\\Filter further ' + 'by\\Filter By\\FilterBy.cs:line 106', + stdout=None, + stderr=None, + time=4.87986 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_ColumnFiltering_FilterFurtherBy' + '_FilterBy_OneColumnOutOfManyWithAlreadyExistingFilters_TC2768("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Column_Filtering.Filter_furth' + 'er_by.Filter_By.FilterBy.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Column' + 'Filtering_FilterFurtherBy_FilterBy_OneColumnOutOfManyWithAlreadyExist' + 'ingFilters_TC2768(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Column Filtering\\Filter further ' + 'by\\Filter By\\FilterBy.cs:line 54', + stdout=None, + stderr=None, + time=0.006724 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_ColumnFiltering_FilterFurtherBy' + '_FilterBy_OneColumnOutOfManyWithNoExistingFilters_TC2767("/#/tasks/' + 'access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 
'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Column_Filtering.Filter_furth' + 'er_by.Filter_By.FilterBy.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Column' + 'Filtering_FilterFurtherBy_FilterBy_OneColumnOutOfManyWithNoExistingFi' + 'lters_TC2767(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Column Filtering\\Filter further ' + 'by\\Filter By\\FilterBy.cs:line 13', + stdout=None, + stderr=None, + time=0.007172 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithOnlyPendi' + 'ngForAction_TC2753("/#/tasks/access-certification/overview")', + result='failure', + message="System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 65 milliseconds\nBuild " + "info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 " + "07:57:44 -0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: " + "'172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', " + "os.version: '6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2992_31686" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "8397ed2522698ddccb6b0aa573d920e9", + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColumn.UI' + '_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithOnlyPending' + 'ForAction_TC2753(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire Column\\EntireColumn.cs:line ' + '16', + stdout=None, + stderr=None, + time=4.715226 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPendingFo' + 
'rActionAndUncommittedApproved_TC2754("/#/tasks/access-certification/' + 'overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColumn.UI' + '_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPendingForA' + 'ctionAndUncommittedApproved_TC2754(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire Column\\EntireColumn.cs:line ' + '57', + stdout=None, + stderr=None, + time=0.009371 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPendingFo' + 'rActionAndUncommittedReject_TC2755("/#/tasks/access-certification/' + 'overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColumn.UI' + '_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColum_WithPendingForA' + 'ctionAndUncommittedReject_TC2755(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire Column\\EntireColumn.cs:line ' + '83', + stdout=None, + stderr=None, + time=0.007183 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithAllCommitted_TC2757TB' + 'D("/#/tasks/access-certification/overview")', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.001224 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithPendingForActionAppro' + 'vedCommittedAndRejectedCommitted_TC2758("/#/tasks/access-' + 'certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColumn.UI' + '_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithPendingForActionApprove' + 'dCommittedAndRejectedCommitted_TC2758(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire Column\\EntireColumn.cs:line ' + '148', + stdout=None, + stderr=None, + time=0.006828 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithUncommittedApprovedAn' + 'dUncommittedReject_TC2756("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Entire_Column.EntireColumn.UI' + '_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_WithUncommittedApprovedAndU' + 'ncommittedReject_TC2756(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Entire Column\\EntireColumn.cs:line ' + '111', + stdout=None, + stderr=None, + time=0.007697 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_ColumnEntireColumn_TC7937' + '_TC7927("/#/tasks/access-certification/overview")', + result='failure', + message="System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 67 milliseconds\nBuild " + "info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 " + "07:57:44 -0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: " + "'172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', " + "os.version: '6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2696_14836" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "6a683eff25d0c058e04394158f5d2245", + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_ACIS' + '_ChartView_ChartMenu_Menus_ColumnEntireColumn_TC7937_TC7927(String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 58', + stdout=None, + stderr=None, + time=4.628458 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_ColumnPendingAcions_TC793' + '8("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_ACIS' + '_ChartView_ChartMenu_Menus_ColumnPendingAcions_TC7938(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 100', + stdout=None, + stderr=None, + time=0.0081 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_FilterFurtherBy_TC7939("/' + '#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_ACIS' + '_ChartView_ChartMenu_Menus_FilterFurtherBy_TC7939(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 140', + stdout=None, + stderr=None, + time=0.007192 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Menus_TakeActionOn_TC7936("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Menus.Menus.UI_MyTask_AC_ACIS' + 
'_ChartView_ChartMenu_Menus_TakeActionOn_TC7936(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Menus\\Menus.cs:line 13', + stdout=None, + stderr=None, + time=0.006358 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumRejectAll_TC27' + '63("/#/tasks/access-certification/overview")', + result='failure', + message="System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 60 milliseconds\nBuild " + "info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 " + "07:57:44 -0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: " + "'172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', " + "os.version: '6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir10404_2081" + "8}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "50f2dfc6d36fd64051d143d025dc8e53", + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAllGra' + 'ph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_EntireColumRejectAll_T' + 'C2763(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Reject ' + 'All\\Graph\\RejectAllGraph.cs:line 15', + stdout=None, + stderr=None, + time=4.669416 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedRejectAll_' + 'TC2765("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 
'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAllGra' + 'ph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingApprovedRejectA' + 'll_TC2765(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Reject ' + 'All\\Graph\\RejectAllGraph.cs:line 65', + stdout=None, + stderr=None, + time=0.007605 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionRejectAll' + '_TC2764("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAllGra' + 'ph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingForActionReject' + 'All_TC2764(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Reject ' + 'All\\Graph\\RejectAllGraph.cs:line 39', + stdout=None, + stderr=None, + time=0.006233 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectRejectAll_TC' + '2766("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String 
value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Chart_Menu.Reject_All.Graph.RejectAllGra' + 'ph.UI_MyTask_AC_ACIS_ChartView_ChartMenu_Graph_PendingRejectRejectAll' + '_TC2766(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Chart Menu\\Reject ' + 'All\\Graph\\RejectAllGraph.cs:line 93', + stdout=None, + stderr=None, + time=0.005717 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_AllColumnsAvailable' + '_TC2793("/#/tasks/access-certification/overview")', + result='failure', + message="System.InvalidOperationException : unknown error: Element is not " + "clickable at point (932, 731)\n (Session info: " + "chrome=58.0.3029.110)\n (Driver info: chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 61 milliseconds\nBuild " + "info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 " + "07:57:44 -0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: " + "'172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', " + "os.version: '6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir3796_31836" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "840df673591317f43b8304ab9db74078", + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebElement.Click()\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_AC_A' + 'CIS_ChartView_ChartMenu_LookAndFeel_AllColumnsAvailable_TC2793(String' + ' url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 16', + stdout=None, + stderr=None, + time=4.834914 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_LongNameGetsTC2795(' + '"/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_AC_A' + 'CIS_ChartView_ChartMenu_LookAndFeel_LongNameGetsTC2795(String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 55', + stdout=None, + stderr=None, + time=0.00655 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_TableViewTC2799("/#' + '/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_AC_A' + 'CIS_ChartView_ChartMenu_LookAndFeel_TableViewTC2799(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 99', + stdout=None, + stderr=None, + time=0.005979 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_TextAndColorsTC2794' + '("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] 
not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_AC_A' + 'CIS_ChartView_ChartMenu_LookAndFeel_TextAndColorsTC2794(String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 34', + stdout=None, + stderr=None, + time=0.006368 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC2796_TC277' + '2("/#/tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_AC_A' + 'CIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC2796_TC2772(String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 75', + stdout=None, + stderr=None, + time=0.006124 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_ACIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC7926("/#/' + 'tasks/access-certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, 
Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Certification.Access_Certifica' + 'tion_Inner_Screen.Chart_View.Look_And_Feel.LookAndFeel.UI_MyTask_AC_A' + 'CIS_ChartView_ChartMenu_LookAndFeel_ToolTipTC7926(String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Certification\\Access Certification Inner ' + 'Screen\\Chart View\\Look And Feel\\LookAndFeel.cs:line 121', + stdout=None, + stderr=None, + time=0.006007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_FiltersValidation(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_DataGrid_DataDisplay_TC2101(True,"chrome","/#/tasks/' + 'access-certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_DataGrid_Header_TC2100(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_DataGrid_Navigation_TC2099(True,"chrome","/#/tasks/' + 'access-certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_DataGrid_Paging_TC2102(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_Grid_Grid_Validation(True,"chrome","#/tasks/access-' + 'certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_TwoUsersTwoApplicationsValidation(True,"chrome","/#/' + 
'tasks/access-certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTasks_AR_Paging_ShowPerPage(True,"chrome","/#/tasks/access-' + 'request/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_Progress_Approve(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AC_Progress_Reject(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='OneTimeSetUp: No suitable constructor was found', + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AR_Filters_FilterLayout(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 15 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir11804_1689" + "5}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "29b4b9836d0675d3828a94e2f11cf9d7\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 
'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Filters.FiltersValidat' + 'ion.UI_MyTask_AR_Filters_FilterLayout(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Filters\\FiltersValidation.cs:line 29\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.834481 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AR_Filters_FiltersFunctionality(True,"chrome","/#/tasks/' + 'access-request/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Filters.FiltersValidat' + 'ion.UI_MyTask_AR_Filters_FiltersFunctionality(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Filters\\FiltersValidation.cs:line 83', + stdout=None, + stderr=None, + time=0.023999 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AR_Grid_FilterVAlidates(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand 
duration or timeout: 15 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir12972_2780" + "1}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "fca88dd0490c464a5ded2f16849929d8\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Grid.GridValidation.UI' + '_MyTask_AR_Grid_FilterVAlidates(Boolean excute, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Grid\\GridValidation.cs:line 29\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.89347 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AR_Grid_Paging(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 
'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Grid.GridValidation.UI' + '_MyTask_AR_Grid_Paging(Boolean excute, String browserName, String ' + 'url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Grid\\GridValidation.cs:line 65', + stdout=None, + stderr=None, + time=0.008822 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTasks_AR_Paging_ShowPerPage(True,"chrome","/#/tasks/access-' + 'request/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 16 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir7652_19004" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "2a57cd88b44439bf41e198e40c667f20\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Paging.PagingValidatio' + 'n.UI_MyTasks_AR_Paging_ShowPerPage(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Paging\\PagingValidation.cs:line 29\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 
'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=31.057879 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AR_Progress_Approve(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir3688_21557" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "fc2e027b336637b143a0098139997621\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Progress.ProgressValid' + 'ation.UI_MyTask_AR_Progress_Approve(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Progress\\ProgressValidation.cs:line 32\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 
'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.919321 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_AR_Progress_Reject(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Access_Request.Progress.ProgressValid' + 'ation.UI_MyTask_AR_Progress_Reject(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Access Request\\Progress\\ProgressValidation.cs:line 80', + stdout=None, + stderr=None, + time=0.007383 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_CC_Filters_FilterVAlidates(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir13304_3008" + "8}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, 
platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "e6e1a454eceffe04daec2df3121843c6\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Grid.GridValidatio' + 'n.UI_MyTask_CC_Filters_FilterVAlidates(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Grid\\GridValidation.cs:line 30\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=31.007686 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_CC_Grid_Paging(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Grid.GridValidatio' + 'n.UI_MyTask_CC_Grid_Paging(Boolean excute, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Grid\\GridValidation.cs:line 66', + stdout=None, + stderr=None, + time=0.00729 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTasks_CC_Paging_ShowPerPage(True,"chrome","/#/tasks/access-' + 'request/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> 
OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir6532_29346" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "b5311e179a7c4fac0e8285b86e566664\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Paging.PagingValid' + 'ation.UI_MyTasks_CC_Paging_ShowPerPage(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Paging\\PagingValidation.cs:line 24\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.750044 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_CC_Progress_Approve(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + 
"{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir12668_2417" + "5}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "1a60859e82be5a9504866d8d9e6b21ba\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Progress.ProgressV' + 'alidation.UI_MyTask_CC_Progress_Approve(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Progress\\ProgressValidation.cs:line 27\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.779221 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_CC_Progress_Reject(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 
'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Progress.ProgressV' + 'alidation.UI_MyTask_CC_Progress_Reject(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Progress\\ProgressValidation.cs:line 76', + stdout=None, + stderr=None, + time=0.005767 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_MR_Progress_Approve(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 16 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2428_25047" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "fe1ce479d62629acd645feb64f99fd6f\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.MyRequest.Progress.ProgressValidation' + 
'.UI_MyTask_MR_Progress_Approve(Boolean excute, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\My Requests\\Progress\\ProgressValidation.cs:line 27\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.73849 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_MR_Progress_Reject(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks.MyRequest.Progress.ProgressValidation' + '.UI_MyTask_MR_Progress_Reject(Boolean excute, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\My Requests\\Progress\\ProgressValidation.cs:line 75', + stdout=None, + stderr=None, + time=0.008522 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_CC_Filters_FilterLayout(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 16 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + 
"org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir10360_6306" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "68b0320c39a561808d45f7b1bd2ce18e\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks_Compliance_Control.Filters.FiltersVal' + 'idation.UI_MyTask_CC_Filters_FilterLayout(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Filters\\FiltersValidation.cs:line 30\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.683118 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_CC_Filters_FiltersFunctionality(True,"chrome","/#/tasks/' + 'access-request/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks_Compliance_Control.Filters.FiltersVal' + 'idation.UI_MyTask_CC_Filters_FiltersFunctionality(Boolean excute, ' + 'String browserName, String url) in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\Compliance Control\\Filters\\FiltersValidation.cs:line 69', + stdout=None, + stderr=None, + time=0.006491 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_MR_Filters_FilterLayout(True,"chrome","/#/tasks/access-' + 'certification/overview")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2736_22908" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "52ab857fbeb80383ec0a4311504f7b8e\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks_My_Requests.Filters.FiltersValidation' + '.UI_MyTask_MR_Filters_FilterLayout(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\My Requests\\Filters\\FiltersValidation.cs:line 22\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 
'condition)', + stdout=None, + stderr=None, + time=30.720678 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UI_MyTask_MR_Filters_FiltersFunctionality(True,"chrome","/#/tasks/' + 'access-request/overview")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.My_Tasks_My_Requests.Filters.FiltersValidation' + '.UI_MyTask_MR_Filters_FiltersFunctionality(Boolean excute, String ' + 'browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\My' + ' Tasks\\My Requests\\Filters\\FiltersValidation.cs:line 78', + stdout=None, + stderr=None, + time=0.007269 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionAddAccountsFromSearch_TC7466(True,"en","1","chrome","' + '/#/settings/general-settings")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir3016_20227" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + 
"unexpectedAlertBehaviour=}]\nSession ID: " + "c9411ed622920bbdad53147bc36fd09b\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionAddAccountsFromSearch_TC7466(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 76\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.617281 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionBulkActionsCoverage_TC7465(True,"en","1","chrome","/#' + '/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionBulkActionsCoverage_TC7465(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 111', + stdout=None, + stderr=None, + time=0.004977 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionBulkUploadCoverage_TC7467_TC7468(True,"en","1","' + 'chrome","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' 
errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionBulkUploadCoverage_TC7467_TC7468(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 575', + stdout=None, + stderr=None, + time=0.004786 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionBulkUploadDownloadSampleFile_TC7464(True,"en","1","' + 'chrome","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionBulkUploadDownloadSampleFile_TC7464(Boolean excute, ' + 'String language, String itteration, String browserName, String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 155', + stdout=None, + stderr=None, + time=0.0045 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionColumns_TC7474(True,"en","1","chrome","/#/settings/' + 'general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionColumns_TC7474(Boolean excute, String language, String ' + 'itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 204', + stdout=None, + stderr=None, + time=0.005383 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionGridCoverage_TC7465(True,"en","1","chrome","/#/' + 'settings/general-settings","u0g793,u1g1,u1g792,u1g802,u2g399,u2g8..."' + ')', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionGridCoverage_TC7465(Boolean excute, String language, ' + 'String itteration, String browserName, String url, String names) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 532', + stdout=None, + stderr=None, + time=0.004051 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionLoadSameAccountTwice_TC7473(True,"en","1","chrome","/' + '#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionLoadSameAccountTwice_TC7473(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 301', + stdout=None, + 
stderr=None, + time=0.004175 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionNonCsvFormat_TC7472(True,"en","1","chrome","/#/' + 'settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionNonCsvFormat_TC7472(Boolean excute, String language, ' + 'String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 349', + stdout=None, + stderr=None, + time=0.003606 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionPaginationCoverage_TC7471(True,"en","1","chrome","/#/' + 'settings/general-settings","u0g791,u0g801,u1g791,u1g801,u2g791,u2..."' + ')', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionPaginationCoverage_TC7471(Boolean excute, String ' + 'language, String itteration, String browserName, String url, String ' + 'names) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 32', + stdout=None, + stderr=None, + time=0.003472 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionRemoveAccounts_TC7470(True,"en","1","chrome","/#/' + 'settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 
'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionRemoveAccounts_TC7470(Boolean excute, String language, ' + 'String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 397', + stdout=None, + stderr=None, + time=0.004382 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionScreenOverviewLook_TC7465(True,"en","1","chrome","/#/' + 'settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionScreenOverviewLook_TC7465(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 248', + stdout=None, + stderr=None, + time=0.005425 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionSearchCurrentExcludedAccounts_TC7475(True,"en","1","' + 'chrome","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 
'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionSearchCurrentExcludedAccounts_TC7475(Boolean excute, ' + 'String language, String itteration, String browserName, String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 488', + stdout=None, + stderr=None, + time=0.004325 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIAlertExclusionShowPerPageCoverage_TC7465(True,"en","1","chrome","/#' + '/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAl' + 'ertExclusionShowPerPageCoverage_TC7465(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Alert Exclusions\\AlertExclusions.cs:line 447', + stdout=None, + stderr=None, + time=0.003597 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionAddAccountsFromSearch_TC3411(True,"en","1","' + 'chrome","/#/settings/general-settings")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, 
networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir9916_12885" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "d3eacb9d6fac9a67fa47aa82158da43c\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionAddAccountsFromSearch_TC3411(Boolean ' + 'excute, String language, String itteration, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 142\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.67329 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionBulkActionsCoverage_TC7554_TC3415(True,"en","1","' + 'chrome","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionBulkActionsCoverage_TC7554_TC3415(Boolean ' + 'excute, String language, String itteration, String browserName, ' + 'String url) in ' + 
'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 180', + stdout=None, + stderr=None, + time=0.004622 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionBulkUploadCoverage_TC3412_TC3413(True,"en","1","' + 'chrome","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionBulkUploadCoverage_TC3412_TC3413(Boolean ' + 'excute, String language, String itteration, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 78', + stdout=None, + stderr=None, + time=0.004533 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionColumns_TC3419(True,"en","1","chrome","/#/' + 'settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionColumns_TC3419(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 223', + stdout=None, + stderr=None, + time=0.003773 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionGridCoverage_TC7554(True,"en","1","chrome","/#/' + 
'settings/general-settings","u0g793,u1g1,u1g792,u1g802,u2g399,u2g8..."' + ')', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionGridCoverage_TC7554(Boolean excute, String ' + 'language, String itteration, String browserName, String url, String ' + 'names) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 267', + stdout=None, + stderr=None, + time=0.004412 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionLoadSameAccountTwice_TC3418(True,"en","1","' + 'chrome","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionLoadSameAccountTwice_TC3418(Boolean excute, ' + 'String language, String itteration, String browserName, String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 309', + stdout=None, + stderr=None, + time=0.005649 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionNonCsvFormat_TC3417(True,"en","1","chrome","/#/' + 'settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 
'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionNonCsvFormat_TC3417(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 31', + stdout=None, + stderr=None, + time=0.004311 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionPaginationCoverage_TC7554_TC3415(True,"en","1","' + 'chrome","/#/settings/general-settings","u0g106,u0g115,u0g124,u0g133,' + 'u0g142,u0...")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionPaginationCoverage_TC7554_TC3415(Boolean ' + 'excute, String language, String itteration, String browserName, ' + 'String url, String names) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 355', + stdout=None, + stderr=None, + time=0.013288 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionSearchCurrentExcludedAccounts_TC3420(True,"en","' + '1","chrome","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 
'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionSearchCurrentExcludedAccounts_TC3420(Boolean' + ' excute, String language, String itteration, String browserName, ' + 'String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 398', + stdout=None, + stderr=None, + time=0.005548 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIDataOwnerExclusionShowPerPageCoverage_TC7554(True,"en","1","chrome"' + ',"/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclus' + 'ions.UIDataOwnerExclusionShowPerPageCoverage_TC7554(Boolean excute, ' + 'String language, String itteration, String browserName, String url) ' + 'in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Data Owner Exclusions\\DataOwnerExclusions.cs:line 438', + stdout=None, + stderr=None, + time=0.005229 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIGeneralSettingsAllowPhysicalPath_TC10766(True,"1","abcd","chrome","' + 'en","user,ra_user","crowdSource,whiteops","user","is_administrator","' + '/#/settings/general-settings")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 0 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + 
"org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir7348_16522" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "a9460966896b2f67901d0c200c612026\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings' + '.UIGeneralSettingsAllowPhysicalPath_TC10766(Boolean excute, String ' + 'itteration, String account, String browserName, String language, ' + 'String dbTables, String dbSchema, String tableName, String ' + 'columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Logical Mapped Path\\Settings\\Settings.cs:line 266\n' + '--NoSuchElementException\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=30.705834 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIGeneralSettingsDataDisplay_TC10898(True,"1","abcd","chrome","en","' + 'user,ra_user","crowdSource,whiteops","user","is_administrator","/#/' + 'settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 
'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings' + '.UIGeneralSettingsDataDisplay_TC10898(Boolean excute, String ' + 'itteration, String account, String browserName, String language, ' + 'String dbTables, String dbSchema, String tableName, String ' + 'columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Logical Mapped Path\\Settings\\Settings.cs:line 75', + stdout=None, + stderr=None, + time=0.006011 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIGeneralSettingsExcludeAdministrator_TC10765(True,"1","abcd","' + 'chrome","en","user,ra_user","crowdSource,whiteops","user","' + 'is_administrator","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings' + '.UIGeneralSettingsExcludeAdministrator_TC10765(Boolean excute, ' + 'String itteration, String account, String browserName, String ' + 'language, String dbTables, String dbSchema, String tableName, String ' + 'columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Logical Mapped Path\\Settings\\Settings.cs:line 192', + stdout=None, + stderr=None, + time=0.004827 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIGeneralSettingsNavigation_TC10897(True,"1","abcd","chrome","en","' + 'user,ra_user","crowdSource,whiteops","user","is_administrator","/#/' + 'settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings' + '.UIGeneralSettingsNavigation_TC10897(Boolean excute, String ' + 
'itteration, String account, String browserName, String language, ' + 'String dbTables, String dbSchema, String tableName, String ' + 'columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Logical Mapped Path\\Settings\\Settings.cs:line 36', + stdout=None, + stderr=None, + time=0.003872 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIGeneralSettingsTranslatePhysicalPath_TC10764(True,"1","abcd","' + 'chrome","en","user,ra_user","crowdSource,whiteops","user","' + 'is_administrator","/#/settings/general-settings")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings' + '.UIGeneralSettingsTranslatePhysicalPath_TC10764(Boolean excute, ' + 'String itteration, String account, String browserName, String ' + 'language, String dbTables, String dbSchema, String tableName, String ' + 'columnName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Logical Mapped Path\\Settings\\Settings.cs:line 119', + stdout=None, + stderr=None, + time=0.005674 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIMessageTemplatesCompanyInformationCoverage_TC3422_TC7534(True,"en",' + '"1","chrome","/#/settings/crowd-messages/welcome-me...")', + result='failure', + message="OpenQA.Selenium.WebDriverTimeoutException : Timed out after 30 " + "seconds\n ----> OpenQA.Selenium.NoSuchElementException : no such " + "element: Unable to locate element: " + "{\"method\":\"xpath\",\"selector\":\"//span[@translate='_Loading_']\"}" + "\n (Session info: chrome=58.0.3029.110)\n (Driver info: " + "chromedriver=2.29.461591 " + "(62ebf098771772160f391d75e589dc567915b233),platform=Windows NT " + "6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace " + "information)\nCommand duration or timeout: 16 milliseconds\nFor " + "documentation on this error, please visit: " + "http://seleniumhq.org/exceptions/no_such_element.html\nBuild info: " + "version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 " + "-0800'\nSystem info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', " + "os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: " + "'6.3', java.version: '1.8.0_66'\nDriver info: " + "org.openqa.selenium.chrome.ChromeDriver\nCapabilities " + "[{applicationCacheEnabled=false, rotatable=false, " + "mobileEmulationEnabled=false, networkConnectionEnabled=false, " + "chrome={chromedriverVersion=2.29.461591 " + 
"(62ebf098771772160f391d75e589dc567915b233), " + "userDataDir=C:\\Users\\BUILD-~1\\AppData\\Local\\Temp\\scoped_dir2232_22398" + "}, takesHeapSnapshot=true, pageLoadStrategy=normal, " + "databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, " + "version=58.0.3029.110, platform=WIN8_1, " + "browserConnectionEnabled=false, nativeEvents=true, " + "acceptSslCerts=true, locationContextEnabled=true, " + "webStorageEnabled=true, browserName=chrome, takesScreenshot=true, " + "javascriptEnabled=true, cssSelectorsEnabled=true, " + "unexpectedAlertBehaviour=}]\nSession ID: " + "882c55bf9c675e183d7269fae3076ce9\n*** Element info: {Using=xpath, " + "value=//span[@translate='_Loading_']}", + content=' at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.ThrowTimeoutException(String' + ' exceptionMessage, Exception lastException)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTempl' + 'atesCompanyInformationCoverage_TC3422_TC7534(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Messages\\MessagesTests.cs:line 33\n--NoSuchElementException\n ' + 'at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)', + stdout=None, + stderr=None, + time=31.228887 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIMessageTemplatesCompanyInformationFunctionality_TC3422_TC7534(True,' + '"en","1","chrome","/#/settings/crowd-messages/welcome-me...","google"' + ')', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTempl' + 'atesCompanyInformationFunctionality_TC3422_TC7534(Boolean excute, ' + 'String language, String itteration, String browserName, String url, ' + 'String companyName) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Messages\\MessagesTests.cs:line 79', + 
stdout=None, + stderr=None, + time=0.005272 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIMessageTemplatesCumulativeReminderScheduling_TC3426(True,"en","1","' + 'chrome","/#/settings/crowd-messages/welcome-me...")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTempl' + 'atesCumulativeReminderScheduling_TC3426(Boolean excute, String ' + 'language, String itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Messages\\MessagesTests.cs:line 116', + stdout=None, + stderr=None, + time=0.004819 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIMessageTemplatesDiscardChanges_TC3425(True,"en","1","chrome","/#/' + 'settings/crowd-messages/welcome-me...")', + result='failure', + message='System.InvalidOperationException : Session [(null externalkey)] not ' + 'available and is not among the last 1000 terminated sessions.\n' + 'Active sessions are[]', + content=' at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.UnpackAndThrowOnError(Response' + ' errorResponse)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.Execute(String ' + 'driverCommandToExecute, Dictionary`2 parameters)\n at ' + 'OpenQA.Selenium.Remote.RemoteWebDriver.FindElement(String mechanism, ' + 'String value)\n at ' + 'OpenQA.Selenium.Support.UI.ExpectedConditions.<>c__DisplayClass13.b__12(IWebDriver driver)\n at ' + 'OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 ' + 'condition)\n at MyCompanyUiSettings.Bl.Base.waitToLoad() in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Bl\\Base.c' + 's:line 537\n at ' + 'MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTempl' + 'atesDiscardChanges_TC3425(Boolean excute, String language, String ' + 'itteration, String browserName, String url) in ' + 'C:\\branches\\1\\main-branch\\Automation\\UI\\MyCompanyUiSettings\\Tl\\Settin' + 'gs\\Messages\\MessagesTests.cs:line 172', + stdout=None, + stderr=None, + time=0.006014 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue44527.xml', + test_file=None, + line=None, + class_name='', + test_name='UIMessageTemplatesHtmlEditor_TC3424(True,"en","1","chrome","/#/settin' + 'gs/crowd-messages/welcome-me..."," is not clickable at point (80, 241). 
[The remainder of this hunk is garbled by extraction: it appears to add the raw NUnit 3 XML report 'nunit3/jenkins/NUnit-issue44527.xml' referenced by the expectation entries above, but the XML markup is lost and the final expectation entry (UIMessageTemplatesHtmlEditor_TC3424) is cut off mid-string. Only fragments of the embedded CDATA stack traces survive: Selenium NoSuchElementException / WebDriverTimeoutException failures across the MyCompanyUiSettings "Settings", "My Tasks > Access Certification", "Access Request", and "Compliance Control" suites, all failing in MyCompanyUiSettings.Bl.Base.waitToLoad() (Base.cs:line 537) while waiting for //span[@translate='_Loading_'].]
b5311e179a7c4fac0e8285b86e566664 +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir12668_24175}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: 1a60859e82be5a9504866d8d9e6b21ba +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.My_Tasks.Compliance_Control.Progress.ProgressValidation.UI_MyTask_CC_Progress_Reject(Boolean excute, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\My Tasks\Compliance Control\Progress\ProgressValidation.cs:line 76]]> + + + + + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 16 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, 
networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir2428_25047}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: fe1ce479d62629acd645feb64f99fd6f +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.My_Tasks.MyRequest.Progress.ProgressValidation.UI_MyTask_MR_Progress_Reject(Boolean excute, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\My Tasks\My Requests\Progress\ProgressValidation.cs:line 75]]> + + + + + + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 16 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir10360_6306}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: 68b0320c39a561808d45f7b1bd2ce18e +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at 
MyCompanyUiSettings.Tl.My_Tasks_Compliance_Control.Filters.FiltersValidation.UI_MyTask_CC_Filters_FiltersFunctionality(Boolean excute, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\My Tasks\Compliance Control\Filters\FiltersValidation.cs:line 69]]> + + + + + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir2736_22908}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: 52ab857fbeb80383ec0a4311504f7b8e +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.My_Tasks_My_Requests.Filters.FiltersValidation.UI_MyTask_MR_Filters_FiltersFunctionality(Boolean excute, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\My Tasks\My Requests\Filters\FiltersValidation.cs:line 78]]> + + + + + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities 
[{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir3016_20227}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: c9411ed622920bbdad53147bc36fd09b +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionBulkActionsCoverage_TC7465(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 111]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionBulkUploadCoverage_TC7467_TC7468(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 575]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionBulkUploadDownloadSampleFile_TC7464(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 155]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionColumns_TC7474(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 204]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in 
C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionGridCoverage_TC7465(Boolean excute, String language, String itteration, String browserName, String url, String names) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 532]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionLoadSameAccountTwice_TC7473(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 301]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionNonCsvFormat_TC7472(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 349]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionPaginationCoverage_TC7471(Boolean excute, String language, String itteration, String browserName, String url, String names) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 32]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionRemoveAccounts_TC7470(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 397]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionScreenOverviewLook_TC7465(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 248]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at 
MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionSearchCurrentExcludedAccounts_TC7475(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 488]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Alert_Exclusions.AlertExclusions.UIAlertExclusionShowPerPageCoverage_TC7465(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Alert Exclusions\AlertExclusions.cs:line 447]]> + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir9916_12885}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: d3eacb9d6fac9a67fa47aa82158da43c +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionBulkActionsCoverage_TC7554_TC3415(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 180]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at 
OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionBulkUploadCoverage_TC3412_TC3413(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 78]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionColumns_TC3419(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 223]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionGridCoverage_TC7554(Boolean excute, String language, String itteration, String browserName, String url, String names) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 267]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionLoadSameAccountTwice_TC3418(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 309]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionNonCsvFormat_TC3417(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 31]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionPaginationCoverage_TC7554_TC3415(Boolean excute, String language, String itteration, String browserName, String url, String names) in 
C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 355]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionSearchCurrentExcludedAccounts_TC3420(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 398]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Data_Owner_Exclusions.DataOwnerExclusions.UIDataOwnerExclusionShowPerPageCoverage_TC7554(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Data Owner Exclusions\DataOwnerExclusions.cs:line 438]]> + + + + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir7348_16522}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: a9460966896b2f67901d0c200c612026 +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings.UIGeneralSettingsDataDisplay_TC10898(Boolean 
excute, String itteration, String account, String browserName, String language, String dbTables, String dbSchema, String tableName, String columnName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Logical Mapped Path\Settings\Settings.cs:line 75]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings.UIGeneralSettingsExcludeAdministrator_TC10765(Boolean excute, String itteration, String account, String browserName, String language, String dbTables, String dbSchema, String tableName, String columnName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Logical Mapped Path\Settings\Settings.cs:line 192]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings.UIGeneralSettingsNavigation_TC10897(Boolean excute, String itteration, String account, String browserName, String language, String dbTables, String dbSchema, String tableName, String columnName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Logical Mapped Path\Settings\Settings.cs:line 36]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Logical_Mapped_Path.Settings.Settings.UIGeneralSettingsTranslatePhysicalPath_TC10764(Boolean excute, String itteration, String account, String browserName, String language, String dbTables, String dbSchema, String tableName, String columnName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Logical Mapped Path\Settings\Settings.cs:line 119]]> + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 16 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir2232_22398}, takesHeapSnapshot=true, pageLoadStrategy=normal, 
databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: 882c55bf9c675e183d7269fae3076ce9 +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesCompanyInformationFunctionality_TC3422_TC7534(Boolean excute, String language, String itteration, String browserName, String url, String companyName) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 79]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesCumulativeReminderScheduling_TC3426(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 116]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesDiscardChanges_TC3425(Boolean excute, String language, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 172]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesHtmlEditor_TC3424(Boolean excute, String language, String itteration, String browserName, String url, String htmlTxt) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 246]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesInsertScriptIntoMessage(Boolean excute, String language, String itteration, String browserName, String url, String htmlTxt) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 285]]> + + + + + + + + + + 
+ c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesNewTaskCoverage_TC3423_TC7534(Boolean excute, String language, String itteration, String browserName, String url, String userMail) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 330]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesOwnersAppointmentCoverage_TC3423_TC7534(Boolean excute, String language, String itteration, String browserName, String url, String userMail) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 407]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesPendingActivitiesCoverage_TC3423_TC7534(Boolean excute, String language, String itteration, String browserName, String url, String userMail) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 472]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesReviewTaskCoverage_TC3423_TC7534(Boolean excute, String language, String itteration, String browserName, String url, String userMail) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 549]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesScheduledRemindersCoverage_TC7482_TC7534(Boolean excute, String language, String itteration, String browserName, String url, String userMail) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 625]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Messages.MessagesTests.UIMessageTemplatesWelcomeEmailCoverage_TC3423_TC7534(Boolean excute, String language, String itteration, String browserName, String url, String userMail) in 
C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Messages\MessagesTests.cs:line 712]]> + + + + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir6236_18900}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: b3b31a390177ef50a7382429e7c087d1 +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadFourthColumn_TC10842(Boolean excute, String browserName, String url, String errorMessage, String exceptionXpath) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 347]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadFourthColumn_TC10860(Boolean excute, String browserName, String url, String errorMessage, String exceptionXpath) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 307]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at 
MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadInValidTypeOfAccount_TC10865(Boolean excute, String browserName, String url, String errorMessage) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 443]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadInValidTypeOfAccount_TC10866(Boolean excute, String browserName, String url, String errorMessage) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 386]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadInValidUserAccount_TC10826(Boolean excute, String browserName, String url, String errorMessage, String errorUserNotFoundCount, String errorStartsWithCount) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 500]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadValidUserAccount_TC10837(Boolean excute, String browserName, String url, String numberOfElementsInFile) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 259]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadValidUserAccount_TC10839(Boolean excute, String browserName, String url, String numberOfElementsInFile) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 213]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadValidUserAccount_TC10840(Boolean excute, String browserName, String url, String numberOfElementsInFile) in 
C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 167]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadValidUserAccount_TC10861(Boolean excute, String browserName, String url, String numberOfElementsInFile) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 121]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Bulk_Upload.BulkUpload.UISensitiveAccountExclusionBulkUploadValidUserAccount_TC10896(Boolean excute, String browserName, String url, String numberOfElementsInFile) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Bulk Upload\BulkUpload.cs:line 75]]> + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir4256_15126}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: b0327689b4883ded705855c7620da98c +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 
x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir7696_19366}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: 6f5cf4571ba1f17931476ccdc0db53a3 +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Grid.Grid.UISensitiveAccountExclusionGridManuallyRemoveAccount_TC10831(Boolean excute, String browserName, String url, String numberOfElementsInFile, String numberOfElementsAfterSearch) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Grid\Grid.cs:line 145]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Grid.Grid.UISensitiveAccountExclusionGridPaging_TC10830(Boolean excute, String browserName, String url, String numberOfElementsInFile, String numberOfElementsAfterSearch) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Grid\Grid.cs:line 196]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Sensitive_Account_Exclusions.Grid.Grid.UISensitiveAccountExclusionGridSearch_TC10823(Boolean excute, String browserName, String url, String numberOfElementsInFile, String numberOfElementsAfterSearch) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Sensitive Account Exclusions\Grid\Grid.cs:line 31]]> + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: 
{"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 0 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir11004_25232}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: 538e1e6460210869f7784402170ca9f9 +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + + + + + + + + + + + + OpenQA.Selenium.NoSuchElementException : no such element: Unable to locate element: {"method":"xpath","selector":"//span[@translate='_Loading_']"} + (Session info: chrome=58.0.3029.110) + (Driver info: chromedriver=2.29.461591 (62ebf098771772160f391d75e589dc567915b233),platform=Windows NT 6.3.9600 x86_64) (WARNING: The server did not provide any stacktrace information) +Command duration or timeout: 16 milliseconds +For documentation on this error, please visit: http://seleniumhq.org/exceptions/no_such_element.html +Build info: version: '3.1.0', revision: '86a5d70', time: '2017-02-16 07:57:44 -0800' +System info: host: 'BRC-JENKINS2-AU', ip: '172.16.61.17', os.name: 'Windows Server 2012 R2', os.arch: 'x86', os.version: '6.3', java.version: '1.8.0_66' +Driver info: org.openqa.selenium.chrome.ChromeDriver +Capabilities [{applicationCacheEnabled=false, rotatable=false, mobileEmulationEnabled=false, networkConnectionEnabled=false, chrome={chromedriverVersion=2.29.461591 (62ebf098771772160f391d75e589dc567915b233), userDataDir=C:\Users\BUILD-~1\AppData\Local\Temp\scoped_dir7980_3057}, takesHeapSnapshot=true, pageLoadStrategy=normal, databaseEnabled=false, handlesAlerts=true, hasTouchScreen=false, version=58.0.3029.110, platform=WIN8_1, browserConnectionEnabled=false, nativeEvents=true, acceptSslCerts=true, locationContextEnabled=true, webStorageEnabled=true, browserName=chrome, takesScreenshot=true, javascriptEnabled=true, cssSelectorsEnabled=true, unexpectedAlertBehaviour=}] +Session ID: 7b8a5378b299463289d0d11f59f04129 +*** Element info: {Using=xpath, value=//span[@translate='_Loading_']}]]> + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition)]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at 
OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Set_Admin_Roles.SetAdminRoles.UISetAdminRoleOverviewLook_TC3384(Boolean excute, String itteration, String browserName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Set Admin Roles\SetAdminRoles.cs:line 97]]> + + + + + + + + + + + c__DisplayClass13.b__12(IWebDriver driver) + at OpenQA.Selenium.Support.UI.DefaultWait`1.Until[TResult](Func`2 condition) + at MyCompanyUiSettings.Bl.Base.waitToLoad() in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Bl\Base.cs:line 537 + at MyCompanyUiSettings.Tl.Settings.Set_Admin_Roles.SetAdminRoles.UISetAdminRoleRemoveAdministrators_TC3382(Boolean excute, String itteration, String account, String browserName, String dbTables, String dbSchema, String tableName, String columnName, String url) in C:\branches\1\main-branch\Automation\UI\MyCompanyUiSettings\Tl\Settings\Set Admin Roles\SetAdminRoles.cs:line 138]]> + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue47367.exception b/python/test/files/nunit/nunit3/jenkins/NUnit-issue47367.exception new file mode 100644 index 0000000..2657d0d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue47367.exception @@ -0,0 +1 @@ +ParseError: file='files/nunit/nunit3/jenkins/NUnit-issue47367.xml', message='attributes construct error, line 5, column 109 (NUnit-issue47367.xml, line 5)', line=None, column=None, exception=XMLSyntaxError('attributes construct error, line 5, column 109') \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue47367.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue47367.xml new file mode 100644 index 0000000..b744a34 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue47367.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.annotations new file mode 100644 index 0000000..821fabd --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.annotations @@ -0,0 +1,40 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 3 tests pass in 17s', + 'summary': + '3 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '17s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '2 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLfwUJl7GENS4EcEsUBnv7oKi0L2Zl8zJF1Cz5QNrKsatBxehJZg8CgdGh68npseFr0' + 't7tF7KUmxwkKg/sQhQhZgRDb4GvU69MPPcw38tchaLnLek2XdwBO9idhX8ugG5zrfD3gA' + 'AAA==\n', + 'annotations': [ + { + 
'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': 'TestAllInfo\nTestAllRawData\nTestFirst10RawData' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.junit-xml new file mode 100644 index 0000000..9c3b054 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.junit-xml @@ -0,0 +1,48 @@ + + + + + + + + + + + + + + + Connecting to Wombat Database + Project Filename : C:\Data\example.dat + Licence Server : LICSERVER + Author : AUTHOR + Subset : +Checking 241 log ids + + + + Connecting to Wombat Database + Project Filename : C:\Data\example.dat + Licence Server : LICSERVER + Author : AUTHOR + Subset : +Checking 10 log ids + + + + + + Connecting to Wombat Database + Project Filename : C:\Data\example.dat + Licence Server : LICSERVER + Author : AUTHOR + Subset : + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.results b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.results new file mode 100644 index 0000000..a0270af --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.results @@ -0,0 +1,77 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=3, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=17, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='NS.APP.Wombat.TestIntegration.TestGetData', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='NS.APP.Wombat.TestIntegration.TestGetCurveInfo', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue48478.xml', + test_file=None, + line=None, + class_name='', + test_name='TestAllRawData', + result='success', + message=None, + content=None, + stdout='Connecting to Wombat Database\n Project Filename : ' + 'C:\\Data\\example.dat\n Licence Server : LICSERVER\n Author ' + ' : AUTHOR\n Subset : \nChecking 241 log ids\n', + stderr=None, + time=11.129426 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue48478.xml', + test_file=None, + line=None, + class_name='', + test_name='TestFirst10RawData', + result='success', + message=None, + content=None, + stdout='Connecting to Wombat Database\n Project Filename : ' + 'C:\\Data\\example.dat\n Licence Server : LICSERVER\n Author ' + ' : AUTHOR\n Subset : \nChecking 10 log ids\n', + stderr=None, + time=0.48234 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue48478.xml', + test_file=None, + line=None, + class_name='', + test_name='TestAllInfo', + result='success', + message=None, + content=None, + stdout='Connecting to Wombat Database\n Project Filename : ' + 'C:\\Data\\example.dat\n Licence Server : LICSERVER\n Author ' + ' : AUTHOR\n Subset : \n', + stderr=None, + time=6.195858 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.xml new file mode 100644 index 0000000..a4d12c9 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue48478.xml @@ -0,0 +1,60 
@@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.annotations new file mode 100644 index 0000000..d0be89b --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.annotations @@ -0,0 +1,84 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '3 fail, 3 pass in 0s', + 'summary': + '6 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '2 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20033 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLfwkFl7GEIS4kY9ZoDLe3RU1YDdvZvIOrsGowCfWNYyHBDFDT7AkFBG8I2wJaYj3NH' + '55DklKKoZSbLDX71kLML+HQvT4XjC5z3fHWvdwsWWuZJlrl/TWQiR4Ewur4OcFmZnWM90' + 'AAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue50162.xml\u2003[took 0s]', + 'title': 'TestWithParameters("Ä") failed', + 'raw_details': + ' Expected: greater than 2\n But was: 1\n bei ' + 'UnitTests.HelloWorldTests.TestWithParameters(String param) in ' + 'C:\\Program Files ' + '(x86)\\Jenkins\\workspace\\Build-VS-project\\HelloWorld\\UnitTests\\Hello' + 'WorldTests.cs:Zeile 27.' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue50162.xml\u2003[took 0s]', + 'title': 'TestWithParameters("Ö") failed', + 'raw_details': + ' Expected: greater than 2\n But was: 1\n bei ' + 'UnitTests.HelloWorldTests.TestWithParameters(String param) in ' + 'C:\\Program Files ' + '(x86)\\Jenkins\\workspace\\Build-VS-project\\HelloWorld\\UnitTests\\Hello' + 'WorldTests.cs:Zeile 27.' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue50162.xml\u2003[took 0s]', + 'title': 'FailThisTest failed', + 'raw_details': + 'Oh no the test failed!\n bei ' + 'UnitTests.HelloWorldTests.FailThisTest() in C:\\Program Files ' + '(x86)\\Jenkins\\workspace\\Build-VS-project\\HelloWorld\\UnitTests\\Hello' + 'WorldTests.cs:Zeile 18.' 
+ }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 6 tests, see "Raw output" for the full list of tests.', + 'title': '6 tests found', + 'raw_details': + 'DoATest\nFailThisTest\nTestWithParameters("Bar")\n' + 'TestWithParameters("Foo")\nTestWithParameters("Ä")\n' + 'TestWithParameters("Ö")' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.junit-xml new file mode 100644 index 0000000..54685da --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.junit-xml @@ -0,0 +1,39 @@ + + + + + + + + + + + + + + + + + bei UnitTests.HelloWorldTests.FailThisTest() in C:\Program Files (x86)\Jenkins\workspace\Build-VS-project\HelloWorld\UnitTests\HelloWorldTests.cs:Zeile 18. + + + + + + + + + bei UnitTests.HelloWorldTests.TestWithParameters(String param) in C:\Program Files (x86)\Jenkins\workspace\Build-VS-project\HelloWorld\UnitTests\HelloWorldTests.cs:Zeile 27. + + + + + bei UnitTests.HelloWorldTests.TestWithParameters(String param) in C:\Program Files (x86)\Jenkins\workspace\Build-VS-project\HelloWorld\UnitTests\HelloWorldTests.cs:Zeile 27. + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.results b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.results new file mode 100644 index 0000000..3c30a2f --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.results @@ -0,0 +1,118 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=6, + suite_skipped=0, + suite_failures=3, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.HelloWorldTests.TestWithParameters', + tests=4, + skipped=0, + failures=2, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='UnitTests.HelloWorldTests', + tests=6, + skipped=0, + failures=3, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue50162.xml', + test_file=None, + line=None, + class_name='', + test_name='TestWithParameters("Foo")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.018378 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue50162.xml', + test_file=None, + line=None, + class_name='', + test_name='TestWithParameters("Bar")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000179 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue50162.xml', + test_file=None, + line=None, + class_name='', + test_name='TestWithParameters("Ä")', + result='failure', + message=' Expected: greater than 2\n But was: 1\n', + content=' bei UnitTests.HelloWorldTests.TestWithParameters(String param) in ' + 'C:\\Program Files ' + '(x86)\\Jenkins\\workspace\\Build-VS-project\\HelloWorld\\UnitTests\\HelloWo' + 'rldTests.cs:Zeile 27.\n', + stdout=None, + stderr=None, + time=0.00291 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue50162.xml', + test_file=None, + line=None, + class_name='', + test_name='TestWithParameters("Ö")', + result='failure', + message=' Expected: greater than 2\n But was: 1\n', + content=' bei UnitTests.HelloWorldTests.TestWithParameters(String param) in ' + 
'C:\\Program Files ' + '(x86)\\Jenkins\\workspace\\Build-VS-project\\HelloWorld\\UnitTests\\HelloWo' + 'rldTests.cs:Zeile 27.\n', + stdout=None, + stderr=None, + time=0.001015 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue50162.xml', + test_file=None, + line=None, + class_name='', + test_name='DoATest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.03631 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue50162.xml', + test_file=None, + line=None, + class_name='', + test_name='FailThisTest', + result='failure', + message='Oh no the test failed!', + content=' bei UnitTests.HelloWorldTests.FailThisTest() in C:\\Program Files ' + '(x86)\\Jenkins\\workspace\\Build-VS-project\\HelloWorld\\UnitTests\\HelloWo' + 'rldTests.cs:Zeile 18.\n', + stdout=None, + stderr=None, + time=0.018525 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.xml new file mode 100644 index 0000000..06f5358 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue50162.xml @@ -0,0 +1,95 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.annotations new file mode 100644 index 0000000..f2016fb --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.annotations @@ -0,0 +1,88 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '3 fail, 6 pass in 0s', + 'summary': + '9 tests\u2002\u2003\u20036 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '3 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20033 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MywqAIBREf' + '0VctyiCoH5GxIwu+YirrqJ/72ZK7ubMDOfiGxgd+MKGjvGQIGYYCdaEMoJ3hD0hDfGd5p' + 'pFSEpRMf3FAWf7FpsEU2xfoRE9lgsmV31vbHUf/7bMjSxz61LeWogEJbGwS34/WLAikt0' + 'AAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue5674.xml\u2003[took 0s]', + 'title': 'GetBrowsersTest.BrowserNamesTest1.FailTest("IE") failed', + 'raw_details': + 'at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in ' + 'C:\\Users\\Samw\\Documents\\Visual Studio ' + '2008\\Projects\\GetBrowsersTest\\GetBrowsersTest\\BrowserNamesTest1.cs:' + 'line 27' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue5674.xml\u2003[took 0s]', + 'title': 'GetBrowsersTest.BrowserNamesTest1.FailTest("FireFox") failed', + 
'raw_details': + 'at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in ' + 'C:\\Users\\Samw\\Documents\\Visual Studio ' + '2008\\Projects\\GetBrowsersTest\\GetBrowsersTest\\BrowserNamesTest1.cs:' + 'line 27' + }, + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue5674.xml\u2003[took 0s]', + 'title': 'GetBrowsersTest.BrowserNamesTest1.FailTest("Safari") failed', + 'raw_details': + 'at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in ' + 'C:\\Users\\Samw\\Documents\\Visual Studio ' + '2008\\Projects\\GetBrowsersTest\\GetBrowsersTest\\BrowserNamesTest1.cs:' + 'line 27' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 9 tests, see "Raw output" for the full list of tests.', + 'title': '9 tests found', + 'raw_details': + 'GetBrowsersTest.BrowserNamesTest1.AnotherTest("FireFox")\n' + 'GetBrowsersTest.BrowserNamesTest1.AnotherTest("IE")\n' + 'GetBrowsersTest.BrowserNamesTest1.AnotherTest("Safari")\n' + 'GetBrowsersTest.BrowserNamesTest1.FailTest("FireFox")\n' + 'GetBrowsersTest.BrowserNamesTest1.FailTest("IE")\n' + 'GetBrowsersTest.BrowserNamesTest1.FailTest("Safari")\n' + 'GetBrowsersTest.BrowserNamesTest1.ShowBrowsers("FireFox")\n' + 'GetBrowsersTest.BrowserNamesTest1.ShowBrowsers("IE")\n' + 'GetBrowsersTest.BrowserNamesTest1.ShowBrowsers("Safari")' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.junit-xml new file mode 100644 index 0000000..74bffe4 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.junit-xml @@ -0,0 +1,47 @@ + + + + + + + + + + + + + + + + + + + + + at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in C:\Users\Samw\Documents\Visual Studio 2008\Projects\GetBrowsersTest\GetBrowsersTest\BrowserNamesTest1.cs:line 27 + + + + at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in C:\Users\Samw\Documents\Visual Studio 2008\Projects\GetBrowsersTest\GetBrowsersTest\BrowserNamesTest1.cs:line 27 + + + + at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in C:\Users\Samw\Documents\Visual Studio 2008\Projects\GetBrowsersTest\GetBrowsersTest\BrowserNamesTest1.cs:line 27 + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.results b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.results new file mode 100644 index 0000000..3b15141 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.results @@ -0,0 +1,167 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=3, + suite_tests=9, + suite_skipped=0, + suite_failures=3, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='AnotherTest', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='FailTest', + tests=3, + skipped=0, + failures=3, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='ShowBrowsers', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.AnotherTest("IE")', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.023 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.AnotherTest("FireFox")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.AnotherTest("Safari")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.FailTest("IE")', + result='failure', + message=None, + content='at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in ' + 'C:\\Users\\Samw\\Documents\\Visual Studio ' + '2008\\Projects\\GetBrowsersTest\\GetBrowsersTest\\BrowserNamesTest1.cs:li' + 'ne 27\n', + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.FailTest("FireFox")', + result='failure', + message=None, + content='at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in ' + 'C:\\Users\\Samw\\Documents\\Visual Studio ' + '2008\\Projects\\GetBrowsersTest\\GetBrowsersTest\\BrowserNamesTest1.cs:li' + 'ne 27\n', + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.FailTest("Safari")', + result='failure', + message=None, + content='at GetBrowsersTest.BrowserNamesTest1.FailTest(String browser) in ' + 'C:\\Users\\Samw\\Documents\\Visual Studio ' + '2008\\Projects\\GetBrowsersTest\\GetBrowsersTest\\BrowserNamesTest1.cs:li' + 'ne 27\n', + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.ShowBrowsers("IE")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.ShowBrowsers("FireFox")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue5674.xml', + test_file=None, + line=None, + class_name='', + test_name='GetBrowsersTest.BrowserNamesTest1.ShowBrowsers("Safari")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.xml new file mode 100644 index 0000000..ad8e928 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue5674.xml @@ -0,0 +1,57 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.annotations new file mode 100644 index 0000000..63b7395 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.annotations @@ -0,0 +1,59 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 1 skipped, 1 pass in 3s', + 'summary': + '3 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '3s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMMQ6AIAxFr' + '0KYHTRuXsYQlNgoYlqYjHe3KmrZ/ntt3q4dLCPpTjWV0pQgfjAkNBHCytgy8iGS3D0la/' + 'NvFjNshXAGlkKMiAHZ1GwwrW/vmjL38F+7WcRuli0bvIfIkJeiyejjBNBleN/dAAAA\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-issue6353.xml\u2003[took 2s]', + 'title': 'FailedTest failed', + 'raw_details': 'Reason\nHere comes the stack trace' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'IgnoredTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': 'FailedTest\nIgnoredTest\nSuccessfulTest' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.junit-xml new file mode 100644 index 0000000..631327f --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.junit-xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + Here comes the stack trace + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.results b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.results new file mode 100644 index 0000000..0ada0c7 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.results @@ -0,0 +1,62 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=3, + suite_skipped=1, + suite_failures=1, + suite_errors=0, + suite_time=3, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MyTests', + tests=3, + skipped=1, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue6353.xml', + test_file=None, + line=None, + class_name='', + test_name='SuccessfulTest', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=1.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue6353.xml', + test_file=None, + line=None, + class_name='', + test_name='IgnoredTest', + result='skipped', + message='Reason', + content=None, + stdout=None, + stderr=None, + time=None + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-issue6353.xml', + test_file=None, + line=None, + class_name='', + test_name='FailedTest', + result='failure', + message='Reason', + content='Here comes the stack trace', + stdout=None, + stderr=None, + time=2.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.xml new file mode 100644 index 0000000..c30aab9 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-issue6353.xml @@ -0,0 +1,27 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.annotations new file mode 100644 index 0000000..a9d4c1f --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.annotations @@ -0,0 +1,44 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 4 tests pass in 0s', + 'summary': + '4 tests\u2002\u2003\u20034 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '2 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLdRYeRlCUOJGPmaBynh3F8SI3byZyTu5BrMGPrOhYzwkiAVGgiWhjOAdYU9IQ8zT9G' + 'YRklL/YoejfQstwfyKFdFjbTC515djq3v4sxVuZIVbl/LWQiSoiYVN8usGDjGDkd0AAAA' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 4 tests, see "Raw output" for the full list of tests.', + 'title': '4 tests found', + 'raw_details': + 'AnotherNS.OtherMainClassTest.TestPropertyValueAgain\n' + 'UnitTests.MainClassTest.TestFailure\n' + 'UnitTests.MainClassTest.TestMethodUpdateValue\n' + 'UnitTests.MainClassTest.TestPropertyValue' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.junit-xml new file mode 100644 index 0000000..1d51a3d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.junit-xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.results b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.results new file mode 100644 index 0000000..650e895 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.results @@ 
-0,0 +1,84 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=4, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.MainClassTest', + tests=3, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='AnotherNS.OtherMainClassTest', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-multinamespace.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-multinamespace.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestMethodUpdateValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-multinamespace.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestFailure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-multinamespace.xml', + test_file=None, + line=None, + class_name='', + test_name='AnotherNS.OtherMainClassTest.TestPropertyValueAgain', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.xml new file mode 100644 index 0000000..c6498ff --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-multinamespace.xml @@ -0,0 +1,30 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.annotations new file mode 100644 index 0000000..1bad98a --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.annotations @@ -0,0 +1,49 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 1 pass in 0s', + 'summary': + '2 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLdTSyxCCEDfyMQtUxrsLCIrdm93JnFSBlp4uZBoI9RHCC2tEHsDZhGPC9Aj5NbfNfB' + 
'SiuvWww9HbTHHQP0MiOqwKRtt6efa5h79a4S5WuG8JZwyEBHURv3F63ZlK7bXdAAAA\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-sec1752-file.xml\u2003[took 0s]', + 'title': 'UnitTests.MainClassTest.TestPropertyValue failed' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 2 tests, see "Raw output" for the full list of tests.', + 'title': '2 tests found', + 'raw_details': + 'UnitTests.MainClassTest.TestMethodUpdateValue\n' + 'UnitTests.MainClassTest.TestPropertyValue' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.exception b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.exception new file mode 100644 index 0000000..2ced6ac --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.exception @@ -0,0 +1 @@ +ParseError: file='files/nunit/nunit3/jenkins/NUnit-sec1752-file.xml', message='Failure to process entity xxe, line 17, column 51 (NUnit-sec1752-file.xml, line 17)', line=None, column=None, exception=XMLSyntaxError('Failure to process entity xxe, line 17, column 51') \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.junit-xml new file mode 100644 index 0000000..01db77a --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.junit-xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.results b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.results new file mode 100644 index 0000000..42d9a5d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.results @@ -0,0 +1,49 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=2, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.MainClassTest', + tests=2, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-sec1752-file.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue', + result='failure', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-sec1752-file.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestMethodUpdateValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.xml new file mode 100644 index 0000000..f940721 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-file.xml @@ -0,0 +1,27 @@ + + + ]> + + + + + + + + + + + + + &xxe; + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.annotations new file mode 100644 index 0000000..2dc6cd6 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.annotations @@ -0,0 
+1,49 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 1 pass in 0s', + 'summary': + '2 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLdTSyxCCEDfyMQtUxrsLCIrdm93JnFSBlp4uZBoI9RHCC2tEHsDZhGPC9Aj5NbfNfB' + 'SiuvWww9HbTHHQP0MiOqwKRtt6efa5h79a4S5WuG8JZwyEBHURv3F63ZlK7bXdAAAA\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit-sec1752-https.xml\u2003[took 0s]', + 'title': 'UnitTests.MainClassTest.TestPropertyValue failed' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 2 tests, see "Raw output" for the full list of tests.', + 'title': '2 tests found', + 'raw_details': + 'UnitTests.MainClassTest.TestMethodUpdateValue\n' + 'UnitTests.MainClassTest.TestPropertyValue' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.exception b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.exception new file mode 100644 index 0000000..0b10953 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.exception @@ -0,0 +1 @@ +ParseError: file='files/nunit/nunit3/jenkins/NUnit-sec1752-https.xml', message='Failure to process entity xxe, line 17, column 51 (NUnit-sec1752-https.xml, line 17)', line=None, column=None, exception=XMLSyntaxError('Failure to process entity xxe, line 17, column 51') \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.junit-xml new file mode 100644 index 0000000..01db77a --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.junit-xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.results b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.results new file mode 100644 index 0000000..956c3cd --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.results @@ -0,0 +1,49 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=2, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.MainClassTest', + tests=2, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-sec1752-https.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue', + 
result='failure', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-sec1752-https.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestMethodUpdateValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.xml new file mode 100644 index 0000000..9168541 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-sec1752-https.xml @@ -0,0 +1,27 @@ + + + ]> + + + + + + + + + + + + + &xxe; + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-simple.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.annotations new file mode 100644 index 0000000..fa75c6a --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.annotations @@ -0,0 +1,41 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 2 tests pass in 0s', + 'summary': + '2 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLdTSyxCCEDfyMQtUxrsLCgrdvJnJO6kCLT1dyDQQ6iOED9aIPICzCceEaQh5mmtmPg' + 'rRFzsc7ZspDrorJKLD0mC01Zdjq3v5tz3cyB5uXcIZAyFBScRvnF43yWbLod0AAAA=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 2 tests, see "Raw output" for the full list of tests.', + 'title': '2 tests found', + 'raw_details': + 'UnitTests.MainClassTest.TestMethodUpdateValue\n' + 'UnitTests.MainClassTest.TestPropertyValue' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-simple.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.junit-xml new file mode 100644 index 0000000..88c426d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.junit-xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-simple.results b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.results new file mode 100644 index 0000000..56ed9d7 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.results @@ -0,0 +1,49 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=2, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.MainClassTest', + tests=2, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + 
publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-simple.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit-simple.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestMethodUpdateValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit-simple.xml b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.xml new file mode 100644 index 0000000..11d456a --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit-simple.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit.annotations b/python/test/files/nunit/nunit3/jenkins/NUnit.annotations new file mode 100644 index 0000000..78c1d9d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit.annotations @@ -0,0 +1,61 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 3 pass in 0s', + 'summary': + '4 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '2 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBBEr' + '0KoLfxVXoYQxLiRj1mgMt7dFSHSzZuZvItvYHTgCxs6xkOCmGEkWBPKCN4R9oQ0xHeaax' + 'YhKUXF9BcHnO1bbBJMUX+FRvRYLphc9b2x1X382zI3ssytS3lrIRKUxMIu+f0AuKmg790' + 'AAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnit.xml\u2003[took 0s]', + 'title': 'UnitTests.MainClassTest.TestFailure failed', + 'raw_details': + ' Expected failure\n Expected: 30\n But was: 20\n at ' + 'UnitTests.MainClassTest.TestFailure () [0x00000] \n at <0x00000> ' + '\n at (wrapper managed-to-native) ' + 'System.Reflection.MonoMethod:InternalInvoke (object,object[])\n ' + 'at System.Reflection.MonoMethod.Invoke (System.Object obj, ' + 'BindingFlags invokeAttr, System.Reflection.Binder binder, ' + 'System.Object[] parameters, System.Globalization.CultureInfo ' + 'culture) [0x00000]' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 4 tests, see "Raw output" for the full list of tests.', + 'title': '4 tests found', + 'raw_details': + 'AnotherNS.OtherMainClassTest.TestPropertyValueAgain\n' + 'UnitTests.MainClassTest.TestFailure\n' + 'UnitTests.MainClassTest.TestMethodUpdateValue\n' + 'UnitTests.MainClassTest.TestPropertyValue' + } + ] + } + } +] \ No newline at end of file diff --git 
a/python/test/files/nunit/nunit3/jenkins/NUnit.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnit.junit-xml new file mode 100644 index 0000000..fae9788 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit.junit-xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + at UnitTests.MainClassTest.TestFailure () [0x00000] + at <0x00000> <unknown method> + at (wrapper managed-to-native) System.Reflection.MonoMethod:InternalInvoke (object,object[]) + at System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags invokeAttr, System.Reflection.Binder binder, System.Object[] parameters, System.Globalization.CultureInfo culture) [0x00000] + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit.results b/python/test/files/nunit/nunit3/jenkins/NUnit.results new file mode 100644 index 0000000..e50ce04 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit.results @@ -0,0 +1,89 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=4, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.MainClassTest', + tests=3, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='AnotherNS.OtherMainClassTest', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestPropertyValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestMethodUpdateValue', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.MainClassTest.TestFailure', + result='failure', + message=' Expected failure\n Expected: 30\n But was: 20\n', + content=' at UnitTests.MainClassTest.TestFailure () [0x00000] \n at ' + '<0x00000> \n at (wrapper managed-to-native) ' + 'System.Reflection.MonoMethod:InternalInvoke (object,object[])\n at ' + 'System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags ' + 'invokeAttr, System.Reflection.Binder binder, System.Object[] ' + 'parameters, System.Globalization.CultureInfo culture) [0x00000] \n', + stdout=None, + stderr=None, + time=0.092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnit.xml', + test_file=None, + line=None, + class_name='', + test_name='AnotherNS.OtherMainClassTest.TestPropertyValueAgain', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnit.xml b/python/test/files/nunit/nunit3/jenkins/NUnit.xml new file mode 100644 index 0000000..61bbd3d --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnit.xml @@ -0,0 +1,42 @@ + + + + + + + + + + + + + + + + + + at (wrapper managed-to-native) System.Reflection.MonoMethod:InternalInvoke (object,object[]) + at System.Reflection.MonoMethod.Invoke (System.Object obj, BindingFlags 
invokeAttr, System.Reflection.Binder binder, System.Object[] parameters, System.Globalization.CultureInfo culture) [0x00000] +]]> + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.annotations b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.annotations new file mode 100644 index 0000000..fb026b1 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.annotations @@ -0,0 +1,50 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail in 0s', + 'summary': + '1 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMSw6AIAxEr' + '0JYu9CtlzEEITbyMS2sjHcXERR28zrTd3INRhGf2TQwThHCB2tEEcC7hGPCVIRa5bxQlL' + 'ItF9rh6A5agOleFKLHMsHoqu+Jre7l35a5kWVuXdJbCyFBSYw2wa8bniF3vN0AAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit3/jenkins/NUnitUnicode.xml\u2003[took 0s]', + 'title': 'UnitTests.UnicodeClassTest.TestFailure failed', + 'raw_details': + ' Expected failure\n Expected: ü\n But was: â\n at ' + 'UnitTests.UnicodeClassTest.TestFailure () [0x00000]' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There is 1 test, see "Raw output" for the name of the test.', + 'title': '1 test found', + 'raw_details': 'UnitTests.UnicodeClassTest.TestFailure' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.junit-xml b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.junit-xml new file mode 100644 index 0000000..80222f6 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.junit-xml @@ -0,0 +1,21 @@ + + + + + + + + + + + + at UnitTests.UnicodeClassTest.TestFailure () [0x00000] + + + + + + + + + diff --git a/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.results b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.results new file mode 100644 index 0000000..87c3708 --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.results @@ -0,0 +1,36 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=1, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='UnitTests.UnicodeClassTest', + tests=1, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit3/jenkins/NUnitUnicode.xml', + test_file=None, + line=None, + class_name='', + test_name='UnitTests.UnicodeClassTest.TestFailure', + result='failure', + message=' Expected failure\n Expected: ü\n But was: 
â\n', + content=' at UnitTests.UnicodeClassTest.TestFailure () [0x00000]\n', + stdout=None, + stderr=None, + time=0.092 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.xml b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.xml new file mode 100644 index 0000000..f75aa0b --- /dev/null +++ b/python/test/files/nunit/nunit3/jenkins/NUnitUnicode.xml @@ -0,0 +1,29 @@ + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/trx/mstest/pickles.annotations b/python/test/files/trx/mstest/pickles.annotations new file mode 100644 index 0000000..680c7bf --- /dev/null +++ b/python/test/files/trx/mstest/pickles.annotations @@ -0,0 +1,87 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 3 pass in 0s', + 'summary': + '4 tests\u2002\u2003\u20033 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBBEr' + '0KoLTRaeRlCUONGAbNAZby7Kx+hmzczeTff4Fwdn9nQMe4C+B+WgNKDNYQ9IQ3+m6aShQ' + 'tKUTHW4oCrfYtNwpltqVgRLeYLBlN8X2x1iastciOL3LqU1Ro8QU7M7ZI/L5ec2abdAAA' + 'A\n', + 'annotations': [ + { + 'path': 'Pickles.TestHarness.MSTest.AdditionFeature', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/pickles.trx\u2003[took 0s]', + 'title': + 'FailToAddTwoNumbers (Pickles.TestHarness.MSTest.AdditionFeature) ' + 'failed', + 'raw_details': + '\n Test method ' + 'Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers ' + 'threw exception:\n ' + 'Should.Core.Exceptions.NotEqualException: Assert.NotEqual() ' + 'Failure\n \n at ' + 'Pickles.TestHarness.MSTest.Steps.ThenTheResultShouldBePass(Int32 ' + 'result) in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.MSTest\\Steps.cs:line 28\n at lambda_method(Closure ' + ', IContextManager , Int32 )<...>\n at ' + 'TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManag' + 'er contextManager, Object[] arguments, ITestTracer testTracer, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextMan' + 'ager contextManager, ITestTracer testTracer, Object[] arguments, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMat' + 'ch(BindingMatch match, Object[] arguments)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(St' + 'epArgs stepArgs)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastSte' + 'p()\n at ' + 'TechTalk.SpecFlow.TestRunner.CollectScenarioErrors()\n ' + 'at Pickles.TestHarness.MSTest.AdditionFeature.ScenarioCleanup() in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.MSTest\\Addition.feature.cs:line 0\n at ' + 
'Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers() ' + 'in ' + 'c:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.MSTest\\Addition.feature:line 18' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 4 tests, see "Raw output" for the full list of tests.', + 'title': '4 tests found', + 'raw_details': + 'Pickles.TestHarness.MSTest.AdditionFeature ‑ AddTwoNumbers\n' + 'Pickles.TestHarness.MSTest.AdditionFeature ‑ ' + 'AddingSeveralNumbers_40\n' + 'Pickles.TestHarness.MSTest.AdditionFeature ‑ ' + 'AddingSeveralNumbers_60\n' + 'Pickles.TestHarness.MSTest.AdditionFeature ‑ FailToAddTwoNumbers' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/mstest/pickles.junit-xml b/python/test/files/trx/mstest/pickles.junit-xml new file mode 100644 index 0000000..7235abd --- /dev/null +++ b/python/test/files/trx/mstest/pickles.junit-xml @@ -0,0 +1,25 @@ + + + + + + + + + Test method Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers threw exception: + Should.Core.Exceptions.NotEqualException: Assert.NotEqual() Failure + + at Pickles.TestHarness.MSTest.Steps.ThenTheResultShouldBePass(Int32 result) in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.MSTest\Steps.cs:line 28 + at lambda_method(Closure , IContextManager , Int32 )<...> + at TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager contextManager, Object[] arguments, ITestTracer testTracer, TimeSpan& duration) + at TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManager contextManager, ITestTracer testTracer, Object[] arguments, TimeSpan& duration) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch(BindingMatch match, Object[] arguments) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(StepArgs stepArgs) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep() + at TechTalk.SpecFlow.TestRunner.CollectScenarioErrors() + at Pickles.TestHarness.MSTest.AdditionFeature.ScenarioCleanup() in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.MSTest\Addition.feature.cs:line 0 + at Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers() in c:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.MSTest\Addition.feature:line 18 + + + + diff --git a/python/test/files/trx/mstest/pickles.results b/python/test/files/trx/mstest/pickles.results new file mode 100644 index 0000000..21c4b71 --- /dev/null +++ b/python/test/files/trx/mstest/pickles.results @@ -0,0 +1,105 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=4, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.trx', + test_file=None, + line=None, + class_name='Pickles.TestHarness.MSTest.AdditionFeature', + test_name='AddingSeveralNumbers_40', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.076891 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.trx', + test_file=None, + line=None, + class_name='Pickles.TestHarness.MSTest.AdditionFeature', + test_name='AddingSeveralNumbers_60', + result='success', + message=None, 
+ content=None, + stdout=None, + stderr=None, + time=0.0111534 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.trx', + test_file=None, + line=None, + class_name='Pickles.TestHarness.MSTest.AdditionFeature', + test_name='AddTwoNumbers', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0055623 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.trx', + test_file=None, + line=None, + class_name='Pickles.TestHarness.MSTest.AdditionFeature', + test_name='FailToAddTwoNumbers', + result='failure', + message='\n Test method ' + 'Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers threw ' + 'exception:\n Should.Core.Exceptions.NotEqualException: ' + 'Assert.NotEqual() Failure\n ', + content='\n Test method ' + 'Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers threw ' + 'exception:\n Should.Core.Exceptions.NotEqualException: ' + 'Assert.NotEqual() Failure\n \n at ' + 'Pickles.TestHarness.MSTest.Steps.ThenTheResultShouldBePass(Int32 ' + 'result) in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.MSTest\\Steps.cs:line 28\n at lambda_method(Closure , ' + 'IContextManager , Int32 )<...>\n at ' + 'TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager' + ' contextManager, Object[] arguments, ITestTracer testTracer, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManag' + 'er contextManager, ITestTracer testTracer, Object[] arguments, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch' + '(BindingMatch match, Object[] arguments)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(Step' + 'Args stepArgs)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep(' + ')\n at ' + 'TechTalk.SpecFlow.TestRunner.CollectScenarioErrors()\n at ' + 'Pickles.TestHarness.MSTest.AdditionFeature.ScenarioCleanup() in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.MSTest\\Addition.feature.cs:line 0\n at ' + 'Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers() in ' + 'c:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.MSTest\\Addition.feature:line 18\n ', + stdout=None, + stderr=None, + time=0.0459057 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/mstest/pickles.trx b/python/test/files/trx/mstest/pickles.trx new file mode 100644 index 0000000..b0fa72c --- /dev/null +++ b/python/test/files/trx/mstest/pickles.trx @@ -0,0 +1,180 @@ + + + + + + + + + + + + + + + + + + Fail to add two numbers + + + + FeatureTitle + Addition + + + + + + Add two numbers + + + + FeatureTitle + Addition + + + + + + Adding several numbers + + + + FeatureTitle + Addition + + + VariantName + 40 + + + Parameter:Second Number + 50 + + + Parameter:Result + 90 + + + Parameter:First Number + 40 + + + + + + Adding several numbers + + + + FeatureTitle + Addition + + + VariantName + 60 + + + Parameter:Second Number + 70 + + + Parameter:Result + 130 + + + Parameter:First Number + 60 + + + + + + + + + + + + + + + + + + + + Given I have entered 40 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(40) (0.0s) + And I have entered 50 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(50) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be 90 on 
the screen + -> done: Steps.ThenTheResultShouldBePass(90) (0.0s) + + + + + + + Given I have entered 60 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(60) (0.0s) + And I have entered 70 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(70) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be 130 on the screen + -> done: Steps.ThenTheResultShouldBePass(130) (0.0s) + + + + + + + Given I have entered 50 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(50) (0.0s) + And I have entered 70 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(70) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be 120 on the screen + -> done: Steps.ThenTheResultShouldBePass(120) (0.0s) + + + + + + + Given I have entered 50 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(50) (0.0s) + And I have entered -1 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(-1) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be -50 on the screen + -> error: Assert.NotEqual() Failure + + + + Test method Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers threw exception: + Should.Core.Exceptions.NotEqualException: Assert.NotEqual() Failure + + + at Pickles.TestHarness.MSTest.Steps.ThenTheResultShouldBePass(Int32 result) in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.MSTest\Steps.cs:line 28 + at lambda_method(Closure , IContextManager , Int32 )<...> + at TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager contextManager, Object[] arguments, ITestTracer testTracer, TimeSpan& duration) + at TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManager contextManager, ITestTracer testTracer, Object[] arguments, TimeSpan& duration) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch(BindingMatch match, Object[] arguments) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(StepArgs stepArgs) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep() + at TechTalk.SpecFlow.TestRunner.CollectScenarioErrors() + at Pickles.TestHarness.MSTest.AdditionFeature.ScenarioCleanup() in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.MSTest\Addition.feature.cs:line 0 + at Pickles.TestHarness.MSTest.AdditionFeature.FailToAddTwoNumbers() in c:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.MSTest\Addition.feature:line 18 + + + + + + diff --git a/python/test/files/trx/nunit/FluentValidation.Tests.annotations b/python/test/files/trx/nunit/FluentValidation.Tests.annotations new file mode 100644 index 0000000..f6dd4a8 --- /dev/null +++ b/python/test/files/trx/nunit/FluentValidation.Tests.annotations @@ -0,0 +1,1734 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 803 tests pass, 1 skipped in 3s', + 'summary': + '804 tests\u2002\u2003\u2003803 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '3s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u2004\u205f\u20041 
suites\u2003\u2003\u205f\u2004\u205f\u2004' + '1 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004\u205f' + '\u20041 files\u2004\u2002\u2003\u2003\u205f\u2004\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMwQqAIBAFf' + '0U8dzDsEP1MiBUtacaqp+jf28yybm/mwex8AjN63rG6YtxHCC8MEVUAtxJKQjrCdbWiea' + 'j3UeukZFELbDlwi0mBISFeMSI6zAbjWpoX/JO3KcXEn2Dib087ayEQ5MX8rPhxArdpBif' + 'lAAAA\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'FluentValidation.Tests.AccessorCacheTests ‑ Benchmark' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 804 tests, see "Raw output" for the list of tests 1 to ' + '625.', + 'title': '804 tests found (test 1 to 625)', + 'raw_details': + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'CanValidateInstancesOfType_returns_false_when_comparing_against_som' + 'e_other_type\nFluentValidation.Tests.AbstractValidatorTester ‑ ' + 'CanValidateInstancesOfType_returns_true_when_comparing_against_same' + '_type\nFluentValidation.Tests.AbstractValidatorTester ‑ ' + 'CanValidateInstancesOfType_returns_true_when_comparing_against_subc' + 'lass\nFluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Can_replace_default_errorcode_resolver\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Default_error_code_should_be_class_name\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'OverridePropertyName_should_override_property_name\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'OverridePropertyName_with_lambda_should_override_property_name\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'PreValidate_bypasses_nullcheck_on_instance\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'RuleForeach_with_null_instances\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_be_valid_when_there_are_no_failures_for_single_property\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_not_main_state\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_throw_for_non_member_expression_when_validating_single_prope' + 'rty\nFluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_throw_when_rule_is_null\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_validate_public_Field\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_validate_single_Field\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_validate_single_property\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_validate_single_property_where_invalid_property_as_string\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Should_validate_single_property_where_property_as_string\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Uses_named_parameters_to_validate_ruleset\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Validates_single_property_by_path\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'Validates_type_when_using_non_generic_validate_overload\n' + 
'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_Validate' + '(preValidationResult: )\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_Validate' + '(preValidationResult: AnotherInt Test Message)\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_Validate' + 'Async(preValidationResult: )\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_Validate' + 'Async(preValidationResult: AnotherInt Test Message)\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WhenPreValidationReturnsTrue_ValidatorsGetHit_Validate\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WhenPreValidationReturnsTrue_ValidatorsGetHit_ValidateAsync\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'When_the_Validators_pass_then_the_validatorRunner_should_return_tru' + 'e\nFluentValidation.Tests.AbstractValidatorTester ‑ ' + 'When_the_validators_fail_then_the_errors_Should_be_accessible_via_t' + 'he_errors_property\nFluentValidation.Tests.AbstractValidatorTester ' + '‑ ' + 'When_the_validators_fail_then_validatorrunner_should_return_false\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WithErrorCode_should_override_error_code\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WithMessage_and_WithErrorCode_should_override_error_message_and_err' + 'or_code\nFluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WithMessage_should_override_error_message\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WithName_should_override_field_name\n' + 'FluentValidation.Tests.AbstractValidatorTester ‑ ' + 'WithName_should_override_field_name_with_value_from_other_property\n' + 'FluentValidation.Tests.AccessorCacheTests ‑ Benchmark\n' + 'FluentValidation.Tests.AccessorCacheTests ‑ ' + 'Equality_comparison_check\n' + 'FluentValidation.Tests.AccessorCacheTests ‑ Gets_accessor\n' + 'FluentValidation.Tests.AccessorCacheTests ‑ ' + 'Gets_member_for_nested_property\n' + 'FluentValidation.Tests.AccessorCacheTests ‑ ' + 'Identifies_if_memberexp_acts_on_model_instance\n' + 'FluentValidation.Tests.AssemblyScannerTester ‑ ' + 'Finds_validators_for_types\n' + 'FluentValidation.Tests.AssemblyScannerTester ‑ ' + 'ForEach_iterates_over_types\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Cascade_mode_can_be_set_after_validator_instantiated\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Cascade_mode_can_be_set_after_validator_instantiated_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Cascade_mode_can_be_set_after_validator_instantiated_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Cascade_mode_can_be_set_after_validator_instantiated_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level_and_overriden_at_rule_level\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level_and_overriden_at_rule_level_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 
'Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level_and_overriden_at_rule_level_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level_and_overriden_at_rule_level_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure_when_set_to_Stop_globally_and_overr' + 'iden_at_rule_level\nFluentValidation.Tests.CascadingFailuresTester ' + '‑ ' + 'Validation_continues_on_failure_when_set_to_Stop_globally_and_overr' + 'iden_at_rule_level_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure_when_set_to_Stop_globally_and_overr' + 'iden_at_rule_level_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_on_failure_when_set_to_Stop_globally_and_overr' + 'iden_at_rule_level_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_to_second_validator_when_first_validator_succe' + 'eds_and_cascade_set_to_stop\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_to_second_validator_when_first_validator_succe' + 'eds_and_cascade_set_to_stop_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_to_second_validator_when_first_validator_succe' + 'eds_and_cascade_set_to_stop_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_to_second_validator_when_first_validator_succe' + 'eds_and_cascade_set_to_stop_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_when_set_to_Continue_at_validator_level\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_continues_when_set_to_Continue_at_validator_level_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_r' + 'ule_level\nFluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_r' + 'ule_level_async\nFluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_r' + 'ule_level_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_r' + 'ule_level_legacy\nFluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_Failure_when_set_to_Continue_globally_and' + '_overriden_at_rule_level\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_Failure_when_set_to_Continue_globally_and' + '_overriden_at_rule_level_and_async_validator_is_invoked_synchronous' + 'ly\nFluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_Failure_when_set_to_Continue_globally_and' + '_overriden_at_rule_level_and_async_validator_is_invoked_synchronous' + 'ly_legacy\nFluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_Failure_when_set_to_Continue_globally_and' + '_overriden_at_rule_level_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_Failure_when_set_to_Continue_globally_and' + '_overriden_at_rule_level_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_Failure_when_set_to_Continue_globally_and' + '_overriden_at_rule_level_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ 
' + 'Validation_stops_on_first_failure\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_failure_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_failure_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_failure_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at' + '_validator_level\nFluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at' + '_validator_level_async\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at' + '_validator_level_async_legacy\n' + 'FluentValidation.Tests.CascadingFailuresTester ‑ ' + 'Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at' + '_validator_level_legacy\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Can_validate_using_validator_for_base_type\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Chained_property_should_be_excluded\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Chained_validator_descriptor\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Chained_validator_should_not_be_invoked_on_null_property\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Condition_should_work_with_chained_property\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Explicitly_included_properties_should_be_propagated_to_nested_valid' + 'ators\nFluentValidation.Tests.ChainedValidationTester ‑ ' + 'Explicitly_included_properties_should_be_propagated_to_nested_valid' + 'ators_using_strings\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Separate_validation_on_chained_property\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Separate_validation_on_chained_property_conditional\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Separate_validation_on_chained_property_valid\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Should_allow_normal_rules_and_chained_property_on_same_property\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Uses_explicit_ruleset\n' + 'FluentValidation.Tests.ChainedValidationTester ‑ ' + 'Validates_chained_property\n' + 'FluentValidation.Tests.ChainingValidatorsTester ‑ ' + 'Options_should_only_apply_to_current_validator\n' + 'FluentValidation.Tests.ChainingValidatorsTester ‑ ' + 'Should_create_multiple_validators\n' + 'FluentValidation.Tests.ChainingValidatorsTester ‑ ' + 'Should_execute_multiple_validators\n' + 'FluentValidation.Tests.ChildRulesTests ‑ ' + 'Can_define_nested_rules_for_collection\n' + 'FluentValidation.Tests.ChildRulesTests ‑ ' + 'ChildRules_works_with_RuleSet\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Async_condition_should_work_with_child_collection\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Can_specify_condition_for_individual_collection_elements\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Can_validate_collection_using_validator_for_base_type\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Collection_should_be_excluded\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Collection_should_be_explicitly_included_with_expression\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 
'Collection_should_be_explicitly_included_with_string\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Condition_should_work_with_child_collection\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Creates_validator_using_context_from_property_value\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Should_override_property_name\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Should_work_with_top_level_collection_validator\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Should_work_with_top_level_collection_validator_and_overriden_name\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Skips_null_items\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Validates_collection\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Validates_collection_asynchronously\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Validates_collection_several_levels_deep\n' + 'FluentValidation.Tests.CollectionValidatorWithParentTests ‑ ' + 'Validates_collection_several_levels_deep_async\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Async_condition_should_work_with_complex_property\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Async_condition_should_work_with_complex_property_when_validator_in' + 'voked_synchronously\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Can_directly_validate_multiple_fields_of_same_type\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Can_validate_using_validator_for_base_type\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Complex_property_should_be_excluded\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Complex_validator_should_not_be_invoked_on_null_property\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Condition_should_work_with_complex_property\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Condition_should_work_with_complex_property_when_invoked_async\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Explicitly_included_properties_should_be_propagated_to_nested_valid' + 'ators\nFluentValidation.Tests.ComplexValidationTester ‑ ' + 'Explicitly_included_properties_should_be_propagated_to_nested_valid' + 'ators_using_strings\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Multiple_rules_in_chain_with_childvalidator_shouldnt_reuse_accessor' + '\nFluentValidation.Tests.ComplexValidationTester ‑ ' + 'Multiple_rules_in_chain_with_childvalidator_shouldnt_reuse_accessor' + '_async\nFluentValidation.Tests.ComplexValidationTester ‑ ' + 'Should_allow_normal_rules_and_complex_property_on_same_property\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Should_override_propertyName\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Validates_child_validator_asynchronously\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Validates_child_validator_synchronously\n' + 'FluentValidation.Tests.ComplexValidationTester ‑ ' + 'Validates_complex_property\nFluentValidation.Tests.ConditionTests ' + '‑ ' + 'Async_condition_executed_synchronosuly_with_asynchronous_collection' + '_rule\nFluentValidation.Tests.ConditionTests ‑ ' + 'Async_condition_executed_synchronosuly_with_asynchronous_rule\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Async_condition_executed_synchronosuly_with_synchronous_collection_' + 'role\nFluentValidation.Tests.ConditionTests ‑ ' + 
'Async_condition_executed_synchronosuly_with_synchronous_role\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Async_condition_is_applied_to_all_validators_in_the_chain\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Async_condition_is_applied_to_all_validators_in_the_chain_when_exec' + 'uted_synchronously\nFluentValidation.Tests.ConditionTests ‑ ' + 'Async_condition_is_applied_to_single_validator_in_the_chain_when_Ap' + 'plyConditionTo_set_to_CurrentValidator\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Condition_is_applied_to_all_validators_in_the_chain\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Condition_is_applied_to_single_validator_in_the_chain_when_ApplyCon' + 'ditionTo_set_to_CurrentValidator\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Sync_condition_is_applied_to_async_validators\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_fail_when_async_condition_does_not_match\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_fail_when_async_condition_matches\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_fail_when_condition_does_not_match\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_fail_when_condition_matches\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_succeed_when_async_condition_does_not_match\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_succeed_when_async_condition_matches\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_succeed_when_condition_does_not_match\n' + 'FluentValidation.Tests.ConditionTests ‑ ' + 'Validation_should_succeed_when_condition_matches\n' + 'FluentValidation.Tests.CreditCardValidatorTests ‑ IsValidTests\n' + 'FluentValidation.Tests.CreditCardValidatorTests ‑ ' + 'When_validation_fails_the_default_error_should_be_set\n' + 'FluentValidation.Tests.CustomFailureActionTester ‑ ' + 'Does_not_invoke_action_if_validation_success\n' + 'FluentValidation.Tests.CustomFailureActionTester ‑ ' + 'Invokes_custom_action_on_failure\n' + 'FluentValidation.Tests.CustomFailureActionTester ‑ ' + 'Passes_object_being_validated_to_action\n' + 'FluentValidation.Tests.CustomMessageFormatTester ‑ ' + 'Replaces_propertyvalue_placeholder\n' + 'FluentValidation.Tests.CustomMessageFormatTester ‑ ' + 'Replaces_propertyvalue_with_empty_string_when_null\n' + 'FluentValidation.Tests.CustomMessageFormatTester ‑ ' + 'Should_format_custom_message\n' + 'FluentValidation.Tests.CustomMessageFormatTester ‑ ' + 'Uses_custom_delegate_for_building_message\n' + 'FluentValidation.Tests.CustomMessageFormatTester ‑ ' + 'Uses_custom_delegate_for_building_message_only_for_specific_validat' + 'or\nFluentValidation.Tests.CustomMessageFormatTester ‑ ' + 'Uses_property_value_in_message\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'New_CustomAsync_within_ruleset\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'New_Custom_Returns_single_failure\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'New_Custom_Returns_single_failure_async\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'New_Custom_When_property_name_omitted_infers_property_name\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'New_Custom_When_property_name_omitted_infers_property_name_nested\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'New_Custom_within_ruleset\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'New_custom_uses_empty_property_name_for_model_level_rule\n' + 'FluentValidation.Tests.CustomValidatorTester 
‑ ' + 'Perserves_property_chain_using_custom\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'Runs_async_rule_synchronously_when_validator_invoked_synchronously\n' + 'FluentValidation.Tests.CustomValidatorTester ‑ ' + 'Runs_sync_rule_asynchronously_when_validator_invoked_asynchronously' + '\nFluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Empty_should_create_EmptyValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Equal_should_create_EqualValidator_with_explicit_value\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Equal_should_create_EqualValidator_with_lambda\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_e' + 'xplicit_value\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_l' + 'ambda\nFluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_l' + 'ambda_with_other_Nullable\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'GreaterThan_should_create_GreaterThanValidator_with_explicit_value\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'GreaterThan_should_create_GreaterThanValidator_with_lambda\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Length_should_create_ExactLengthValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Length_should_create_LengthValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Length_should_create_MaximumLengthValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Length_should_create_MinimumLengthValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'LessThanOrEqual_should_create_LessThanOrEqualValidator_with_explici' + 't_value\nFluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'LessThanOrEqual_should_create_LessThanOrEqualValidator_with_lambda\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'LessThanOrEqual_should_create_LessThanOrEqualValidator_with_lambda_' + 'with_other_Nullable\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'LessThan_should_create_LessThanValidator_with_explicit_value\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'LessThan_should_create_LessThanValidator_with_lambda\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'MustAsync_should_create_AsyncPredicateValidator_with_PropertyValida' + 'torContext\nFluentValidation.Tests.DefaultValidatorExtensionTester ' + '‑ MustAsync_should_create_AsyncPredicateValidator_with_context\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'MustAsync_should_create_AsyncPredicteValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'MustAsync_should_not_throw_InvalidCastException\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Must_should_create_PredicateValidator_with_PropertyValidatorContext' + '\nFluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Must_should_create_PredicateValidator_with_context\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'Must_should_create_PredicteValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'NotEmpty_should_create_NotEmptyValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 
'NotEqual_should_create_NotEqualValidator_with_explicit_value\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'NotEqual_should_create_NotEqualValidator_with_lambda\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'NotNull_should_create_NotNullValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'ScalePrecision_should_create_ScalePrecisionValidator\n' + 'FluentValidation.Tests.DefaultValidatorExtensionTester ‑ ' + 'ScalePrecision_should_create_ScalePrecisionValidator_with_ignore_tr' + 'ailing_zeros\nFluentValidation.Tests.EmailValidatorTests ‑ ' + 'Fails_email_validation_aspnetcore_compatible(email: " \\r \\t ' + '\\n")\nFluentValidation.Tests.EmailValidatorTests ‑ ' + 'Fails_email_validation_aspnetcore_compatible(email: "")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Fails_email_validation_aspnetcore_compatible(email: "0")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Fails_email_validation_aspnetcore_compatible(email: ' + '"@someDomain.com")\nFluentValidation.Tests.EmailValidatorTests ‑ ' + 'Fails_email_validation_aspnetcore_compatible(email: ' + '"@someDomain@abc.com")\nFluentValidation.Tests.EmailValidatorTests ' + '‑ Fails_email_validation_aspnetcore_compatible(email: "someName")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Fails_email_validation_aspnetcore_compatible(email: "someName@")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Fails_email_validation_aspnetcore_compatible(email: ' + '"someName@a@b.com")\nFluentValidation.Tests.EmailValidatorTests ‑ ' + 'Invalid_email_addressex_regex(email: "")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Invalid_email_addressex_regex(email: "first.last@test..co.uk")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Invalid_email_addressex_regex(email: "testperso")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Invalid_email_addressex_regex(email: ' + '"thisisaverylongstringcodeplex.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"!#$%&\'*+-/=?^_`|~@someDomain.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"1234@someDomain.com")\nFluentValidation.Tests.EmailValidatorTests ' + '‑ Valid_email_addresses_aspnetcore_compatible(email: ' + '"\\"firstName.lastName\\"@someDomain.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"firstName.lastName@someDomain.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@1234.com")\nFluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@someDomain.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@someDomain\uffef.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@some_domain.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@some~domain.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: "\xa0' + '@someDomain.com")\nFluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_aspnetcore_compatible(email: null)\n' + 
'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "!def!xyz%abc@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "$A12345@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "TestPerson@gmail.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "\\"Abc@def\\"@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: ' + '"\\"Abc\\\\@def\\"@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "\\"Fred ' + 'Bloggs\\"@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: ' + '"\\"Joe\\\\Blow\\"@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "__somename@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: ' + '"customer/department=shipping@example.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "first.last@test.co.uk")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "testperson+label@gmail.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: "testperson@gmail.com")\n' + 'FluentValidation.Tests.EmailValidatorTests ‑ ' + 'Valid_email_addresses_regex(email: null)\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'Passes_for_ienumerable_that_doesnt_implement_ICollection\n' + 'FluentValidation.Tests.EmptyTester ‑ Passes_when_collection_empty\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'When_there_is_a_value_then_the_validator_should_fail\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'When_validation_fails_error_should_be_set\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'When_value_is_Default_for_type_validator_should_pass_datetime\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'When_value_is_Default_for_type_validator_should_pass_int\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'When_value_is_empty_string_validator_should_pass\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'When_value_is_null_validator_should_pass\n' + 'FluentValidation.Tests.EmptyTester ‑ ' + 'When_value_is_whitespace_validation_should_pass\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Flags_enum_invalid_when_using_outofrange_negative_value\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Flags_enum_invalid_when_using_outofrange_positive_value\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Flags_enum_valid_when_using_bitwise_value\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Flags_enum_validates_correctly_when_using_zero_value\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Flags_enum_with_overlapping_flags_valid_when_using_bitwise_value\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ IsValidTests\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Nullable_enum_invalid_when_bad_value_specified\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Nullable_enum_valid_when_property_value_is_null\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'Nullable_enum_valid_when_value_specified\n' + 'FluentValidation.Tests.EnumValidatorTests ‑ ' + 'When_the_enum_is_initialized_with_invalid_value_then_the_validator_' + 'should_fail\nFluentValidation.Tests.EnumValidatorTests ‑ ' + 
'When_the_enum_is_not_initialized_with_valid_value_then_the_validato' + 'r_should_fail\nFluentValidation.Tests.EnumValidatorTests ‑ ' + 'When_validation_fails_the_default_error_should_be_set\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'Comparison_property_uses_custom_resolver\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'Should_store_comparison_type\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'Should_store_property_to_compare\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'Should_succeed_on_case_insensitive_comparison\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'Should_succeed_on_case_insensitive_comparison_using_expression\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'Should_use_ordinal_comparison_by_default\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'Validates_against_property\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'When_the_objects_are_equal_validation_should_succeed\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'When_the_objects_are_not_equal_validation_should_fail\n' + 'FluentValidation.Tests.EqualValidatorTests ‑ ' + 'When_validation_fails_the_error_should_be_set\n' + 'FluentValidation.Tests.ExactLengthValidatorTester ‑ ' + 'Min_and_max_properties_should_be_set\n' + 'FluentValidation.Tests.ExactLengthValidatorTester ‑ ' + 'When_exact_length_rule_failes_error_should_have_exact_length_error_' + 'errorcode\nFluentValidation.Tests.ExactLengthValidatorTester ‑ ' + 'When_the_text_is_an_exact_length_the_validator_should_pass\n' + 'FluentValidation.Tests.ExactLengthValidatorTester ‑ ' + 'When_the_text_length_is_larger_the_validator_should_fail\n' + 'FluentValidation.Tests.ExactLengthValidatorTester ‑ ' + 'When_the_text_length_is_smaller_the_validator_should_fail\n' + 'FluentValidation.Tests.ExactLengthValidatorTester ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'To_and_from_properties_should_be_set\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'To_and_from_properties_should_be_set_for_dates\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'To_and_from_properties_should_be_set_for_strings\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'Validates_with_nullable_when_property_is_null\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'Validates_with_nullable_when_property_not_null\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_text_is_larger_than_the_range_then_the_validator_should_fa' + 'il\nFluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_text_is_larger_than_the_range_then_the_validator_should_fa' + 'il_for_strings\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_to_is_smaller_than_the_from_then_the_validator_should_thro' + 'w\nFluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_to_is_smaller_than_the_from_then_the_validator_should_thro' + 'w_for_strings\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set_for_string' + 's\nFluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_between_the_range_specified_then_the_validator_sh' + 'ould_pass\nFluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 
'When_the_value_is_between_the_range_specified_then_the_validator_sh' + 'ould_pass_for_strings\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_vali' + 'dator_should_fail\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_vali' + 'dator_should_fail_for_strings\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_vali' + 'dator_should_fail\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_vali' + 'dator_should_fail_for_strings\n' + 'FluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_smaller_than_the_range_then_the_validator_should_' + 'fail\nFluentValidation.Tests.ExclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_smaller_than_the_range_then_the_validator_should_' + 'fail_for_strings\nFluentValidation.Tests.ExtensionTester ‑ ' + 'Should_extract_member_from_member_expression\n' + 'FluentValidation.Tests.ExtensionTester ‑ ' + 'Should_return_null_for_non_member_expressions\n' + 'FluentValidation.Tests.ExtensionTester ‑ ' + 'Should_split_pascal_cased_member_name\n' + 'FluentValidation.Tests.ExtensionTester ‑ ' + 'SplitPascalCase_should_return_null_when_input_is_null\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Async_condition_should_work_with_child_collection\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Can_access_colletion_index\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Can_access_colletion_index_async\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ Can_access_parent_index\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Can_access_parent_index_async\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Can_specify_condition_for_individual_collection_elements\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Can_use_cascade_with_RuleForEach\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Can_validate_collection_using_validator_for_base_type\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Collection_should_be_excluded\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Collection_should_be_explicitly_included_with_expression\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Collection_should_be_explicitly_included_with_string\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Condition_should_work_with_child_collection\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Correctly_gets_collection_indices\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Correctly_gets_collection_indices_async\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Executes_rule_for_each_item_in_collection\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Executes_rule_for_each_item_in_collection_async\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Nested_collection_for_null_property_should_not_throw_null_reference' + '\nFluentValidation.Tests.ForEachRuleTests ‑ ' + 'Nested_conditions_Rule_For\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Nested_conditions_Rule_For_Each\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ Overrides_indexer\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ Overrides_indexer_async\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Regular_rules_can_drop_into_RuleForEach\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'RuleForEach_async_RunsTasksSynchronously\n' + 
'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Should_not_scramble_property_name_when_using_collection_validators_' + 'several_levels_deep\nFluentValidation.Tests.ForEachRuleTests ‑ ' + 'Should_not_scramble_property_name_when_using_collection_validators_' + 'several_levels_deep_with_ValidateAsync\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Should_override_property_name\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ Skips_null_items\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ Top_level_collection\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Uses_useful_error_message_when_used_on_non_property\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Validates_child_validator_asynchronously\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'Validates_child_validator_synchronously\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ Validates_collection\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'When_runs_outside_RuleForEach_loop\n' + 'FluentValidation.Tests.ForEachRuleTests ‑ ' + 'When_runs_outside_RuleForEach_loop_async\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Comparison_property_uses_custom_resolver\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Comparison_type\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Should_fail_when_less_than_input\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Should_localize_value\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Should_set_default_error_when_validation_fails\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Should_succeed_when_equal_to_input\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Should_succeed_when_greater_than_input\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Validates_nullable_with_nullable_property\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_property\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_is_null\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_is_null_cross_property\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null_cross_property\n' + 'FluentValidation.Tests.GreaterThanOrEqualToValidatorTester ‑ ' + 'Validates_with_property\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Comparison_Type\nFluentValidation.Tests.GreaterThanValidatorTester ' + '‑ Comparison_property_uses_custom_resolver\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Should_fail_when_equal_to_input\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Should_fail_when_less_than_input\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Should_set_default_error_when_validation_fails\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Should_succeed_when_greater_than_input\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Validates_nullable_with_nullable_property\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Validates_with_nullable_property\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_is_null\n' + 
'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_is_null_cross_property\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null_cross_property\n' + 'FluentValidation.Tests.GreaterThanValidatorTester ‑ ' + 'Validates_with_property\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'To_and_from_properties_should_be_set\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'To_and_from_properties_should_be_set_for_strings\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'Validates_with_nullable_when_property_is_null\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'Validates_with_nullable_when_property_not_null\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_text_is_larger_than_the_range_then_the_validator_should_fa' + 'il\nFluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_text_is_larger_than_the_range_then_the_validator_should_fa' + 'il_for_strings\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_to_is_smaller_than_the_from_then_the_validator_should_thro' + 'w\nFluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_to_is_smaller_than_the_from_then_the_validator_should_thro' + 'w_for_strings\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set_for_string' + 's\nFluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_between_the_range_specified_then_the_validator_sh' + 'ould_pass\nFluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_between_the_range_specified_then_the_validator_sh' + 'ould_pass_for_strings\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_vali' + 'dator_should_pass\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_vali' + 'dator_should_pass_for_strings\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_vali' + 'dator_should_pass\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_vali' + 'dator_should_pass_for_strings\n' + 'FluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_smaller_than_the_range_then_the_validator_should_' + 'fail\nFluentValidation.Tests.InclusiveBetweenValidatorTests ‑ ' + 'When_the_value_is_smaller_than_the_range_then_the_validator_should_' + 'fail_for_strings\nFluentValidation.Tests.InheritanceValidatorTest ' + '‑ Can_use_custom_subclass_with_nongeneric_overload\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_collection\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_collection_async\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_inheritance_async\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_inheritance_hierarchy\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 
'Validates_ruleset\nFluentValidation.Tests.InheritanceValidatorTest ' + '‑ Validates_ruleset_async\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_with_callback\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_with_callback_accepting_derived\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_with_callback_accepting_derived_async\n' + 'FluentValidation.Tests.InheritanceValidatorTest ‑ ' + 'Validates_with_callback_async\n' + 'FluentValidation.Tests.InlineValidatorTester ‑ ' + 'Uses_inline_validator_to_build_rules\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'All_languages_should_be_loaded\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'All_localizations_have_same_parameters_as_English\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Always_use_specific_language\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Always_use_specific_language_with_string_source\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ Can_replace_message\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Can_replace_message_without_overriding_all_languages\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Disables_localization\nFluentValidation.Tests.LanguageManagerTests ' + '‑ ' + 'Falls_back_to_default_localization_key_when_error_code_key_not_foun' + 'd\nFluentValidation.Tests.LanguageManagerTests ‑ ' + 'Falls_back_to_english_when_culture_not_registered\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Falls_back_to_english_when_translation_missing\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Falls_back_to_parent_culture\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_bosnian_latin_culture(cultureName: "bs")\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_bosnian_latin_culture(cultureName: "bs-Latn")\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_bosnian_latin_culture(cultureName: ' + '"bs-Latn-BA")\nFluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_croatian_culture\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_culture\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_serbian_culture(cultureName: "sr")\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_serbian_culture(cultureName: "sr-Latn")\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_serbian_culture(cultureName: "sr-Latn-RS")\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Gets_translation_for_specific_culture\n' + 'FluentValidation.Tests.LanguageManagerTests ‑ ' + 'Uses_error_code_as_localization_key\n' + 'FluentValidation.Tests.LengthValidatorTests ‑ ' + 'Min_and_max_properties_should_be_set\n' + 'FluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_input_is_null_then_the_validator_should_pass\n' + 'FluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_max_is_smaller_than_the_min_then_the_validator_should_thro' + 'w\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_maxlength_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_minlength_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_between_the_lambda_range_specified_then_the_valida' + 'tor_should_pass\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 
'When_the_text_is_between_the_range_specified_then_the_validator_sho' + 'uld_pass\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_exactly_the_size_of_the_lambda_lower_bound_then_th' + 'e_validator_should_pass\n' + 'FluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_exactly_the_size_of_the_lambda_upper_bound_then_th' + 'e_validator_should_pass\n' + 'FluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_exactly_the_size_of_the_lower_bound_then_the_valid' + 'ator_should_pass\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_exactly_the_size_of_the_upper_bound_then_the_valid' + 'ator_should_pass\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_larger_than_the_lambda_range_then_the_validator_sh' + 'ould_fail\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_larger_than_the_range_then_the_validator_should_fa' + 'il\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_smaller_than_the_lambda_range_then_the_validator_s' + 'hould_fail\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_text_is_smaller_than_the_range_then_the_validator_should_f' + 'ail\nFluentValidation.Tests.LengthValidatorTests ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Comparison_property_uses_custom_resolver\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Comparison_type\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Should_fail_when_greater_than_input\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Should_set_default_error_when_validation_fails\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Should_succeed_when_equal_to_input\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Should_succeed_when_less_than_input\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Validates_nullable_with_nullable_property\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_property\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_is_null\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_is_null_cross_property\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null_cross_property\n' + 'FluentValidation.Tests.LessThanOrEqualToValidatorTester ‑ ' + 'Validates_with_property\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Comparison_property_uses_custom_resolver\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ Comparison_type\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Extracts_property_from_constant_using_expression\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Extracts_property_from_expression\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Should_fail_when_equal_to_input\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Should_fail_when_greater_than_input\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Should_set_default_validation_message_when_validation_fails\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Should_succeed_when_less_than_input\n' + 
'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Should_throw_when_value_to_compare_is_null\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Validates_against_property\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Validates_nullable_with_nullable_property\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Validates_with_nullable_property\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_is_null\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_not_null_cross_property\n' + 'FluentValidation.Tests.LessThanValidatorTester ‑ ' + 'Validates_with_nullable_when_property_null_cross_property\n' + 'FluentValidation.Tests.LocalisedMessagesTester ‑ ' + 'Correctly_assigns_default_localized_error_message\n' + 'FluentValidation.Tests.LocalisedMessagesTester ‑ ' + 'Does_not_throw_InvalidCastException_when_using_RuleForEach\n' + 'FluentValidation.Tests.LocalisedMessagesTester ‑ ' + 'Formats_string_with_placeholders\n' + 'FluentValidation.Tests.LocalisedMessagesTester ‑ ' + 'Formats_string_with_placeholders_when_you_cant_edit_the_string\n' + 'FluentValidation.Tests.LocalisedMessagesTester ‑ ' + 'Uses_func_to_get_message\n' + 'FluentValidation.Tests.LocalisedMessagesTester ‑ ' + 'Uses_string_format_with_property_value\n' + 'FluentValidation.Tests.LocalisedNameTester ‑ Uses_localized_name\n' + 'FluentValidation.Tests.LocalisedNameTester ‑ ' + 'Uses_localized_name_expression\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ ComplexPropertyGet\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ ComplexPropertySet\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ Equality\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ ImplicitCast\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ Name\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ SimpleFieldGet\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ SimpleFieldSet\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ SimplePropertyGet\n' + 'FluentValidation.Tests.MemberAccessorTests ‑ SimplePropertySet\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Adds_PropertyName_to_message\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Adds_argument_and_custom_arguments\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Adds_formatted_argument_and_custom_arguments\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Adds_formatted_argument_and_formatted_custom_arguments\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Adds_value_to_message\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Format_property_value\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Should_ignore_unknown_numbered_parameters\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Should_ignore_unknown_parameters\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Understands_date_formats\n' + 'FluentValidation.Tests.MessageFormatterTests ‑ ' + 'Understands_numeric_formats\n' + 'FluentValidation.Tests.ModelLevelValidatorTests ‑ ' + 'Can_use_child_validator_at_model_level\n' + 'FluentValidation.Tests.ModelLevelValidatorTests ‑ ' + 'Validates_at_model_level\n' + 'FluentValidation.Tests.NameResolutionPluggabilityTester ‑ ' + 'Resolves_nested_properties\n' + 'FluentValidation.Tests.NameResolutionPluggabilityTester ‑ ' + 'ShouldHaveValidationError_Should_support_custom_propertynameresolve' + 
'r\nFluentValidation.Tests.NameResolutionPluggabilityTester ‑ ' + 'Uses_custom_property_name\nFluentValidation.Tests.NotEmptyTester ‑ ' + 'Fails_for_array\nFluentValidation.Tests.NotEmptyTester ‑ ' + 'Fails_for_ienumerable_that_doesnt_implement_ICollection\n' + 'FluentValidation.Tests.NotEmptyTester ‑ ' + 'Fails_when_collection_empty\nFluentValidation.Tests.NotEmptyTester ' + '‑ When_there_is_a_value_then_the_validator_should_pass\n' + 'FluentValidation.Tests.NotEmptyTester ‑ ' + 'When_validation_fails_error_should_be_set\n' + 'FluentValidation.Tests.NotEmptyTester ‑ ' + 'When_value_is_Default_for_type_validator_should_fail_datetime\n' + 'FluentValidation.Tests.NotEmptyTester ‑ ' + 'When_value_is_Default_for_type_validator_should_fail_int\n' + 'FluentValidation.Tests.NotEmptyTester ‑ ' + 'When_value_is_empty_string_validator_should_fail\n' + 'FluentValidation.Tests.NotEmptyTester ‑ ' + 'When_value_is_null_validator_should_fail\n' + 'FluentValidation.Tests.NotEmptyTester ‑ ' + 'When_value_is_whitespace_validation_should_fail\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Comparison_property_uses_custom_resolver\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Should_handle_custom_value_types_correctly\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Should_not_be_valid_for_case_insensitve_comparison\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Should_not_be_valid_for_case_insensitve_comparison_with_expression\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Should_store_comparison_type\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Should_store_property_to_compare\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Should_use_ordinal_comparison_by_default\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'Validates_across_properties\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'When_the_objects_are_equal_then_the_validator_should_fail\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'When_the_objects_are_not_equal_then_the_validator_should_pass\n' + 'FluentValidation.Tests.NotEqualValidatorTests ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.NotNullTester ‑ ' + 'Fails_when_nullable_value_type_is_null\n' + 'FluentValidation.Tests.NotNullTester ‑ ' + 'NotNullValidator_should_fail_if_value_is_null\n' + 'FluentValidation.Tests.NotNullTester ‑ ' + 'NotNullValidator_should_pass_if_value_has_value\n' + 'FluentValidation.Tests.NotNullTester ‑ ' + 'Not_null_validator_should_not_crash_with_non_nullable_value_type\n' + 'FluentValidation.Tests.NotNullTester ‑ ' + 'When_the_validator_fails_the_error_message_should_be_set\n' + 'FluentValidation.Tests.NullTester ‑ ' + 'Not_null_validator_should_not_crash_with_non_nullable_value_type\n' + 'FluentValidation.Tests.NullTester ‑ ' + 'NullValidator_should_fail_if_value_has_value\n' + 'FluentValidation.Tests.NullTester ‑ ' + 'NullValidator_should_pass_if_value_is_null\n' + 'FluentValidation.Tests.NullTester ‑ ' + 'Passes_when_nullable_value_type_is_null\n' + 'FluentValidation.Tests.NullTester ‑ ' + 'When_the_validator_passes_the_error_message_should_be_set\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 'OnFailure_called_for_each_failed_rule\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 'OnFailure_called_for_each_failed_rule_asyncAsync\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 'ShouldHaveChildValidator_should_be_true\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 
'ShouldHaveChildValidator_works_with_Include\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 'Should_be_able_to_access_error_message_in_OnFailure\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 'WhenAsyncWithOnFailure_should_invoke_condition_on_async_inner_valid' + 'ator\nFluentValidation.Tests.OnFailureTests ‑ ' + 'WhenAsyncWithOnFailure_should_invoke_condition_on_inner_validator\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 'WhenAsyncWithOnFailure_should_invoke_condition_on_inner_validator_i' + 'nvoked_synchronously\nFluentValidation.Tests.OnFailureTests ‑ ' + 'WhenWithOnFailure_should_invoke_condition_on_async_inner_validator\n' + 'FluentValidation.Tests.OnFailureTests ‑ ' + 'WhenWithOnFailure_should_invoke_condition_on_inner_validator\n' + 'FluentValidation.Tests.PredicateValidatorTester ‑ ' + 'Should_fail_when_predicate_returns_false\n' + 'FluentValidation.Tests.PredicateValidatorTester ‑ ' + 'Should_succeed_when_predicate_returns_true\n' + 'FluentValidation.Tests.PredicateValidatorTester ‑ ' + 'Should_throw_when_predicate_is_null\n' + 'FluentValidation.Tests.PredicateValidatorTester ‑ ' + 'When_validation_fails_metadata_should_be_set_on_failure\n' + 'FluentValidation.Tests.PredicateValidatorTester ‑ ' + 'When_validation_fails_the_default_error_should_be_set\n' + 'FluentValidation.Tests.PropertyChainTests ‑ ' + 'AddIndexer_throws_when_nothing_added\n' + 'FluentValidation.Tests.PropertyChainTests ‑ ' + 'Calling_ToString_should_construct_string_representation_of_chain\n' + 'FluentValidation.Tests.PropertyChainTests ‑ ' + 'Calling_ToString_should_construct_string_representation_of_chain_wi' + 'th_indexers\nFluentValidation.Tests.PropertyChainTests ‑ ' + 'Creates_from_expression\nFluentValidation.Tests.PropertyChainTests ' + '‑ Should_be_subchain\nFluentValidation.Tests.PropertyChainTests ‑ ' + 'Should_ignore_blanks\nFluentValidation.Tests.PropertyChainTests ‑ ' + 'Should_not_be_subchain\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'Can_access_expression_in_message\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'Can_access_expression_in_message_lambda\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'Can_access_expression_in_message_lambda_regex\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'Uses_lazily_loaded_expression\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'Uses_lazily_loaded_expression_with_options\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'Uses_regex_object\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_the_text_does_not_match_the_lambda_regex_regular_expression_th' + 'en_the_validator_should_fail\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_the_text_does_not_match_the_lambda_regular_expression_then_the' + '_validator_should_fail\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_the_text_does_not_match_the_regular_expression_then_the_valida' + 'tor_should_fail\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_the_text_is_empty_then_the_validator_should_fail\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_the_text_is_null_then_the_validator_should_pass\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_the_text_matches_the_lambda_regex_regular_expression_then_the_' + 'validator_should_pass\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 
'When_the_text_matches_the_lambda_regular_expression_then_the_valida' + 'tor_should_pass\n' + 'FluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_the_text_matches_the_regular_expression_then_the_validator_sho' + 'uld_pass\nFluentValidation.Tests.RegularExpressionValidatorTests ‑ ' + 'When_validation_fails_the_default_error_should_be_set\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Adding_a_validator_should_return_builder\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Adding_a_validator_should_store_validator\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Calling_ValidateAsync_should_delegate_to_underlying_async_validator' + '\nFluentValidation.Tests.RuleBuilderTests ‑ ' + 'Calling_ValidateAsync_should_delegate_to_underlying_sync_validator\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Calling_validate_should_delegate_to_underlying_validator\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Conditional_child_validator_should_register_with_validator_type_not' + '_property\nFluentValidation.Tests.RuleBuilderTests ‑ ' + 'Nullable_object_with_async_condition_should_not_throw\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Nullable_object_with_condition_should_not_throw\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'PropertyDescription_should_return_custom_property_name\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'PropertyDescription_should_return_property_name_split\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Property_should_return_null_when_it_is_not_a_property_being_validat' + 'ed\nFluentValidation.Tests.RuleBuilderTests ‑ ' + 'Property_should_return_property_being_validated\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Result_should_use_custom_property_name_when_no_property_name_can_be' + '_determined\nFluentValidation.Tests.RuleBuilderTests ‑ ' + 'Rule_for_a_non_memberexpression_should_not_generate_property_name\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_build_property_name\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_compile_expression\nFluentValidation.Tests.RuleBuilderTests ' + '‑ Should_set_custom_error\nFluentValidation.Tests.RuleBuilderTests ' + '‑ Should_set_custom_property_name\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_if_message_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_if_overriding_validator_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_if_overriding_validator_provider_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_if_property_name_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_if_validator_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_when_async_inverse_predicate_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_when_async_predicate_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_when_context_predicate_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_when_inverse_context_predicate_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_when_inverse_predicate_is_null\n' + 'FluentValidation.Tests.RuleBuilderTests ‑ ' + 'Should_throw_when_predicate_is_null\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Async_inside_dependent_rules\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Async_inside_dependent_rules_when_parent_rule_not_async\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 
'Dependent_rules_inside_ruleset\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Dependent_rules_inside_when\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Does_not_invoke_dependent_rule_if_parent_rule_does_not_pass\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Invokes_dependent_rule_if_parent_rule_passes\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Nested_dependent_rules\nFluentValidation.Tests.RuleDependencyTests ' + '‑ Nested_dependent_rules_inside_ruleset\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Nested_dependent_rules_inside_ruleset_inside_method\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Nested_dependent_rules_inside_ruleset_no_result_when_second_level_f' + 'ails\nFluentValidation.Tests.RuleDependencyTests ‑ ' + 'Nested_dependent_rules_inside_ruleset_no_result_when_top_level_fail' + 's\nFluentValidation.Tests.RuleDependencyTests ‑ ' + 'TestAsyncWithDependentRules_AsyncEntry\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'TestAsyncWithDependentRules_SyncEntry\n' + 'FluentValidation.Tests.RuleDependencyTests ‑ ' + 'Treats_root_level_RuleFor_call_as_dependent_rule_if_user_forgets_to' + '_use_DependentRulesBuilder\nFluentValidation.Tests.RulesetTests ‑ ' + 'Applies_multiple_rulesets_to_rule\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Combines_rulesets_and_explicit_properties\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Combines_rulesets_and_explicit_properties_async\n' + 'FluentValidation.Tests.RulesetTests ‑ Executes_all_rules\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Executes_in_rule_in_default_and_none\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Executes_in_rule_in_ruleset_and_default\n' + 'FluentValidation.Tests.RulesetTests ‑ Executes_multiple_rulesets\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Executes_rules_in_default_ruleset_and_specific_ruleset\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Executes_rules_in_specified_ruleset\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Executes_rules_not_specified_in_ruleset\n' + 'FluentValidation.Tests.RulesetTests ‑ Includes_all_rulesets\n' + 'FluentValidation.Tests.RulesetTests ‑ Includes_all_rulesets_async\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Includes_combination_of_rulesets\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Includes_combination_of_rulesets_async\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Ruleset_cascades_to_child_collection_validator\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Ruleset_cascades_to_child_validator\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Ruleset_selection_should_cascade_downwards_with_when_setting_child_' + 'validator_using_include_statement\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Ruleset_selection_should_cascade_downwards_with_when_setting_child_' + 'validator_using_include_statement_with_lambda\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'Ruleset_selection_should_not_cascade_downwards_when_set_on_property' + '\nFluentValidation.Tests.RulesetTests ‑ Trims_spaces\n' + 'FluentValidation.Tests.RulesetTests ‑ ' + 'WithMessage_works_inside_rulesets\n' + 'FluentValidation.Tests.ScalePrecisionValidatorTests ‑ ' + 'Scale_precision_should_be_valid\n' + 'FluentValidation.Tests.ScalePrecisionValidatorTests ‑ ' + 'Scale_precision_should_be_valid_when_ignoring_trailing_zeroes\n' + 'FluentValidation.Tests.ScalePrecisionValidatorTests ‑ ' + 'Scale_precision_should_be_valid_when_they_are_equal\n' + 'FluentValidation.Tests.ScalePrecisionValidatorTests ‑ ' + 
'Scale_precision_should_not_be_valid\n' + 'FluentValidation.Tests.ScalePrecisionValidatorTests ‑ ' + 'Scale_precision_should_not_be_valid_when_ignoring_trailing_zeroes\n' + 'FluentValidation.Tests.ScalePrecisionValidatorTests ‑ ' + 'Scale_precision_should_not_be_valid_when_they_are_equal\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Async_condition_can_be_used_inside_ruleset\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Condition_can_be_used_inside_ruleset' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 804 tests, see "Raw output" for the list of tests 626 to ' + '804.', + 'title': '804 tests found (test 626 to 804)', + 'raw_details': + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Does_not_execute_custom_Rule_when_async_condition_false\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Does_not_execute_custom_Rule_when_condition_false\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Does_not_execute_customasync_Rule_when_async_condition_false\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Does_not_execute_customasync_Rule_when_condition_false\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Doesnt_throw_NullReferenceException_when_instance_not_null\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Doesnt_throw_NullReferenceException_when_instance_not_null_async\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Executes_custom_rule_when_async_condition_true\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Executes_custom_rule_when_condition_true\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Executes_customasync_rule_when_async_condition_true\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Executes_customasync_rule_when_condition_true\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Nested_async_conditions_with_CustomAsync_rule\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Nested_async_conditions_with_Custom_rule\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Nested_conditions_with_CustomAsync_rule\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Nested_conditions_with_Custom_rule\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Outer_Unless_clause_will_trump_an_inner_Unless_clause_when_inner_fa' + 'ils_but_the_outer_is_satisfied\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Outer_async_Unless_clause_will_trump_an_inner_Unless_clause_when_in' + 'ner_fails_but_the_outer_is_satisfied\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'RuleSet_can_be_used_inside_async_condition\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'RuleSet_can_be_used_inside_condition\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Rules_invoke_when_inverse_shared_async_condition_matches\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Rules_invoke_when_inverse_shared_condition_matches\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Rules_not_invoked_when_inverse_shared_async_condition_does_not_matc' + 'h\nFluentValidation.Tests.SharedConditionTests ‑ ' + 'Rules_not_invoked_when_inverse_shared_condition_does_not_match\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Runs_otherwise_conditions_for_UnlessAsync\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Runs_otherwise_conditions_for_When\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Runs_otherwise_conditions_for_WhenAsync\n' + 
'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Runs_otherwise_conditons_for_Unless\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_When_is_applied_to_groupd_rules_when_initial_predicate_is_tr' + 'ue_and_all_individual_rules_are_satisfied\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_When_is_applied_to_grouped_rules_when_initial_predicate_is_t' + 'rue\nFluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_When_is_not_applied_to_grouped_rules_when_initial_predicate_' + 'is_false\nFluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_When_respects_the_smaller_scope_of_a_inner_Unless_when_the_i' + 'nner_Unless_predicate_fails\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_When_respects_the_smaller_scope_of_an_inner_Unless_when_the_' + 'inner_Unless_predicate_is_satisfied\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_async_When_is_applied_to_groupd_rules_when_initial_predicate' + '_is_true_and_all_individual_rules_are_satisfied\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_async_When_is_applied_to_grouped_rules_when_initial_predicat' + 'e_is_true\nFluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_async_When_is_not_applied_to_grouped_rules_when_initial_pred' + 'icate_is_false\nFluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_async_When_respects_the_smaller_scope_of_a_inner_Unless_when' + '_the_inner_Unless_predicate_fails\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'Shared_async_When_respects_the_smaller_scope_of_an_inner_Unless_whe' + 'n_the_inner_Unless_predicate_is_satisfied\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'WhenAsync_condition_only_executed_once\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'When_async_condition_executed_for_each_instance_of_RuleForEach_cond' + 'ition_should_not_be_cached\n' + 'FluentValidation.Tests.SharedConditionTests ‑ ' + 'When_condition_executed_for_each_instance_of_RuleForEach_condition_' + 'should_not_be_cached\nFluentValidation.Tests.SharedConditionTests ' + '‑ When_condition_only_executed_once\n' + 'FluentValidation.Tests.StandalonePropertyValidationTester ‑ ' + 'Should_validate_property_value_without_instance\n' + 'FluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'IsValidTests_CaseInsensitive_CaseCorrect\n' + 'FluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'IsValidTests_CaseInsensitive_CaseIncorrect\n' + 'FluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'IsValidTests_CaseSensitive_CaseCorrect\n' + 'FluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'IsValidTests_CaseSensitive_CaseIncorrect\n' + 'FluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'When_enumType_is_not_an_enum_it_should_throw\n' + 'FluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'When_enumType_is_null_it_should_throw\n' + 'FluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'When_the_property_is_initialized_with_empty_string_then_the_validat' + 'or_should_fail\nFluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'When_the_property_is_initialized_with_invalid_string_then_the_valid' + 'ator_should_fail\nFluentValidation.Tests.StringEnumValidatorTests ' + '‑ ' + 'When_the_property_is_initialized_with_null_then_the_validator_shoul' + 'd_be_valid\nFluentValidation.Tests.StringEnumValidatorTests ‑ ' + 'When_validation_fails_the_default_error_should_be_set\n' + 'FluentValidation.Tests.TrackingCollectionTests ‑ Add_AddsItem\n' + 'FluentValidation.Tests.TrackingCollectionTests ‑ ' + 
'Should_not_raise_event_once_handler_detached\n' + 'FluentValidation.Tests.TrackingCollectionTests ‑ ' + 'When_Item_Added_Raises_ItemAdded\n' + 'FluentValidation.Tests.TransformTests ‑ ' + 'Transforms_collection_element\n' + 'FluentValidation.Tests.TransformTests ‑ ' + 'Transforms_collection_element_async\n' + 'FluentValidation.Tests.TransformTests ‑ Transforms_property_value\n' + 'FluentValidation.Tests.TransformTests ‑ ' + 'Transforms_property_value_to_another_type\n' + 'FluentValidation.Tests.UserSeverityTester ‑ ' + 'Can_Provide_conditional_severity\n' + 'FluentValidation.Tests.UserSeverityTester ‑ ' + 'Can_Provide_severity_for_item_in_collection\n' + 'FluentValidation.Tests.UserSeverityTester ‑ ' + 'Correctly_provides_object_being_validated\n' + 'FluentValidation.Tests.UserSeverityTester ‑ ' + 'Defaults_user_severity_to_error\n' + 'FluentValidation.Tests.UserSeverityTester ‑ ' + 'Should_use_last_supplied_severity\n' + 'FluentValidation.Tests.UserSeverityTester ‑ ' + 'Stores_user_severity_against_validation_failure\n' + 'FluentValidation.Tests.UserSeverityTester ‑ ' + 'Throws_when_provider_is_null\n' + 'FluentValidation.Tests.UserStateTester ‑ ' + 'Can_Provide_state_for_item_in_collection\n' + 'FluentValidation.Tests.UserStateTester ‑ ' + 'Correctly_provides_object_being_validated\n' + 'FluentValidation.Tests.UserStateTester ‑ ' + 'Stores_user_state_against_validation_failure\n' + 'FluentValidation.Tests.UserStateTester ‑ ' + 'Throws_when_provider_is_null\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Does_not_throw_when_valid\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Does_not_throw_when_valid_and_a_ruleset\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Does_not_throw_when_valid_and_a_ruleset_async\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Does_not_throw_when_valid_async\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Only_root_validator_throws\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ Populates_errors\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Serializes_exception\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ Throws_exception\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Throws_exception_async\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Throws_exception_with_a_ruleset\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'Throws_exception_with_a_ruleset_async\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'ToString_provides_error_details\n' + 'FluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'ValidationException_provides_correct_message_when_appendDefaultMess' + 'age_false\nFluentValidation.Tests.ValidateAndThrowTester ‑ ' + 'ValidationException_provides_correct_message_when_appendDefaultMess' + 'age_true\nFluentValidation.Tests.ValidationResultTests ‑ ' + 'Can_serialize_failure\n' + 'FluentValidation.Tests.ValidationResultTests ‑ ' + 'Can_serialize_result\nFluentValidation.Tests.ValidationResultTests ' + '‑ Should_add_errors\nFluentValidation.Tests.ValidationResultTests ' + '‑ Should_be_valid_when_there_are_no_errors\n' + 'FluentValidation.Tests.ValidationResultTests ‑ ' + 'Should_not_be_valid_when_there_are_errors\n' + 'FluentValidation.Tests.ValidationResultTests ‑ ' + 'ToString_return_empty_string_when_there_is_no_error\n' + 'FluentValidation.Tests.ValidationResultTests ‑ ' + 'ToString_return_error_messages_with_given_separator\n' + 'FluentValidation.Tests.ValidationResultTests ‑ ' + 
'ToString_return_error_messages_with_newline_as_separator\n' + 'FluentValidation.Tests.ValidatorDescriptorTester ‑ ' + 'Does_not_throw_when_rule_declared_without_property\n' + 'FluentValidation.Tests.ValidatorDescriptorTester ‑ ' + 'GetValidatorsForMember_and_GetRulesForMember_can_both_retrieve_for_' + 'model_level_rule\nFluentValidation.Tests.ValidatorDescriptorTester ' + '‑ Gets_validators_for_property\n' + 'FluentValidation.Tests.ValidatorDescriptorTester ‑ ' + 'Returns_empty_collection_for_property_with_no_validators\n' + 'FluentValidation.Tests.ValidatorDescriptorTester ‑ ' + 'Should_retrieve_name_given_to_it_pass_property_as_string\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Can_use_property_with_include\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Does_not_validate_other_property\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Does_not_validate_other_property_using_expression\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Executes_correct_rule_when_using_property_with_include\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Executes_correct_rule_when_using_property_with_include_async\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Includes_nested_property\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Includes_nested_property_using_expression\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'MemberNameValidatorSelector_returns_true_when_property_name_matches' + '\nFluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Validates_nullable_property_with_overriden_name_when_selected\n' + 'FluentValidation.Tests.ValidatorSelectorTests ‑ ' + 'Validates_property_using_expression\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Allows_only_one_failure_to_match\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Can_use_indexer_in_string_message\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Can_use_indexer_in_string_message_inverse\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Expected_error_code_check\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Expected_message_argument_check\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Expected_message_check\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Expected_severity_check\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Expected_state_check\nFluentValidation.Tests.ValidatorTesterTester ' + '‑ Matches_any_failure\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Matches_model_level_rule\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Model_level_check_fails_if_no_model_level_failures\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveChildValidator_should_not_throw_when_property_Does_have_c' + 'hild_validator\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveChildValidator_should_not_throw_when_property_Does_have_c' + 'hild_validator_and_expecting_a_basetype\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveChildValidator_should_not_throw_when_property_has_collect' + 'ion_validators\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveChildValidator_should_throw_when_property_has_a_different' + '_child_validator\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveChildValidator_should_work_with_DependentRules\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveChildValidator_throws_when_property_does_not_have_child_v' + 'alidator\nFluentValidation.Tests.ValidatorTesterTester 
‑ ' + 'ShouldHaveChildValidator_works_on_model_level_rules\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveChildvalidator_throws_when_collection_property_Does_not_h' + 'ave_child_validator\nFluentValidation.Tests.ValidatorTesterTester ' + '‑ ShouldHaveValidationErrorFor_takes_account_of_rulesets\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationErrorFor_takes_account_of_rulesets_fluent_appro' + 'ach\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_Should_support_nested_properties\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_Should_throw_when_there_are_no_validation' + '_errors\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_async\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_async_throws\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_model_async\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_model_async_throws\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_preconstructed_object_does_not_throw_for_' + 'unwritable_property\nFluentValidation.Tests.ValidatorTesterTester ' + '‑ ' + 'ShouldHaveValidationError_should_not_throw_when_there_are_errors_wi' + 'th_preconstructed_object\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_not_throw_when_there_are_validatio' + 'n_errors\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_not_throw_when_there_are_validatio' + 'n_errors__WhenAsyn_is_used(age: 42, cardNumber: "")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_not_throw_when_there_are_validatio' + 'n_errors__WhenAsyn_is_used(age: 42, cardNumber: null)\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_not_throw_when_there_are_validatio' + 'n_errors_ruleforeach\nFluentValidation.Tests.ValidatorTesterTester ' + '‑ ' + 'ShouldHaveValidationError_should_throw_when_there_are_no_validation' + '_errors_with_preconstructed_object\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_throw_when_there_are_not_validatio' + 'n_errors__WhenAsyn_Is_Used(age: 17, cardNumber: "")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_throw_when_there_are_not_validatio' + 'n_errors__WhenAsyn_Is_Used(age: 17, cardNumber: "cardNumber")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_throw_when_there_are_not_validatio' + 'n_errors__WhenAsyn_Is_Used(age: 17, cardNumber: null)\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_throw_when_there_are_not_validatio' + 'n_errors__WhenAsyn_Is_Used(age: 42, cardNumber: "cardNumber")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_should_throw_when_there_are_not_validatio' + 'n_errors_ruleforeach\nFluentValidation.Tests.ValidatorTesterTester ' + '‑ ' + 'ShouldHaveValidationError_with_an_unmatched_rule_and_a_single_error' + '_should_throw_an_exception\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldHaveValidationError_with_an_unmatched_rule_and_multiple_error' + 's_should_throw_an_exception\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHAveValidationError_should_not_throw_When_there_are_no_err' + 
'ors_with_preconstructed_object\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_Should_support_nested_properties\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_async\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_async_model_throws\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_async_throws\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_model_async\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_correctly_handle_explicitly_pro' + 'viding_object_to_validate\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_correctly_handle_explicitly_pro' + 'viding_object_to_validate_and_other_property_fails_validation\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_have_validation_error_details_w' + 'hen_thrown_ruleforeach\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_not_throw_when_there_are_no_err' + 'ors\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_not_throw_when_there_are_not_va' + 'lidation_errors_ruleforeach\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_errors\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_errors_wit' + 'h_preconstructed_object\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_not_valida' + 'tion_errors__WhenAsyn_is_used(age: 17, cardNumber: "")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_not_valida' + 'tion_errors__WhenAsyn_is_used(age: 17, cardNumber: "cardNumber")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_not_valida' + 'tion_errors__WhenAsyn_is_used(age: 17, cardNumber: null)\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_not_valida' + 'tion_errors__WhenAsyn_is_used(age: 42, cardNumber: "cardNumber")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_validation' + '_errors__WhenAsyn_is_used(age: 42, cardNumber: "")\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_validation' + '_errors__WhenAsyn_is_used(age: 42, cardNumber: null)\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'ShouldNotHaveValidationError_should_throw_when_there_are_validation' + '_errors_ruleforeach\nFluentValidation.Tests.ValidatorTesterTester ' + '‑ TestValidate_runs_async\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'TestValidate_runs_async_throws\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Tests_nested_property\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Tests_nested_property_reverse\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Tests_nested_property_using_obsolete_method\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Unexpected_error_code_check\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Unexpected_message_check(withoutErrMsg: "bar", errMessages: ' + 
'["bar"])\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Unexpected_message_check(withoutErrMsg: "bar", errMessages: ' + '["foo", "bar"])\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Unexpected_message_check(withoutErrMsg: "bar", errMessages: ' + '["foo"])\nFluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Unexpected_message_check(withoutErrMsg: "bar", errMessages: [])\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Unexpected_severity_check\n' + 'FluentValidation.Tests.ValidatorTesterTester ‑ ' + 'Unexpected_state_check' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/nunit/FluentValidation.Tests.junit-xml b/python/test/files/trx/nunit/FluentValidation.Tests.junit-xml new file mode 100644 index 0000000..e3a69f7 --- /dev/null +++ b/python/test/files/trx/nunit/FluentValidation.Tests.junit-xml @@ -0,0 +1,811 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/trx/nunit/FluentValidation.Tests.results b/python/test/files/trx/nunit/FluentValidation.Tests.results new file mode 100644 index 0000000..8e89db3 --- /dev/null +++ b/python/test/files/trx/nunit/FluentValidation.Tests.results @@ -0,0 +1,10651 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=804, + suite_skipped=1, + suite_failures=0, + suite_errors=0, + suite_time=3, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=804, + skipped=1, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_async_model_throws', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011796 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChildRulesTests', + test_name='ChildRules_works_with_RuleSet', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0058553 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Should_store_comparison_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004205 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Tests_nested_property_reverse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004663 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_fail_when_condition_matches', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.55e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Async_condition_is_applied_to_single_validator_in_the_chain_when_Appl' + 'yConditionTo_set_to_CurrentValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000818 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005804 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Regular_rules_can_drop_into_RuleForEach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008814 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Chained_property_should_be_excluded', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001883 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_throw_when_there_are_no_validation_e' + 'rrors_with_preconstructed_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008107 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_fail_when_condition_does_not_match', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.88e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + 
test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Should_ignore_unknown_parameters', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008556 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='To_and_from_properties_should_be_set_for_dates', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.18e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Runs_otherwise_conditions_for_When', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000563 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_exp' + 'licit_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002869 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExactLengthValidatorTester', + test_name='When_the_text_length_is_larger_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002571 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Executes_customasync_rule_when_async_condition_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011516 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_matches_the_lambda_regular_expression_then_the_validato' + 'r_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003275 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_validation_fails_the_default_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001265 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserStateTester', + test_name='Throws_when_provider_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002016 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_not_throw_when_there_are_no_error' + 's', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008221 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='When_the_property_is_initialized_with_invalid_string_then_the_validat' + 'or_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.87e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Extracts_property_from_constant_using_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.78e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Includes_combination_of_rulesets_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010528 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.TrackingCollectionTests', + test_name='Should_not_raise_event_once_handler_detached', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001609 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_validator_fails_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002535 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='NotEqual_should_create_NotEqualValidator_with_lambda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005555 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Should_ignore_unknown_numbered_parameters', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001013 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='When_the_objects_are_equal_validation_should_succeed', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003395 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Validates_nullable_property_with_overriden_name_when_selected', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0019595 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='Validates_with_nullable_when_property_not_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006361 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level_async_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008011 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.TransformTests', + test_name='Transforms_collection_element', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008965 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_matches_the_regular_expression_then_the_validator_shoul' + 'd_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001805 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Nested_dependent_rules_inside_ruleset_no_result_when_second_level_fai' + 'ls', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005888 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserSeverityTester', + test_name='Throws_when_provider_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001945 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_Should_throw_when_there_are_no_validation_e' + 'rrors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008389 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_succeed_when_async_condition_does_not_match', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000355 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Includes_combination_of_rulesets', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006445 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_ValidateAs' + 'ync(preValidationResult: )', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009155 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Validates_with_nullable_when_property_not_null', + result='success', 
+ message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002583 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserSeverityTester', + test_name='Can_Provide_conditional_severity', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002891 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Fails_email_validation_aspnetcore_compatible(email: " \\r \\t \\n")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.47e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Nested_async_conditions_with_CustomAsync_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009986 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='When_the_enum_is_not_initialized_with_valid_value_then_the_validator_' + 'should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.19e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_rul' + 'e_level_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002166 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='When_validation_fails_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002342 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_Should_support_nested_properties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009315 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='When_value_is_null_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002923 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='Uses_lazily_loaded_expression_with_options', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001451 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_not_main_state', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=0.0001976 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Can_use_cascade_with_RuleForEach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006943 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_Stop_globally_and_overrid' + 'en_at_rule_level_async_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014877 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='New_Custom_When_property_name_omitted_infers_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002338 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Rules_invoke_when_inverse_shared_async_condition_matches', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000943 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Skips_null_items', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005809 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NullTester', + test_name='Not_null_validator_should_not_crash_with_non_nullable_value_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005603 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Should_override_propertyName', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007523 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@some_domain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.62e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Flags_enum_validates_correctly_when_using_zero_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003854 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='When_the_validator_fails_the_error_message_should_be_set', + result='success', + 
message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002574 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_minlength_validator_fails_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003529 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_culture', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.41e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorDescriptorTester', + test_name='GetValidatorsForMember_and_GetRulesForMember_can_both_retrieve_for_mo' + 'del_level_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003935 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Nested_dependent_rules', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005419 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Combines_rulesets_and_explicit_properties_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Rules_invoke_when_inverse_shared_condition_matches', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001103 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Should_fail_when_greater_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.53e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Async_condition_should_work_with_child_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007258 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserSeverityTester', + test_name='Stores_user_severity_against_validation_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002021 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PredicateValidatorTester', + test_name='Should_throw_when_predicate_is_null', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.0003168 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_validate_single_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002735 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Should_fail_when_equal_to_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.29e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_Failure_when_set_to_Continue_globally_and_o' + 'verriden_at_rule_level_and_async_validator_is_invoked_synchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004818 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Executes_in_rule_in_default_and_none', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000354 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_when_inverse_predicate_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003373 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_inheritance_hierarchy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005062 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Async_condition_executed_synchronosuly_with_asynchronous_collection_r' + 'ule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003309 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_validator_fails_the_error_message_should_be_set_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002385 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: null)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002029 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Matches_any_failure', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.0006901 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Validates_type_when_using_non_generic_validate_overload', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001769 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Validates_with_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000321 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Calling_ValidateAsync_should_delegate_to_underlying_async_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0134579 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_serbian_culture(cultureName: "sr")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004456 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_max_is_smaller_than_the_min_then_the_validator_should_throw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004785 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='Passes_when_collection_empty', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00027 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotNullTester', + test_name='NotNullValidator_should_fail_if_value_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002029 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Validates_with_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002879 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_with_an_unmatched_rule_and_a_single_error_s' + 'hould_throw_an_exception', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006954 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Format_property_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002165 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Falls_back_to_english_when_culture_not_registered', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002145 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Only_root_validator_throws', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007993 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='WhenAsyncWithOnFailure_should_invoke_condition_on_inner_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014785 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AccessorCacheTests', + test_name='Identifies_if_memberexp_acts_on_model_instance', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002977 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_Stop_globally_and_overrid' + 'en_at_rule_level_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005266 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Result_should_use_custom_property_name_when_no_property_name_can_be_d' + 'etermined', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005762 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExactLengthValidatorTester', + test_name='When_exact_length_rule_failes_error_should_have_exact_length_error_er' + 'rorcode', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006217 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Flags_enum_with_overlapping_flags_valid_when_using_bitwise_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000299 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_Failure_when_set_to_Continue_globally_and_o' + 'verriden_at_rule_level_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000813 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Throws_exception_with_a_ruleset', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007043 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Ruleset_cascades_to_child_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005176 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Should_handle_custom_value_types_correctly', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0022529 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedMessagesTester', + test_name='Does_not_throw_InvalidCastException_when_using_RuleForEach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004547 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorDescriptorTester', + test_name='Returns_empty_collection_for_property_with_no_validators', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.85e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Should_succeed_when_greater_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.6e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='GreaterThan_should_create_GreaterThanValidator_with_lambda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004794 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Executes_rule_for_each_item_in_collection_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007972 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Should_fail_when_less_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.64e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Conditional_child_validator_should_register_with_validator_type_not_p' + 'roperty', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003936 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Invalid_email_addressex_regex(email: "testperso")', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=3.95e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Collection_should_be_explicitly_included_with_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007823 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_throw_for_non_member_expression_when_validating_single_propert' + 'y', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002162 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Adding_a_validator_should_store_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.07e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Async_inside_dependent_rules', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.5056573 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='New_Custom_Returns_single_failure_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000586 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_validate_single_property_where_property_as_string', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003409 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Can_replace_message_without_overriding_all_languages', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001258 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WithName_should_override_field_name_with_value_from_other_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002316 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: ' + '"customer/department=shipping@example.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.34e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='To_and_from_properties_should_be_set', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=6.68e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_not_throw_when_there_are_validation_' + 'errors__WhenAsyn_is_used(age: 42, cardNumber: null)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000746 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Multiple_rules_in_chain_with_childvalidator_shouldnt_reuse_accessor_a' + 'sync', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0057314 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_with_callback_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011748 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007821 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Validates_complex_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009826 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='Should_use_ordinal_comparison_by_default', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001642 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Executes_correct_rule_when_using_property_with_include_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0017452 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Validates_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010225 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Should_localize_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Comparison_property_uses_custom_resolver', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=0.0005497 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_rul' + 'e_level_async_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008677 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001733 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomMessageFormatTester', + test_name='Uses_custom_delegate_for_building_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005953 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='SimplePropertyGet', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004082 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Falls_back_to_default_localization_key_when_error_code_key_not_found', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002815 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Should_set_default_error_when_validation_fails', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001125 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_be_valid_when_there_are_no_failures_for_single_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001968 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Must_should_create_PredicateValidator_with_context', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008328 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Fails_email_validation_aspnetcore_compatible(email: ' + '"@someDomain@abc.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.34e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='Fails_when_collection_empty', + result='success', + message=None, 
+ content=None, + stdout=None, + stderr=None, + time=0.0005821 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='When_value_is_Default_for_type_validator_should_pass_datetime', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003325 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='ComplexPropertySet', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004413 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Validates_with_nullable_when_property_not_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002708 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='When_runs_outside_RuleForEach_loop_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012558 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ModelLevelValidatorTests', + test_name='Validates_at_model_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003216 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedMessagesTester', + test_name='Uses_string_format_with_property_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007191 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Nested_collection_for_null_property_should_not_throw_null_reference', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004414 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Should_fail_when_greater_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001925 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_lam' + 'bda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0078787 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Validates_child_validator_synchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000356 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Nullable_enum_valid_when_value_specified', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0018848 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Doesnt_throw_NullReferenceException_when_instance_not_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001771 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='Min_and_max_properties_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001047 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WithMessage_and_WithErrorCode_should_override_error_message_and_error' + '_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001772 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0015238 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Nested_conditions_Rule_For', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009052 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Cascade_mode_can_be_set_after_validator_instantiated_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002101 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_validation_e' + 'rrors__WhenAsyn_is_used(age: 42, cardNumber: "")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006807 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PropertyChainTests', + test_name='AddIndexer_throws_when_nothing_added', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000171 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Collection_should_be_explicitly_included_with_string', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + 
time=0.0005652 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Executes_rules_not_specified_in_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001365 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Ruleset_selection_should_cascade_downwards_with_when_setting_child_va' + 'lidator_using_include_statement', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001585 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_model_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0013614 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Condition_should_work_with_complex_property_when_invoked_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008233 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='RuleSet_can_be_used_inside_condition', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000827 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NameResolutionPluggabilityTester', + test_name='Uses_custom_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002739 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Does_not_execute_customasync_Rule_when_condition_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003491 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_Failure_when_set_to_Continue_globally_and_o' + 'verriden_at_rule_level_async_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007979 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Matches_model_level_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006479 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Should_work_with_top_level_collection_validator_and_overriden_name', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.0003943 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_does_not_match_the_lambda_regex_regular_expression_then' + '_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003653 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildValidator_throws_when_property_does_not_have_child_val' + 'idator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003445 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Collection_should_be_explicitly_included_with_string', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005339 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_throw_when_there_are_not_validation_' + 'errors__WhenAsyn_Is_Used(age: 17, cardNumber: "")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0017937 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_smaller_than_the_range_then_the_validator_should_fa' + 'il', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002294 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Treats_root_level_RuleFor_call_as_dependent_rule_if_user_forgets_to_u' + 'se_DependentRulesBuilder', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003687 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_does_not_match_the_lambda_regular_expression_then_the_v' + 'alidator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001392 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Fails_email_validation_aspnetcore_compatible(email: ' + '"@someDomain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.48e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AccessorCacheTests', + test_name='Benchmark', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Should_set_default_validation_message_when_validation_fails', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000352 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedMessagesTester', + test_name='Uses_func_to_get_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002383 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_not_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002763 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Validates_nullable_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006029 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Can_specify_condition_for_individual_collection_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009154 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Can_validate_using_validator_for_base_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012612 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Length_should_create_MaximumLengthValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002942 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Trims_spaces', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004086 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorDescriptorTester', + test_name='Should_retrieve_name_given_to_it_pass_property_as_string', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005729 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='WhenAsync_condition_only_executed_once', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + 
test_file=None, + line=None, + class_name='FluentValidation.Tests.ExactLengthValidatorTester', + test_name='When_the_validator_fails_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002836 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_between_the_range_specified_then_the_validator_shou' + 'ld_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011727 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Must_should_create_PredicteValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004938 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Applies_multiple_rulesets_to_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004398 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Flags_enum_invalid_when_using_outofrange_negative_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000756 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PredicateValidatorTester', + test_name='When_validation_fails_the_default_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004204 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Fails_email_validation_aspnetcore_compatible(email: "someName@")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.27e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_input_is_null_then_the_validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000352 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Cascade_mode_can_be_set_after_validator_instantiated_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008338 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ModelLevelValidatorTests', + test_name='Can_use_child_validator_at_model_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004502 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', 
+ test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Separate_validation_on_chained_property_conditional', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002104 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_smaller_than_the_range_then_the_validator_should_fa' + 'il', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002469 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Flags_enum_invalid_when_using_outofrange_positive_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007806 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_not_throw_when_there_are_errors_with' + '_preconstructed_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006462 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ScalePrecisionValidatorTests', + test_name='Scale_precision_should_not_be_valid_when_they_are_equal', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004406 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Validates_single_property_by_path', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004851 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_when_async_inverse_predicate_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000354 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_when_inverse_context_predicate_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003354 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_not_throw_when_there_are_validation_' + 'errors_ruleforeach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008097 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_to_second_validator_when_first_validator_succeed' + 's_and_cascade_set_to_stop_async_legacy', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=0.0011758 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006633 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_when_context_predicate_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001421 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='When_validation_fails_the_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00023 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Should_throw_when_value_to_compare_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002891 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_between_the_lambda_range_specified_then_the_validato' + 'r_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotNullTester', + test_name='Fails_when_nullable_value_type_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003908 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Does_not_throw_when_valid_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001014 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='When_the_property_is_initialized_with_null_then_the_validator_should_' + 'be_valid', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.7e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_not_throw_when_there_are_validation_' + 'errors__WhenAsyn_is_used(age: 42, cardNumber: "")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006089 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_serbian_culture(cultureName: "sr-Latn")', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.84e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_smaller_than_the_range_then_the_validator_should_fa' + 'il_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002367 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PropertyChainTests', + test_name='Should_be_subchain', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001203 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='When_the_objects_are_equal_then_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002655 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='When_the_objects_are_not_equal_validation_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002192 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='Passes_for_ienumerable_that_doesnt_implement_ICollection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0030376 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='When_enumType_is_not_an_enum_it_should_throw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004576 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='When_validation_fails_the_default_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001061 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Async_condition_executed_synchronosuly_with_synchronous_role', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003076 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Tests_nested_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004914 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Validates_child_validator_asynchronously', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.0007232 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Separate_validation_on_chained_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001368 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='TestAsyncWithDependentRules_SyncEntry', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0013846 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Async_condition_is_applied_to_all_validators_in_the_chain', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008144 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Unexpected_message_check(withoutErrMsg: "bar", errMessages: ["bar"])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001548 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_model_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012782 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Expected_message_argument_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009659 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorDescriptorTester', + test_name='Does_not_throw_when_rule_declared_without_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001726 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Async_condition_executed_synchronosuly_with_asynchronous_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002385 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_val' + 'idator_level_and_overriden_at_rule_level_async_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010468 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Validates_collection_several_levels_deep', + result='success', + 
message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011437 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='Validates_with_nullable_when_property_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002242 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='When_value_is_whitespace_validation_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001968 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Disables_localization', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001637 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomMessageFormatTester', + test_name='Uses_property_value_in_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008185 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Comparison_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001094 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='When_condition_only_executed_once', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004167 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PropertyChainTests', + test_name='Should_not_be_subchain', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.03e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_When_is_applied_to_groupd_rules_when_initial_predicate_is_true' + '_and_all_individual_rules_are_satisfied', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000455 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Can_access_colletion_index', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002866 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_throw_when_there_are_not_validation_' + 'errors__WhenAsyn_Is_Used(age: 17, cardNumber: null)', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.0005484 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Rules_not_invoked_when_inverse_shared_async_condition_does_not_match', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0028571 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_not_validati' + 'on_errors__WhenAsyn_is_used(age: 17, cardNumber: "")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005719 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_when_predicate_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001542 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Nested_conditions_Rule_For_Each', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009411 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Should_not_be_valid_for_case_insensitve_comparison_with_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002824 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Correctly_gets_collection_indices', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003199 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_validate_single_property_where_invalid_property_as_string', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000216 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Nullable_enum_valid_when_property_value_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001594 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='Should_not_be_valid_when_there_are_errors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.49e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserSeverityTester', + test_name='Correctly_provides_object_being_validated', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.0003232 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Unexpected_error_code_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0016124 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Complex_property_should_be_excluded', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003153 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainingValidatorsTester', + test_name='Should_execute_multiple_validators', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001781 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_errors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008858 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Should_succeed_when_greater_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_throw_when_there_are_not_validation_' + 'errors__WhenAsyn_Is_Used(age: 42, cardNumber: "cardNumber")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008127 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='TestAsyncWithDependentRules_AsyncEntry', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0050739 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='All_localizations_have_same_parameters_as_English', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.021761 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Async_condition_should_work_with_child_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007127 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PropertyChainTests', + test_name='Calling_ToString_should_construct_string_representation_of_chain', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=7.48e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Falls_back_to_english_when_translation_missing', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000523 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotNullTester', + test_name='NotNullValidator_should_pass_if_value_has_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002119 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Unexpected_message_check(withoutErrMsg: "bar", errMessages: ["foo"])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003297 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_croatian_culture', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004599 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildValidator_should_work_with_DependentRules', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012294 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NameResolutionPluggabilityTester', + test_name='Resolves_nested_properties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005703 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WhenPreValidationReturnsTrue_ValidatorsGetHit_Validate', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004371 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='To_and_from_properties_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000131 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Uses_explicit_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005128 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='ToString_return_error_messages_with_given_separator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.27e-05 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Can_validate_collection_using_validator_for_base_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010737 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003672 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_not_validati' + 'on_errors__WhenAsyn_is_used(age: 42, cardNumber: "cardNumber")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000897 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0027288 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_valida' + 'tor_should_pass_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003005 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Fails_email_validation_aspnetcore_compatible(email: "0")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002222 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_fail_when_async_condition_does_not_match', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004177 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ScalePrecisionValidatorTests', + test_name='Scale_precision_should_not_be_valid', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006978 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Nullable_enum_invalid_when_bad_value_specified', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003847 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='LessThan_should_create_LessThanValidator_with_explicit_value', + result='success', + 
message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002742 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='New_Custom_Returns_single_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000335 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Top_level_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003379 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_not_throw_when_there_are_validation_' + 'errors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007782 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='IsValidTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001215 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='When_value_is_Default_for_type_validator_should_fail_datetime', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0019899 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "!def!xyz%abc@example.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.2e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Validates_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005174 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='ImplicitCast', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008766 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='When_value_is_Default_for_type_validator_should_fail_int', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0018552 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedMessagesTester', + test_name='Formats_string_with_placeholders', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002342 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + 
test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Multiple_rules_in_chain_with_childvalidator_shouldnt_reuse_accessor', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007222 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Doesnt_throw_NullReferenceException_when_instance_not_null_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014608 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='SimpleFieldGet', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004109 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomMessageFormatTester', + test_name='Replaces_propertyvalue_with_empty_string_when_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002456 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedNameTester', + test_name='Uses_localized_name_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008903 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_does_not_match_the_regular_expression_then_the_validato' + 'r_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001644 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomFailureActionTester', + test_name='Does_not_invoke_action_if_validation_success', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002528 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Should_succeed_when_less_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001783 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='CanValidateInstancesOfType_returns_true_when_comparing_against_same_t' + 'ype', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.65e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000909 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Executes_custom_rule_when_async_condition_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007245 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Should_not_scramble_property_name_when_using_collection_validators_se' + 'veral_levels_deep_with_ValidateAsync', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0021348 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Adds_formatted_argument_and_formatted_custom_arguments', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002678 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "\\"Abc\\\\@def\\"@example.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.6e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PredicateValidatorTester', + test_name='When_validation_fails_metadata_should_be_set_on_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012067 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PropertyChainTests', + test_name='Calling_ToString_should_construct_string_representation_of_chain_with' + '_indexers', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002382 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Does_not_throw_when_valid_and_a_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003775 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Comparison_property_uses_custom_resolver', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003967 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Can_replace_default_errorcode_resolver', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003158 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Calling_validate_should_delegate_to_underlying_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014739 + 
), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NullTester', + test_name='NullValidator_should_pass_if_value_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003543 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserStateTester', + test_name='Correctly_provides_object_being_validated', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003223 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_larger_than_the_range_then_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002796 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='When_the_enum_is_initialized_with_invalid_value_then_the_validator_sh' + 'ould_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002553 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Tests_nested_property_using_obsolete_method', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004676 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='Can_access_expression_in_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002264 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='Can_serialize_result', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0071021 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_val' + 'idator_level_and_overriden_at_rule_level_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001072 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Async_condition_can_be_used_inside_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0018948 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Comparison_Type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002061 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Validates_child_validator_synchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004716 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CreditCardValidatorTests', + test_name='IsValidTests', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002171 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_to_second_validator_when_first_validator_succeed' + 's_and_cascade_set_to_stop', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001667 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='When_enumType_is_null_it_should_throw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0021549 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Validates_nullable_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004362 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomMessageFormatTester', + test_name='Replaces_propertyvalue_placeholder', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009778 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_throw_when_there_are_not_validation_' + 'errors_ruleforeach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0022202 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Should_store_property_to_compare', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001834 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Validates_with_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004626 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005735 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='Runs_sync_rule_asynchronously_when_validator_invoked_asynchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005199 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Does_not_invoke_dependent_rule_if_parent_rule_does_not_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000328 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Executes_rules_in_specified_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001594 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Async_condition_should_work_with_complex_property_when_validator_invo' + 'ked_synchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007052 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@someDomain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.67e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WithErrorCode_should_override_error_code', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001849 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='WhenAsyncWithOnFailure_should_invoke_condition_on_async_inner_validat' + 'or', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006154 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_rul' + 'e_level_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008319 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Should_allow_normal_rules_and_chained_property_on_same_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002278 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_When_respects_the_smaller_scope_of_a_inner_Unless_when_the_inn' + 
'er_Unless_predicate_fails', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001544 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='LessThanOrEqual_should_create_LessThanOrEqualValidator_with_lambda_wi' + 'th_other_Nullable', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004351 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Validates_with_nullable_when_property_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004635 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='When_the_objects_are_not_equal_then_the_validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002334 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Nested_dependent_rules_inside_ruleset_no_result_when_top_level_fails', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0016869 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='Should_be_valid_when_there_are_no_errors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.35e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Default_error_code_should_be_class_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001555 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_exactly_the_size_of_the_lambda_upper_bound_then_the_' + 'validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003149 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StandalonePropertyValidationTester', + test_name='Should_validate_property_value_without_instance', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003758 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Includes_nested_property_using_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005054 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + 
class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: null)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004724 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='MustAsync_should_create_AsyncPredicteValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005833 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Length_should_create_ExactLengthValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003392 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='Can_access_expression_in_message_lambda_regex', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002767 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Should_set_default_error_when_validation_fails', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001047 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_valida' + 'tor_should_fail_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000234 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_When_respects_the_smaller_scope_of_an_inner_Unless_when_the_in' + 'ner_Unless_predicate_is_satisfied', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0019156 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='ToString_return_empty_string_when_there_is_no_error', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001348 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Allows_only_one_failure_to_match', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007083 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Executes_custom_rule_when_condition_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010053 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_validation_e' + 'rrors__WhenAsyn_is_used(age: 42, cardNumber: null)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008635 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_bosnian_latin_culture(cultureName: "bs-Latn-BA")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.1e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_smaller_than_the_range_then_the_validator_should_fa' + 'il_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002161 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Validates_chained_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010483 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Runs_otherwise_conditions_for_WhenAsync', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001126 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='When_async_condition_executed_for_each_instance_of_RuleForEach_condit' + 'ion_should_not_be_cached', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012985 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Length_should_create_MinimumLengthValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002645 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Executes_in_rule_in_ruleset_and_default', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003802 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Should_set_default_error_when_validation_fails', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001044 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Can_use_custom_subclass_with_nongeneric_overload', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=0.0006978 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Serializes_exception', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007024 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Nested_async_conditions_with_Custom_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007695 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='PreValidate_bypasses_nullcheck_on_instance', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002142 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='GreaterThan_should_create_GreaterThanValidator_with_explicit_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002587 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Invalid_email_addressex_regex(email: ' + '"thisisaverylongstringcodeplex.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.16e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WithMessage_should_override_error_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000159 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Expected_error_code_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005874 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Understands_numeric_formats', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001847 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Should_override_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004755 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Adds_formatted_argument_and_custom_arguments', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.08e-05 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Nested_conditions_with_Custom_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004756 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='New_Custom_When_property_name_omitted_infers_property_name_nested', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006318 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Validates_property_using_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0013178 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_not_validati' + 'on_errors__WhenAsyn_is_used(age: 17, cardNumber: "cardNumber")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005838 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Extracts_property_from_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005257 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Validates_collection_several_levels_deep_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010832 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_Failure_when_set_to_Continue_globally_and_o' + 'verriden_at_rule_level_and_async_validator_is_invoked_synchronously_l' + 'egacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005077 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Should_use_ordinal_comparison_by_default', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002273 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AssemblyScannerTester', + test_name='Finds_validators_for_types', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002381 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Falls_back_to_parent_culture', + result='success', + 
message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004487 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='ShouldHaveChildValidator_should_be_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012439 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='WhenAsyncWithOnFailure_should_invoke_condition_on_inner_validator_inv' + 'oked_synchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003084 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001809 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='When_the_property_is_initialized_with_empty_string_then_the_validator' + '_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001593 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AssemblyScannerTester', + test_name='ForEach_iterates_over_types', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0018574 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_serbian_culture(cultureName: "sr-Latn-RS")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Nullable_object_with_condition_should_not_throw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001088 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_smaller_than_the_range_then_the_validator_should_fai' + 'l', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004057 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AccessorCacheTests', + test_name='Equality_comparison_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0021363 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "\\"Joe\\\\Blow\\"@example.com")', + result='success', + 
message=None, + content=None, + stdout=None, + stderr=None, + time=2.44e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Property_should_return_null_when_it_is_not_a_property_being_validated', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003637 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Expected_severity_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0046721 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Can_access_parent_index', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0016135 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_if_overriding_validator_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001488 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_between_the_range_specified_then_the_validator_shoul' + 'd_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002179 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Should_succeed_when_less_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.2e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_succeed_when_async_condition_matches', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0015482 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomMessageFormatTester', + test_name='Should_format_custom_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002244 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Should_allow_normal_rules_and_complex_property_on_same_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007508 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_not_validati' + 'on_errors__WhenAsyn_is_used(age: 17, cardNumber: null)', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005804 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Collection_should_be_excluded', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000544 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='LessThanOrEqual_should_create_LessThanOrEqualValidator_with_explicit_' + 'value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006133 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Executes_rules_in_default_ruleset_and_specific_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004761 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Includes_all_rulesets', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008384 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Complex_validator_should_not_be_invoked_on_null_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001677 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_not_throw_when_there_are_not_vali' + 'dation_errors_ruleforeach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008012 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Condition_should_work_with_chained_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002054 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Async_condition_executed_synchronosuly_with_synchronous_collection_ro' + 'le', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003216 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildValidator_works_on_model_level_rules', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0013494 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + 
test_name='CanValidateInstancesOfType_returns_false_when_comparing_against_some_' + 'other_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.13e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='To_and_from_properties_should_be_set_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.7e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Collection_should_be_excluded', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000525 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Ruleset_cascades_to_child_collection_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008193 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Skips_null_items', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004786 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_is_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000466 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.TransformTests', + test_name='Transforms_collection_element_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012982 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_Validate(p' + 'reValidationResult: )', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009423 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_to_is_smaller_than_the_from_then_the_validator_should_throw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006236 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Cascade_mode_can_be_set_after_validator_instantiated_async_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0013079 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + 
class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='CanValidateInstancesOfType_returns_true_when_comparing_against_subcla' + 'ss', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.66e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_maxlength_validator_fails_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000356 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_with_an_unmatched_rule_and_multiple_errors_' + 'should_throw_an_exception', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0017809 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_when_set_to_Continue_at_validator_level_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010564 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Unexpected_state_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009751 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"!#$%&\'*+-/=?^_`|~@someDomain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.74e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='Validates_with_nullable_when_property_not_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004472 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_succeed_when_condition_matches', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004142 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='WithMessage_works_inside_rulesets', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001237 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='NotNull_should_create_NotNullValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002713 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='MustAsync_should_create_AsyncPredicateValidator_with_PropertyValidato' + 'rContext', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0016185 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WithName_should_override_field_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002032 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_Stop_globally_and_overrid' + 'en_at_rule_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000223 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainingValidatorsTester', + test_name='Options_should_only_apply_to_current_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002348 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@some~domain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.66e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotNullTester', + test_name='Not_null_validator_should_not_crash_with_non_nullable_value_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002735 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_When_is_not_applied_to_grouped_rules_when_initial_predicate_is' + '_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001183 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_if_property_name_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002522 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Validates_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005936 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='ToString_provides_error_details', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + 
time=0.0117391 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Ruleset_selection_should_cascade_downwards_with_when_setting_child_va' + 'lidator_using_include_statement_with_lambda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000822 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Uses_named_parameters_to_validate_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003616 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='When_validation_fails_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002777 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Sync_condition_is_applied_to_async_validators', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006349 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@someDomain\uffef.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.57e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='Should_store_property_to_compare', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003283 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_bosnian_latin_culture(cultureName: "bs-Latn")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.27e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='TestValidate_runs_async_throws', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000854 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='WhenWithOnFailure_should_invoke_condition_on_inner_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002855 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Nested_conditions_with_CustomAsync_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + 
time=0.0027533 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Can_validate_using_validator_for_base_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014496 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='When_value_is_empty_string_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002511 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"\\"firstName.lastName\\"@someDomain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.67e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Expected_state_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008908 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_exactly_the_size_of_the_upper_bound_then_the_validat' + 'or_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000329 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Does_not_throw_when_valid', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002097 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Overrides_indexer_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011101 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildValidator_should_not_throw_when_property_has_collectio' + 'n_validators', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005756 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ScalePrecisionValidatorTests', + test_name='Scale_precision_should_be_valid_when_ignoring_trailing_zeroes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0015273 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='All_languages_should_be_loaded', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0013878 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='When_the_validators_fail_then_validatorrunner_should_return_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001522 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_when_async_predicate_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001555 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='When_there_is_a_value_then_the_validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002293 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='To_and_from_properties_should_be_set_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.02e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Should_fail_when_equal_to_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='Can_serialize_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.95e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Unexpected_severity_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009091 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_val' + 'idator_level_and_overriden_at_rule_level_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002332 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_is_empty_then_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001049 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.TransformTests', + test_name='Transforms_property_value_to_another_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004268 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_not_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002607 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='LessThan_should_create_LessThanValidator_with_lambda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005323 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Can_use_property_with_include', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009319 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Outer_async_Unless_clause_will_trump_an_inner_Unless_clause_when_inne' + 'r_fails_but_the_outer_is_satisfied', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004874 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "__somename@example.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.19e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NullTester', + test_name='When_the_validator_passes_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002424 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Invalid_email_addressex_regex(email: "first.last@test..co.uk")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.34e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_async_When_respects_the_smaller_scope_of_an_inner_Unless_when_' + 'the_inner_Unless_predicate_is_satisfied', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004845 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.TrackingCollectionTests', + test_name='Add_AddsItem', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006937 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Executes_correct_rule_when_using_property_with_include', + result='success', + message=None, + 
content=None, + stdout=None, + stderr=None, + time=0.001118 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AccessorCacheTests', + test_name='Gets_member_for_nested_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001084 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Async_condition_should_work_with_complex_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0023119 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PropertyChainTests', + test_name='Should_ignore_blanks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.72e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_correctly_handle_explicitly_provi' + 'ding_object_to_validate_and_other_property_fails_validation', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014272 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='OnFailure_called_for_each_failed_rule_asyncAsync', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0018854 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "\\"Abc@def\\"@example.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.45e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Can_access_parent_index_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011019 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='Should_be_able_to_access_error_message_in_OnFailure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005239 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Always_use_specific_language_with_string_source', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003219 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Validates_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + 
time=0.0005793 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Overrides_indexer', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012315 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_Should_support_nested_properties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008523 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.TrackingCollectionTests', + test_name='When_Item_Added_Raises_ItemAdded', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008476 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_compile_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001225 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserStateTester', + test_name='Can_Provide_state_for_item_in_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001113 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "testperson@gmail.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0163384 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomFailureActionTester', + test_name='Invokes_custom_action_on_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002284 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Condition_is_applied_to_single_validator_in_the_chain_when_ApplyCondi' + 'tionTo_set_to_CurrentValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003365 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Explicitly_included_properties_should_be_propagated_to_nested_validat' + 'ors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0023437 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='Should_store_comparison_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003359 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='IsValidTests_CaseInsensitive_CaseCorrect', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001096 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExactLengthValidatorTester', + test_name='When_the_text_length_is_smaller_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004689 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='Fails_for_ienumerable_that_doesnt_implement_ICollection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012461 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Should_not_be_valid_for_case_insensitve_comparison', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004358 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_with_callback_accepting_derived', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Nullable_object_with_async_condition_should_not_throw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005033 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Comparison_property_uses_custom_resolver', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004998 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_have_validation_error_details_whe' + 'n_thrown_ruleforeach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012661 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEqualValidatorTests', + test_name='Validates_across_properties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003172 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NameResolutionPluggabilityTester', + test_name='ShouldHaveValidationError_Should_support_custom_propertynameresolver', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007316 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Outer_Unless_clause_will_trump_an_inner_Unless_clause_when_inner_fail' + 's_but_the_outer_is_satisfied', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001804 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InlineValidatorTester', + test_name='Uses_inline_validator_to_build_rules', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001679 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_is_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004822 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserSeverityTester', + test_name='Should_use_last_supplied_severity', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006834 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Can_use_indexer_in_string_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000562 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Runs_otherwise_conditions_for_UnlessAsync', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0041051 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='Should_add_errors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.17e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildvalidator_throws_when_collection_property_Does_not_hav' + 'e_child_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003538 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='OverridePropertyName_with_lambda_should_override_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003832 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Executes_all_rules', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007106 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Calling_ValidateAsync_should_delegate_to_underlying_sync_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0735933 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Does_not_execute_customasync_Rule_when_async_condition_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008363 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Expected_message_check', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006922 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='Fails_for_array', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000465 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ScalePrecisionValidatorTests', + test_name='Scale_precision_should_be_valid', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003178 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorDescriptorTester', + test_name='Gets_validators_for_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001789 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomMessageFormatTester', + test_name='Uses_custom_delegate_for_building_message_only_for_specific_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003785 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Explicitly_included_properties_should_be_propagated_to_nested_validat' + 'ors_using_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001753 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Correctly_gets_collection_indices_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000699 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_to_second_validator_when_first_validator_succeed' + 's_and_cascade_set_to_stop_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002155 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_larger_than_the_lambda_range_then_the_validator_shou' + 'ld_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003557 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_inheritance_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012652 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='New_custom_uses_empty_property_name_for_model_level_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002515 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Comparison_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.94e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedMessagesTester', + test_name='Correctly_assigns_default_localized_error_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011647 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_specific_culture', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.89e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_text_is_larger_than_the_range_then_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005079 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PropertyChainTests', + test_name='Creates_from_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001276 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExactLengthValidatorTester', + test_name='Min_and_max_properties_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.07e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Populates_errors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003478 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + 
class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='OverridePropertyName_should_override_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001676 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ScalePrecisionValidatorTests', + test_name='Scale_precision_should_not_be_valid_when_ignoring_trailing_zeroes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004372 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedNameTester', + test_name='Uses_localized_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003024 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='IsValidTests_CaseSensitive_CaseIncorrect', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001363 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Executes_customasync_rule_when_condition_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014208 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='MustAsync_should_not_throw_InvalidCastException', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0104516 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Comparison_property_uses_custom_resolver', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010333 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_async_When_is_not_applied_to_grouped_rules_when_initial_predic' + 'ate_is_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004089 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Unexpected_message_check(withoutErrMsg: "bar", errMessages: ["foo", ' + '"bar"])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001797 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Does_not_execute_custom_Rule_when_async_condition_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000904 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + 
test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='LessThanOrEqual_should_create_LessThanOrEqualValidator_with_lambda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005095 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='When_there_is_a_value_then_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001989 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_to_is_smaller_than_the_from_then_the_validator_should_throw_' + 'for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001329 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_model_async_throws', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011585 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Gets_translation_for_bosnian_latin_culture(cultureName: "bs")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000661 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Condition_should_work_with_child_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007357 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_between_the_range_specified_then_the_validator_shou' + 'ld_pass_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002261 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_valida' + 'tor_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002263 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='PropertyDescription_should_return_custom_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006558 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExtensionTester', + test_name='Should_return_null_for_non_member_expressions', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.0001368 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Property_should_return_property_being_validated', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.16e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Condition_should_work_with_child_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003728 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_succeed_when_condition_does_not_match', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002873 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Adds_PropertyName_to_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.79e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_exactly_the_size_of_the_lower_bound_then_the_validat' + 'or_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002191 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"firstName.lastName@someDomain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.91e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_failure_when_set_to_Continue_and_overriden_at_rul' + 'e_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002008 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_exactly_the_size_of_the_lambda_lower_bound_then_the_' + 'validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003377 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_valida' + 'tor_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002097 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + 
test_name='Fails_email_validation_aspnetcore_compatible(email: ' + '"someName@a@b.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.27e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_text_is_larger_than_the_range_then_the_validator_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002468 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Dependent_rules_inside_when', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00114 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Rule_for_a_non_memberexpression_should_not_generate_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003533 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_async_When_is_applied_to_grouped_rules_when_initial_predicate_' + 'is_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000946 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_to_second_validator_when_first_validator_succeed' + 's_and_cascade_set_to_stop_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000987 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Ruleset_selection_should_not_cascade_downwards_when_set_on_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009641 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_valida' + 'tor_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002839 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Separate_validation_on_chained_property_valid', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002025 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WhenPreValidationReturnsTrue_ValidatorsGetHit_ValidateAsync', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0015404 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHAveValidationError_should_not_throw_When_there_are_no_error' + 's_with_preconstructed_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000721 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_valida' + 'tor_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001963 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_StopOnFirstFailure_at_val' + 'idator_level_and_overriden_at_rule_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001937 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Adds_value_to_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.83e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='TestValidate_runs_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007477 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_Failure_when_set_to_Continue_globally_and_o' + 'verriden_at_rule_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000188 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Condition_is_applied_to_all_validators_in_the_chain', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003098 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Validates_with_nullable_when_property_not_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005453 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_When_is_applied_to_grouped_rules_when_initial_predicate_is_tru' + 'e', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001524 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + 
test_name='Dependent_rules_inside_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004097 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_upper_bound_then_the_valida' + 'tor_should_pass_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001956 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='Perserves_property_chain_using_custom', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008351 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Unexpected_message_check(withoutErrMsg: "bar", errMessages: [])', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007346 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Empty_should_create_EmptyValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002993 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='ShouldHaveChildValidator_works_with_Include', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002922 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='ValidationException_provides_correct_message_when_appendDefaultMessag' + 'e_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001514 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExactLengthValidatorTester', + test_name='When_the_text_is_an_exact_length_the_validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='ValidationException_provides_correct_message_when_appendDefaultMessag' + 'e_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.92e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='PropertyDescription_should_return_property_name_split', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004031 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + 
class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Should_fail_when_less_than_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003948 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='New_Custom_within_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005423 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='IsValidTests_CaseInsensitive_CaseIncorrect', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002316 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='RuleForeach_with_null_instances', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0019478 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='When_the_validators_fail_then_the_errors_Should_be_accessible_via_the' + '_errors_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00017 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_set_custom_error', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001428 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010423 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Nested_dependent_rules_inside_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005315 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Invokes_dependent_rule_if_parent_rule_passes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003376 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Should_work_with_top_level_collection_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007124 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + 
test_name='Validates_with_nullable_when_property_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003426 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "$A12345@example.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.26e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Must_should_create_PredicateValidator_with_PropertyValidatorContext', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0042745 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='SimpleFieldSet', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004092 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Validates_with_nullable_when_property_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserSeverityTester', + test_name='Can_Provide_severity_for_item_in_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007448 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='NotEqual_should_create_NotEqualValidator_with_explicit_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003685 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_set_custom_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003705 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_not_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000283 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='Validates_with_nullable_when_property_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002167 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + 
test_name='When_the_text_is_larger_than_the_range_then_the_validator_should_fail' + '_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002392 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ScalePrecisionValidatorTests', + test_name='Scale_precision_should_be_valid_when_they_are_equal', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000383 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Throws_exception_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004271 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_value_is_exactly_the_size_of_the_lower_bound_then_the_valida' + 'tor_should_fail_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009095 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Includes_all_rulesets_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010633 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='Should_succeed_on_case_insensitive_comparison', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002418 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildValidator_should_throw_when_property_has_a_different_c' + 'hild_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005867 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChildRulesTests', + test_name='Can_define_nested_rules_for_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0016096 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Should_not_scramble_property_name_when_using_collection_validators_se' + 'veral_levels_deep', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003437 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001706 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + 
class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Can_specify_condition_for_individual_collection_elements', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004461 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PredicateValidatorTester', + test_name='Should_fail_when_predicate_returns_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.17e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EnumValidatorTests', + test_name='Flags_enum_valid_when_using_bitwise_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0192413 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Explicitly_included_properties_should_be_propagated_to_nested_validat' + 'ors', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002691 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationErrorFor_takes_account_of_rulesets', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007521 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Combines_rulesets_and_explicit_properties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000997 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Validates_against_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012022 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildValidator_should_not_throw_when_property_Does_have_chi' + 'ld_validator_and_expecting_a_basetype', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004634 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Can_replace_message', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002451 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='Comparison_property_uses_custom_resolver', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003829 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + 
class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='Validates_against_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002933 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_validator_fails_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0113406 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_validate_single_Field', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002444 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Runs_otherwise_conditons_for_Unless', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008404 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Fails_email_validation_aspnetcore_compatible(email: "someName")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.3e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='Equality', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010753 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='SimplePropertySet', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004415 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='When_value_is_null_validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002377 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Can_access_colletion_index_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007781 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Chained_validator_should_not_be_invoked_on_null_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000121 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_with_callback_accepting_derived_async', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.0048985 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Can_directly_validate_multiple_fields_of_same_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0034497 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_text_is_larger_than_the_range_then_the_validator_should_fail' + '_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002377 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"1234@someDomain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.07e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Does_not_validate_other_property_using_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003211 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='When_validation_fails_the_default_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001968 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_ruleset_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0016248 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_collection_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0040405 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationErrorFor_takes_account_of_rulesets_fluent_approac' + 'h', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007776 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainingValidatorsTester', + test_name='Should_create_multiple_validators', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003311 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_errors_with_' + 'preconstructed_object', + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0023915 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: ' + '"someName@1234.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.58e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserSeverityTester', + test_name='Defaults_user_severity_to_error', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001704 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='OnFailure_called_for_each_failed_rule', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000567 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009577 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='RuleSet_can_be_used_inside_async_condition', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011754 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Equal_should_create_EqualValidator_with_explicit_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003343 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + test_name='Adds_argument_and_custom_arguments', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.99e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Validates_collection_asynchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0014053 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='Uses_regex_object', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004951 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_if_message_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001371 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='When_condition_executed_for_each_instance_of_RuleForEach_condition_sh' + 'ould_not_be_cached', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006033 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Uses_error_code_as_localization_key', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003393 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_is_null_then_the_validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.28e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_validator_fails_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002354 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='GreaterThanOrEqual_should_create_GreaterThanOrEqualValidator_with_lam' + 'bda_with_other_Nullable', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0019589 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='Name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004323 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='When_value_is_Default_for_type_validator_should_pass_int', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003963 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "TestPerson@gmail.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=8.05e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Throws_exception', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004234 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_should_throw_when_there_are_not_validation_' + 'errors__WhenAsyn_Is_Used(age: 17, cardNumber: "cardNumber")', + result='success', + message=None, + content=None, + stdout=None, + 
stderr=None, + time=0.0006412 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.StringEnumValidatorTests', + test_name='IsValidTests_CaseSensitive_CaseCorrect', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012556 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='Can_access_expression_in_message_lambda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002801 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_aspnetcore_compatible(email: "\xa0' + '@someDomain.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.73e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Async_inside_dependent_rules_when_parent_rule_not_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.9925303 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_Validate(p' + 'reValidationResult: AnotherInt Test Message)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.28e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Adding_a_validator_should_return_builder', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.14e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Validates_nullable_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003854 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='When_the_Validators_pass_then_the_validatorRunner_should_return_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001471 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='When_the_text_matches_the_lambda_regex_regular_expression_then_the_va' + 'lidator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001215 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MessageFormatterTests', + 
test_name='Understands_date_formats', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001748 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.MemberAccessorTests', + test_name='ComplexPropertyGet', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009033 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "\\"Fred Bloggs\\"@example.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.58e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Length_should_create_LengthValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002207 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Creates_validator_using_context_from_property_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011184 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExtensionTester', + test_name='SplitPascalCase_should_return_null_when_input_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.14e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExtensionTester', + test_name='Should_split_pascal_cased_member_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006315 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LocalisedMessagesTester', + test_name='Formats_string_with_placeholders_when_you_cant_edit_the_string', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002285 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.UserStateTester', + test_name='Stores_user_state_against_validation_failure', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011142 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ChainedValidationTester', + test_name='Chained_validator_descriptor', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002971 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='RuleForEach_async_RunsTasksSynchronously', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.0236497 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='MemberNameValidatorSelector_returns_true_when_property_name_matches', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002975 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Condition_should_work_with_complex_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004359 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.TransformTests', + test_name='Transforms_property_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003746 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_throw_when_there_are_validation_e' + 'rrors_ruleforeach', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0009397 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_validate_public_Field', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003435 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveChildValidator_should_not_throw_when_property_Does_have_chi' + 'ld_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0022892 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Can_validate_collection_using_validator_for_base_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004768 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Comparison_type', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002012 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_async_throws', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001366 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_async_When_respects_the_smaller_scope_of_a_inner_Unless_when_t' + 'he_inner_Unless_predicate_fails', + result='success', + 
message=None, + content=None, + stdout=None, + stderr=None, + time=0.0029811 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CollectionValidatorWithParentTests', + test_name='Collection_should_be_explicitly_included_with_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006578 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002738 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanOrEqualToValidatorTester', + test_name='Should_succeed_when_equal_to_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.26e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_should_correctly_handle_explicitly_provi' + 'ding_object_to_validate', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0031438 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='MustAsync_should_create_AsyncPredicateValidator_with_context', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001972 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Async_condition_is_applied_to_all_validators_in_the_chain_when_execut' + 'ed_synchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005363 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "testperson+label@gmail.com")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.75e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='New_CustomAsync_within_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008872 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Executes_rule_for_each_item_in_collection', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003186 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + 
class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Cascade_mode_can_be_set_after_validator_instantiated', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001778 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure_async_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008751 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomValidatorTester', + test_name='Runs_async_rule_synchronously_when_validator_invoked_synchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006956 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_Failure_when_set_to_Continue_globally_and_o' + 'verriden_at_rule_level_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00023 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Condition_can_be_used_inside_ruleset', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005964 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Does_not_throw_when_valid_and_a_ruleset_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003892 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Comparison_property_uses_custom_resolver', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004008 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Fails_email_validation_aspnetcore_compatible(email: "")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.14e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldNotHaveValidationError_async_throws', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0022945 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_on_failure_when_set_to_Stop_globally_and_overrid' + 'en_at_rule_level_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011028 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Uses_useful_error_message_when_used_on_non_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004356 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_to_is_smaller_than_the_from_then_the_validator_should_throw_' + 'for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001562 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidateAndThrowTester', + test_name='Throws_exception_with_a_ruleset_async', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0011925 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EqualValidatorTests', + test_name='Should_succeed_on_case_insensitive_comparison_using_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002538 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='When_runs_outside_RuleForEach_loop', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004935 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_not_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002465 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ForEachRuleTests', + test_name='Should_override_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003821 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmptyTester', + test_name='When_value_is_empty_string_validator_should_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002019 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_if_overriding_validator_provider_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001555 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='Equal_should_create_EqualValidator_with_lambda', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006079 + ), + publish.unittestresults.UnitTestCase( + 
result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotNullTester', + test_name='When_the_validator_fails_the_error_message_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002246 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Shared_async_When_is_applied_to_groupd_rules_when_initial_predicate_i' + 's_true_and_all_individual_rules_are_satisfied', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004461 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_build_property_name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.59e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CreditCardValidatorTests', + test_name='When_validation_fails_the_default_error_should_be_set', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0017932 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_to_is_smaller_than_the_from_then_the_validator_should_throw', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003029 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='ScalePrecision_should_create_ScalePrecisionValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0020009 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidationResultTests', + test_name='ToString_return_error_messages_with_newline_as_separator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003538 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Validates_child_validator_asynchronously', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0037362 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LengthValidatorTests', + test_name='When_the_text_is_smaller_than_the_lambda_range_then_the_validator_sho' + 'uld_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003612 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Rules_not_invoked_when_inverse_shared_condition_does_not_match', + result='success', + message=None, + content=None, + 
stdout=None, + stderr=None, + time=0.0009262 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_between_the_range_specified_then_the_validator_shou' + 'ld_pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003572 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ConditionTests', + test_name='Validation_should_fail_when_async_condition_matches', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007917 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Can_use_indexer_in_string_message_inverse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000553 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExtensionTester', + test_name='Should_extract_member_from_member_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000113 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ComplexValidationTester', + test_name='Explicitly_included_properties_should_be_propagated_to_nested_validat' + 'ors_using_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008307 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Validates_with_nullable_when_property_not_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002586 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Does_not_validate_other_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0004189 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleBuilderTests', + test_name='Should_throw_if_validator_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001458 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AccessorCacheTests', + test_name='Gets_accessor', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003872 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_continues_when_set_to_Continue_at_validator_level', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002021 + ), 
+ publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RegularExpressionValidatorTests', + test_name='Uses_lazily_loaded_expression', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003176 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanValidatorTester', + test_name='Validates_with_nullable_when_property_is_null_cross_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002669 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LessThanValidatorTester', + test_name='Validates_nullable_with_nullable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000773 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InclusiveBetweenValidatorTests', + test_name='When_the_value_is_between_the_range_specified_then_the_validator_shou' + 'ld_pass_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.00022 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.InheritanceValidatorTest', + test_name='Validates_with_callback', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0012889 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.OnFailureTests', + test_name='WhenWithOnFailure_should_invoke_condition_on_async_inner_validator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000634 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Should_succeed_when_equal_to_input', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.8e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='ScalePrecision_should_create_ScalePrecisionValidator_with_ignore_trai' + 'ling_zeros', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002146 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.LanguageManagerTests', + test_name='Always_use_specific_language', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=6.66e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.SharedConditionTests', + test_name='Does_not_execute_custom_Rule_when_condition_false', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + 
time=0.0008838 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='ShouldHaveValidationError_preconstructed_object_does_not_throw_for_un' + 'writable_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008894 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorTesterTester', + test_name='Model_level_check_fails_if_no_model_level_failures', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0005599 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='Should_throw_when_rule_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001178 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.AbstractValidatorTester', + test_name='WhenPreValidationReturnsFalse_ResultReturnToUserImmediatly_ValidateAs' + 'ync(preValidationResult: AnotherInt Test Message)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=5.78e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Valid_email_addresses_regex(email: "first.last@test.co.uk")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=2.26e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RuleDependencyTests', + test_name='Nested_dependent_rules_inside_ruleset_inside_method', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0007514 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.RulesetTests', + test_name='Executes_multiple_rulesets', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0017933 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NullTester', + test_name='Passes_when_nullable_value_type_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002748 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ValidatorSelectorTests', + test_name='Includes_nested_property', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0006524 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.PredicateValidatorTester', + test_name='Should_succeed_when_predicate_returns_true', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.66e-05 + 
), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NotEmptyTester', + test_name='When_value_is_whitespace_validation_should_fail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002178 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.ExclusiveBetweenValidatorTests', + test_name='When_the_validator_fails_the_error_message_should_be_set_for_strings', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002668 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.NullTester', + test_name='NullValidator_should_fail_if_value_has_value', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002075 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CustomFailureActionTester', + test_name='Passes_object_being_validated_to_action', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.000852 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.DefaultValidatorExtensionTester', + test_name='NotEmpty_should_create_NotEmptyValidator', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002803 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.CascadingFailuresTester', + test_name='Validation_stops_on_first_failure_when_set_to_StopOnFirstFailure_at_v' + 'alidator_level_legacy', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002253 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.EmailValidatorTests', + test_name='Invalid_email_addressex_regex(email: "")', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0003859 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/FluentValidation.Tests.trx', + test_file=None, + line=None, + class_name='FluentValidation.Tests.GreaterThanOrEqualToValidatorTester', + test_name='Validates_with_nullable_when_property_is_null', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0002299 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/nunit/FluentValidation.Tests.trx b/python/test/files/trx/nunit/FluentValidation.Tests.trx new file mode 100644 index 0000000..4ac6323 --- /dev/null +++ b/python/test/files/trx/nunit/FluentValidation.Tests.trx @@ -0,0 +1,4662 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 
+ [TRX XML markup lost in extraction; only the embedded xUnit console output below survives] + [xUnit.net 00:00:00.2837788] Discovering: FluentValidation.Tests +[xUnit.net 00:00:00.5914704] Discovered: FluentValidation.Tests +[xUnit.net 00:00:00.7008540] Starting: FluentValidation.Tests +[xUnit.net 00:00:00.8623084] Manual benchmark +Test 'FluentValidation.Tests.AccessorCacheTests.Benchmark' was skipped in the test run.
+[xUnit.net 00:00:04.0635438] Finished: FluentValidation.Tests + + + + + [xUnit.net 00:00:00.8621784] FluentValidation.Tests.AccessorCacheTests.Benchmark [SKIP] + + + + \ No newline at end of file diff --git a/python/test/files/trx/nunit/NUnit-net461-sample.annotations b/python/test/files/trx/nunit/NUnit-net461-sample.annotations new file mode 100644 index 0000000..34311de --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-net461-sample.annotations @@ -0,0 +1,100 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '3 fail, 1 skipped, 2 pass in 0s', + 'summary': + '6 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20033 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBAFr' + '0KoLfwkFl7GEJS4UcEsUBnv7spH6N7MZufmCo7V8ol1DePWg/th8SgcGE3YEtLBfacx79' + 'l6KUn0RexwpecolICDxPCLFdFgCqLXuffNOhe51AJXscB1S5rzBEeQFrOb4M8LVJ41VN0' + 'AAAA=\n', + 'annotations': [ + { + 'path': 'SampleProject.NUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit/NUnit-net461-sample.trx\u2003[took 0s]', + 'title': 'TestTheory(False) (SampleProject.NUnit.TestServiceTests) failed', + 'raw_details': + ' Expected: False\r\n But was: True\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) ' + 'in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUn' + 'it\\TestServiceTests.cs:line 25' + }, + { + 'path': 'SampleProject.NUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit/NUnit-net461-sample.trx\u2003[took 0s]', + 'title': 'TestThrowingException (SampleProject.NUnit.TestServiceTests) failed', + 'raw_details': + 'System.Exception : Pretty good exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestServi' + 'ce.cs:line 19\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestThrowingException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUn' + 'it\\TestServiceTests.cs:line 50' + }, + { + 'path': 'SampleProject.NUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit/NUnit-net461-sample.trx\u2003[took 0s]', + 'title': 'FailTest (SampleProject.NUnit.TestServiceTests) failed', + 'raw_details': + ' Expected: True\r\n But was: False\r\n at ' + 'SampleProject.NUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUn' + 'it\\TestServiceTests.cs:line 61' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test 
found', + 'raw_details': 'SampleProject.NUnit.TestServiceTests ‑ SkipTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 6 tests, see "Raw output" for the full list of tests.', + 'title': '6 tests found', + 'raw_details': + 'SampleProject.NUnit.TestServiceTests ‑ FailTest\n' + 'SampleProject.NUnit.TestServiceTests ‑ PassingTest\n' + 'SampleProject.NUnit.TestServiceTests ‑ SkipTest\n' + 'SampleProject.NUnit.TestServiceTests ‑ TestTheory(False)\n' + 'SampleProject.NUnit.TestServiceTests ‑ TestTheory(True)\n' + 'SampleProject.NUnit.TestServiceTests ‑ TestThrowingException' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/nunit/NUnit-net461-sample.junit-xml b/python/test/files/trx/nunit/NUnit-net461-sample.junit-xml new file mode 100644 index 0000000..d3120e5 --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-net461-sample.junit-xml @@ -0,0 +1,26 @@ + + + + + Expected: False + But was: True + at SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 25 + + + + Ignore test + + + + System.Exception : Pretty good exception at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.NUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 50 + + + Expected: True + But was: False + at SampleProject.NUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 61 + + + + + diff --git a/python/test/files/trx/nunit/NUnit-net461-sample.results b/python/test/files/trx/nunit/NUnit-net461-sample.results new file mode 100644 index 0000000..e0fe6fe --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-net461-sample.results @@ -0,0 +1,113 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=6, + suite_skipped=1, + suite_failures=3, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=6, + skipped=1, + failures=3, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='TestTheory(False)', + result='failure', + message=' Expected: False\r\n But was: True\r\n', + content=' Expected: False\r\n But was: True\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUnit' + '\\TestServiceTests.cs:line 25\r\n', + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='SkipTest', + result='skipped', + message='Ignore test', + content='Ignore test', + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='TestTheory(True)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.018 + ), + 
publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='TestThrowingException', + result='failure', + message='System.Exception : Pretty good exception', + content='System.Exception : Pretty good exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestService' + '.cs:line 19\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestThrowingException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUnit' + '\\TestServiceTests.cs:line 50', + stdout=None, + stderr=None, + time=0.005 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='FailTest', + result='failure', + message=' Expected: True\r\n But was: False\r\n', + content=' Expected: True\r\n But was: False\r\n at ' + 'SampleProject.NUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUnit' + '\\TestServiceTests.cs:line 61\r\n', + stdout=None, + stderr=None, + time=0.111 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='PassingTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/nunit/NUnit-net461-sample.trx b/python/test/files/trx/nunit/NUnit-net461-sample.trx new file mode 100644 index 0000000..64ed93b --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-net461-sample.trx @@ -0,0 +1,123 @@ + + + + + + + + + + Running SampleProject.Tests.NUnit tests + + Expected: False + But was: True + + at SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 25 + + + + + + + Ignore test + + Ignore test + + + + + + Running SampleProject.Tests.NUnit tests + + + + + Running SampleProject.Tests.NUnit tests + + System.Exception : Pretty good exception + at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.NUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 50 + + + + + + Running SampleProject.Tests.NUnit tests +This test will fail + + Expected: True + But was: False + + at SampleProject.NUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 61 + + + + + + + Running SampleProject.Tests.NUnit tests + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + NUnit Adapter 3.17.0.0: Test execution started +Running all tests in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\bin\Debug\net461\SampleProject.NUnit.dll + NUnit3TestExecutor discovered 6 of 6 NUnit test cases +Running SampleProject.Tests.NUnit tests +This test will fail + +Running SampleProject.Tests.NUnit tests + +SkipTest: Ignore test +Running SampleProject.Tests.NUnit tests + +Running SampleProject.Tests.NUnit tests + +Running SampleProject.Tests.NUnit tests + +NUnit Adapter 3.17.0.0: Test execution 
complete +Test 'SkipTest' was skipped in the test run. + + + + \ No newline at end of file diff --git a/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.annotations b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.annotations new file mode 100644 index 0000000..6a3038a --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.annotations @@ -0,0 +1,100 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '3 fail, 1 skipped, 2 pass in 0s', + 'summary': + '6 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20033 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBAFr' + '0KoLfwkFl7GEJS4UcEsUBnv7spH6N7MZufmCo7V8ol1DePWg/th8SgcGE3YEtLBfacx79' + 'l6KUn0RexwpecolICDxPCLFdFgCqLXuffNOhe51AJXscB1S5rzBEeQFrOb4M8LVJ41VN0' + 'AAAA=\n', + 'annotations': [ + { + 'path': 'SampleProject.NUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit/NUnit-netcoreapp3.1-sample.trx\u2003[took 0s]', + 'title': 'FailTest (SampleProject.NUnit.TestServiceTests) failed', + 'raw_details': + ' Expected: True\r\n But was: False\r\n at ' + 'SampleProject.NUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUn' + 'it\\TestServiceTests.cs:line 61' + }, + { + 'path': 'SampleProject.NUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit/NUnit-netcoreapp3.1-sample.trx\u2003[took 0s]', + 'title': 'TestThrowingException (SampleProject.NUnit.TestServiceTests) failed', + 'raw_details': + 'System.Exception : Pretty good exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestServi' + 'ce.cs:line 19\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestThrowingException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUn' + 'it\\TestServiceTests.cs:line 50' + }, + { + 'path': 'SampleProject.NUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'nunit/NUnit-netcoreapp3.1-sample.trx\u2003[took 0s]', + 'title': 'TestTheory(False) (SampleProject.NUnit.TestServiceTests) failed', + 'raw_details': + ' Expected: False\r\n But was: True\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) ' + 'in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUn' + 'it\\TestServiceTests.cs:line 25' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'SampleProject.NUnit.TestServiceTests ‑ 
SkipTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 6 tests, see "Raw output" for the full list of tests.', + 'title': '6 tests found', + 'raw_details': + 'SampleProject.NUnit.TestServiceTests ‑ FailTest\n' + 'SampleProject.NUnit.TestServiceTests ‑ PassingTest\n' + 'SampleProject.NUnit.TestServiceTests ‑ SkipTest\n' + 'SampleProject.NUnit.TestServiceTests ‑ TestTheory(False)\n' + 'SampleProject.NUnit.TestServiceTests ‑ TestTheory(True)\n' + 'SampleProject.NUnit.TestServiceTests ‑ TestThrowingException' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.junit-xml b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.junit-xml new file mode 100644 index 0000000..4c52822 --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.junit-xml @@ -0,0 +1,26 @@ + + + + + Expected: True + But was: False + at SampleProject.NUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 61 + + + + Ignore test + + + System.Exception : Pretty good exception at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.NUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 50 + + + Expected: False + But was: True + at SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 25 + + + + + + diff --git a/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.results b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.results new file mode 100644 index 0000000..3601570 --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.results @@ -0,0 +1,113 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=6, + suite_skipped=1, + suite_failures=3, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=6, + skipped=1, + failures=3, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='FailTest', + result='failure', + message=' Expected: True\r\n But was: False\r\n', + content=' Expected: True\r\n But was: False\r\n at ' + 'SampleProject.NUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUnit' + '\\TestServiceTests.cs:line 61\r\n', + stdout=None, + stderr=None, + time=0.059742 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='SkipTest', + result='skipped', + message='Ignore test', + content='Ignore test', + stdout=None, + stderr=None, + time=0.000447 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='TestThrowingException', + result='failure', + message='System.Exception : Pretty good exception', + content='System.Exception : Pretty good 
exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestService' + '.cs:line 19\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestThrowingException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUnit' + '\\TestServiceTests.cs:line 50', + stdout=None, + stderr=None, + time=0.003151 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='TestTheory(False)', + result='failure', + message=' Expected: False\r\n But was: True\r\n', + content=' Expected: False\r\n But was: True\r\n at ' + 'SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.NUnit' + '\\TestServiceTests.cs:line 25\r\n', + stdout=None, + stderr=None, + time=0.001997 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='TestTheory(True)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0113449 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/NUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.NUnit.TestServiceTests', + test_name='PassingTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0010839 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.trx b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.trx new file mode 100644 index 0000000..65775c3 --- /dev/null +++ b/python/test/files/trx/nunit/NUnit-netcoreapp3.1-sample.trx @@ -0,0 +1,123 @@ + + + + + + + + + + Running SampleProject.Tests.NUnit tests +This test will fail + + Expected: True + But was: False + + at SampleProject.NUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 61 + + + + + + + Ignore test + + Ignore test + + + + + + Running SampleProject.Tests.NUnit tests + + System.Exception : Pretty good exception + at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.NUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 50 + + + + + + Running SampleProject.Tests.NUnit tests + + Expected: False + But was: True + + at SampleProject.NUnit.TestServiceTests.TestTheory(Boolean expected) in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\TestServiceTests.cs:line 25 + + + + + + + Running SampleProject.Tests.NUnit tests + + + + + Running SampleProject.Tests.NUnit tests + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + NUnit Adapter 3.17.0.0: Test execution started +Running all tests in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.NUnit\bin\Debug\netcoreapp3.1\SampleProject.NUnit.dll + NUnit3TestExecutor discovered 6 of 6 NUnit test cases +Running SampleProject.Tests.NUnit tests +This test will fail + +Running SampleProject.Tests.NUnit tests + +SkipTest: Ignore test +Running SampleProject.Tests.NUnit tests + +Running SampleProject.Tests.NUnit tests + +Running 
SampleProject.Tests.NUnit tests + +NUnit Adapter 3.17.0.0: Test execution complete +Test 'SkipTest' was skipped in the test run. + + + + \ No newline at end of file diff --git a/python/test/files/trx/nunit/SilentNotes.annotations b/python/test/files/trx/nunit/SilentNotes.annotations new file mode 100644 index 0000000..b883e97 --- /dev/null +++ b/python/test/files/trx/nunit/SilentNotes.annotations @@ -0,0 +1,233 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 67 tests pass, 12 skipped in 0s', + 'summary': + '79 tests\u2002\u2003\u200367 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u200312 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20040 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLdRCopcxBCVuRDELVMa7u/hB7HbmZWfnGszoeMeqgnEXwCcYAkoPdiUsCWnwcRLtC7' + '0LSpFpxGdm2OJ7nYyWYPJCPyJafAyGNSXj/SveIgteIutdnOeUXRbwBM/F3CT5cQKN/0L' + 'n4wAAAA==\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 12 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '12 skipped tests found', + 'raw_details': + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ ReallyDoFetchToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ ReallyDoOpenAuthorizationPageInBrowser\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ ReallyDoRefreshToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ ThrowsWithInvalidPassword\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ ThrowsWithInvalidUrl\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ ThrowsWithInvalidUsername\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStor' + 'ageClientTest ‑ ReallyDoFetchToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStor' + 'ageClientTest ‑ ReallyDoOpenAuthorizationPageInBrowser\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStor' + 'ageClientTest ‑ ReallyDoRefreshToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudSt' + 'orageClientTest ‑ ReallyDoFetchToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudSt' + 'orageClientTest ‑ ReallyDoOpenAuthorizationPageInBrowser\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudSt' + 'orageClientTest ‑ ReallyDoRefreshToken' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 79 tests, see "Raw output" for the full list of tests.', + 'title': '79 tests found', + 'raw_details': + 
'VanillaCloudStorageClientTest.CloudStorageCredentialsTest ‑ ' + 'AreEqualWorksWithDifferentPassword\n' + 'VanillaCloudStorageClientTest.CloudStorageCredentialsTest ‑ ' + 'AreEqualWorksWithSameContent\n' + 'VanillaCloudStorageClientTest.CloudStorageCredentialsTest ‑ ' + 'CorrectlyConvertsSecureStringToString\n' + 'VanillaCloudStorageClientTest.CloudStorageCredentialsTest ‑ ' + 'CorrectlyConvertsStringToSecureString\n' + 'VanillaCloudStorageClientTest.CloudStorageCredentialsTest ‑ ' + 'ValidateAcceptsValidCredentials\n' + 'VanillaCloudStorageClientTest.CloudStorageCredentialsTest ‑ ' + 'ValidateRejectsInvalidCredentials\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ FileLifecycleWorks\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ ReallyDoFetchToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ ReallyDoOpenAuthorizationPageInBrowser\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ ReallyDoRefreshToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.DropboxCloudSto' + 'rageClientTest ‑ ThrowsAccessDeniedExceptionWithInvalidToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ FileLifecycleWorks\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ SanitizeCredentials_ChangesInvalidPrefix\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ SecureSslConnectionWorks\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ ThrowsWithHttpInsteadOfFtp\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ ThrowsWithInvalidPassword\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ ThrowsWithInvalidUrl\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.FtpCloudStorage' + 'ClientTest ‑ ThrowsWithInvalidUsername\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GmxCloudStorage' + 'ClientTest ‑ ChoosesCorrectUrlForGmxComEmail\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GmxCloudStorage' + 'ClientTest ‑ ChoosesCorrectUrlForGmxNetEmail\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStor' + 'ageClientTest ‑ FileLifecycleWorks\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStor' + 'ageClientTest ‑ ReallyDoFetchToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStor' + 'ageClientTest ‑ ReallyDoOpenAuthorizationPageInBrowser\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.GoogleCloudStor' + 'ageClientTest ‑ ReallyDoRefreshToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudSt' + 'orageClientTest ‑ FileLifecycleWorks\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudSt' + 'orageClientTest ‑ ReallyDoFetchToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudSt' + 'orageClientTest ‑ ReallyDoOpenAuthorizationPageInBrowser\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.OnedriveCloudSt' + 'orageClientTest ‑ ReallyDoRefreshToken\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStor' + 'ageClientTest ‑ FileLifecycleWorks\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStor' + 'ageClientTest ‑ ParseGmxWebdavResponseCorrectly\n' + 
'VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStor' + 'ageClientTest ‑ ParseStratoWebdavResponseCorrectly\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStor' + 'ageClientTest ‑ ThrowsWithInvalidPath\n' + 'VanillaCloudStorageClientTest.CloudStorageProviders.WebdavCloudStor' + 'ageClientTest ‑ ThrowsWithInvalidUsername\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'AreEqualWorksWithNullDate\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'AreEqualWorksWithSameContent\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'NeedsRefreshReturnsFalseForTokenFlow\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'NeedsRefreshReturnsFalseIfNotExpired\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'NeedsRefreshReturnsTrueIfExpired\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'NeedsRefreshReturnsTrueIfNoExpirationDate\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'SetExpiryDateBySecondsWorks\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'SetExpiryDateBySecondsWorksWithNull\n' + 'VanillaCloudStorageClientTest.CloudStorageTokenTest ‑ ' + 'SetExpiryDateBySecondsWorksWithVeryShortPeriod\n' + 'VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest' + ' ‑ ParsesAllErrorCodesCorrectly\n' + 'VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest' + ' ‑ ParsesNullErrorCodeCorrectly\n' + 'VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest' + ' ‑ ParsesUnknownErrorCodeCorrectly\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'BuildAuthorizationRequestUrlEscapesParameters\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'BuildAuthorizationRequestUrlLeavesOutOptionalParameters\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'BuildAuthorizationRequestUrlThrowsWithMissingRedirectUrlForTokenFlo' + 'w\nVanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'BuildAuthorizationRequestUrlUsesAllParameters\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'BuildAuthorizationRequestUrlUsesCodeVerifier\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'ParseRealWorldDropboxRejectResponse\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'ParseRealWorldDropboxSuccessResponse\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'ParseRealWorldGoogleRejectResponse\n' + 'VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest ‑ ' + 'ParseRealWorldGoogleSuccessResponse\n' + 'VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest ‑ ' + 'BuildOAuth2AuthorizationRequestUrlWorks\n' + 'VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest ‑ ' + 'FetchTokenCanInterpretGoogleResponse\n' + 'VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest ‑ ' + 'FetchTokenReturnsNullForDeniedAccess\n' + 'VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest ‑ ' + 'FetchTokenThrowsWithWrongState\n' + 'VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest ‑ ' + 'RefreshTokenCanInterpretGoogleResponse\n' + 'VanillaCloudStorageClientTest.SecureStringExtensionsTest ‑ ' + 'AreEqualsWorksCorrectly\n' + 'VanillaCloudStorageClientTest.SecureStringExtensionsTest ‑ ' + 'CorrectlyConvertsSecureStringToString\n' + 'VanillaCloudStorageClientTest.SecureStringExtensionsTest ‑ ' + 'CorrectlyConvertsSecureStringToUnicodeBytes\n' + 'VanillaCloudStorageClientTest.SecureStringExtensionsTest ‑ ' + 
'CorrectlyConvertsSecureStringToUtf8Bytes\n' + 'VanillaCloudStorageClientTest.SecureStringExtensionsTest ‑ ' + 'CorrectlyConvertsStringToSecureString\n' + 'VanillaCloudStorageClientTest.SecureStringExtensionsTest ‑ ' + 'CorrectlyConvertsUnicodeBytesToSecureString\n' + 'VanillaCloudStorageClientTest.SecureStringExtensionsTest ‑ ' + 'CorrectlyConvertsUtf8BytesToSecureString\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ DecryptAfterDesrializationCanReadAllPropertiesBack\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ DecryptAfterDesrializationRespectsNullProperties\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ EncryptBeforeSerializationProtectsAllNecessaryProperties\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ EncryptBeforeSerializationRespectsNullProperties\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedDatacontractCanBeReadBack\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedDatacontractDoesNotContainNullProperties\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedDatacontractDoesNotContainPlaintextData\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedJsonCanBeReadBack\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedJsonDoesNotContainNullProperties\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedJsonDoesNotContainPlaintextData\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedXmlCanBeReadBack\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedXmlDoesNotContainNullProperties\n' + 'VanillaCloudStorageClientTest.SerializeableCloudStorageCredentialsT' + 'est ‑ SerializedXmlDoesNotContainPlaintextData' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/nunit/SilentNotes.junit-xml b/python/test/files/trx/nunit/SilentNotes.junit-xml new file mode 100644 index 0000000..68f1453 --- /dev/null +++ b/python/test/files/trx/nunit/SilentNotes.junit-xml @@ -0,0 +1,108 @@ + + + + + + + Opens the authorization page in the system browse, to get a real access-token + + + + + + + + + + + + + Refreshes a real token + + + + + Too many consecutive fails seems to block an FTP server. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Gets a real access-token + + + Opens the authorization page in the system browse, to get a real authorization-code + + + + + + + + + + + + Gets a real access-token + + + + + Gets a real access-token + + + Refreshes a real token + + + + + Opens the authorization page in the system browse, to get a real authorization-code + + + + Too many consecutive fails seems to block an FTP server. + + + + + + + + Too many consecutive fails seems to block an FTP server. 
+ + + + + + + + Refreshes a real token + + + + + diff --git a/python/test/files/trx/nunit/SilentNotes.results b/python/test/files/trx/nunit/SilentNotes.results new file mode 100644 index 0000000..dc5e2e8 --- /dev/null +++ b/python/test/files/trx/nunit/SilentNotes.results @@ -0,0 +1,1096 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=79, + suite_skipped=12, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=79, + skipped=12, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest', + test_name='FetchTokenReturnsNullForDeniedAccess', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedJsonDoesNotContainPlaintextData', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'DropboxCloudStorageClientTest', + test_name='ReallyDoOpenAuthorizationPageInBrowser', + result='skipped', + message='Opens the authorization page in the system browse, to get a real ' + 'access-token', + content='Opens the authorization page in the system browse, to get a real ' + 'access-token', + stdout=None, + stderr=None, + time=8.3e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' 
+ 'FtpCloudStorageClientTest', + test_name='FileLifecycleWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.161 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.SecureStringExtensionsTest', + test_name='CorrectlyConvertsStringToSecureString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageCredentialsTest', + test_name='ValidateAcceptsValidCredentials', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='BuildAuthorizationRequestUrlThrowsWithMissingRedirectUrlForTokenFlow', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='ParseRealWorldDropboxSuccessResponse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='BuildAuthorizationRequestUrlUsesAllParameters', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedDatacontractDoesNotContainNullProperties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageCredentialsTest', + test_name='CorrectlyConvertsSecureStringToString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedJsonDoesNotContainNullProperties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedDatacontractCanBeReadBack', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.016 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' 
+ 'OnedriveCloudStorageClientTest', + test_name='ReallyDoRefreshToken', + result='skipped', + message='Refreshes a real token', + content='Refreshes a real token', + stdout=None, + stderr=None, + time=1e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'WebdavCloudStorageClientTest', + test_name='ParseStratoWebdavResponseCorrectly', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedXmlDoesNotContainNullProperties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'FtpCloudStorageClientTest', + test_name='ThrowsWithInvalidUrl', + result='skipped', + message='Too many consecutive fails seems to block an FTP server.', + content='Too many consecutive fails seems to block an FTP server.', + stdout=None, + stderr=None, + time=1.4e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='NeedsRefreshReturnsFalseIfNotExpired', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'WebdavCloudStorageClientTest', + test_name='ThrowsWithInvalidUsername', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'FtpCloudStorageClientTest', + test_name='ThrowsWithHttpInsteadOfFtp', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.004 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedXmlDoesNotContainPlaintextData', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' 
+ 'WebdavCloudStorageClientTest', + test_name='ThrowsWithInvalidPath', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='BuildAuthorizationRequestUrlLeavesOutOptionalParameters', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'FtpCloudStorageClientTest', + test_name='SanitizeCredentials_ChangesInvalidPrefix', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.SecureStringExtensionsTest', + test_name='CorrectlyConvertsUnicodeBytesToSecureString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='NeedsRefreshReturnsTrueIfNoExpirationDate', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='ParseRealWorldDropboxRejectResponse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.009 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedJsonCanBeReadBack', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'GmxCloudStorageClientTest', + test_name='ChoosesCorrectUrlForGmxNetEmail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'DropboxCloudStorageClientTest', + test_name='ThrowsAccessDeniedExceptionWithInvalidToken', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' 
+ 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedDatacontractDoesNotContainPlaintextData', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest', + test_name='RefreshTokenCanInterpretGoogleResponse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'WebdavCloudStorageClientTest', + test_name='ParseGmxWebdavResponseCorrectly', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.SecureStringExtensionsTest', + test_name='CorrectlyConvertsUtf8BytesToSecureString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageCredentialsTest', + test_name='CorrectlyConvertsStringToSecureString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageCredentialsTest', + test_name='AreEqualWorksWithSameContent', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.SecureStringExtensionsTest', + test_name='CorrectlyConvertsSecureStringToString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest', + test_name='ParsesNullErrorCodeCorrectly', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'GmxCloudStorageClientTest', + test_name='ChoosesCorrectUrlForGmxComEmail', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' 
+ 'WebdavCloudStorageClientTest', + test_name='FileLifecycleWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.014 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='SetExpiryDateBySecondsWorksWithNull', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageCredentialsTest', + test_name='ValidateRejectsInvalidCredentials', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='EncryptBeforeSerializationProtectsAllNecessaryProperties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='DecryptAfterDesrializationRespectsNullProperties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'DropboxCloudStorageClientTest', + test_name='ReallyDoFetchToken', + result='skipped', + message='Gets a real access-token', + content='Gets a real access-token', + stdout=None, + stderr=None, + time=0.0004561 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'OnedriveCloudStorageClientTest', + test_name='ReallyDoOpenAuthorizationPageInBrowser', + result='skipped', + message='Opens the authorization page in the system browse, to get a real ' + 'authorization-code', + content='Opens the authorization page in the system browse, to get a real ' + 'authorization-code', + stdout=None, + stderr=None, + time=1e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='DecryptAfterDesrializationCanReadAllPropertiesBack', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' 
+ 'FtpCloudStorageClientTest', + test_name='SecureSslConnectionWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='NeedsRefreshReturnsFalseForTokenFlow', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest', + test_name='FetchTokenCanInterpretGoogleResponse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='NeedsRefreshReturnsTrueIfExpired', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='ParseRealWorldGoogleSuccessResponse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'GoogleCloudStorageClientTest', + test_name='FileLifecycleWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.04 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='SetExpiryDateBySecondsWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='SetExpiryDateBySecondsWorksWithVeryShortPeriod', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'GoogleCloudStorageClientTest', + test_name='ReallyDoFetchToken', + result='skipped', + message='Gets a real access-token', + content='Gets a real access-token', + stdout=None, + stderr=None, + time=3.1e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' 
+ 'OnedriveCloudStorageClientTest', + test_name='FileLifecycleWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.015 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest', + test_name='ParsesAllErrorCodesCorrectly', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'OnedriveCloudStorageClientTest', + test_name='ReallyDoFetchToken', + result='skipped', + message='Gets a real access-token', + content='Gets a real access-token', + stdout=None, + stderr=None, + time=3.3e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'GoogleCloudStorageClientTest', + test_name='ReallyDoRefreshToken', + result='skipped', + message='Refreshes a real token', + content='Refreshes a real token', + stdout=None, + stderr=None, + time=5.94e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'DropboxCloudStorageClientTest', + test_name='FileLifecycleWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.095 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='ParseRealWorldGoogleRejectResponse', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'GoogleCloudStorageClientTest', + test_name='ReallyDoOpenAuthorizationPageInBrowser', + result='skipped', + message='Opens the authorization page in the system browse, to get a real ' + 'authorization-code', + content='Opens the authorization page in the system browse, to get a real ' + 'authorization-code', + stdout=None, + stderr=None, + time=1.2e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.SecureStringExtensionsTest', + test_name='CorrectlyConvertsSecureStringToUnicodeBytes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' 
+ 'FtpCloudStorageClientTest', + test_name='ThrowsWithInvalidUsername', + result='skipped', + message='Too many consecutive fails seems to block an FTP server.', + content='Too many consecutive fails seems to block an FTP server.', + stdout=None, + stderr=None, + time=1e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='BuildAuthorizationRequestUrlEscapesParameters', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest', + test_name='FetchTokenThrowsWithWrongState', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2CloudStorageClientTest', + test_name='BuildOAuth2AuthorizationRequestUrlWorks', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.SecureStringExtensionsTest', + test_name='AreEqualsWorksCorrectly', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' + 'SerializeableCloudStorageCredentialsTest', + test_name='EncryptBeforeSerializationRespectsNullProperties', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'FtpCloudStorageClientTest', + test_name='ThrowsWithInvalidPassword', + result='skipped', + message='Too many consecutive fails seems to block an FTP server.', + content='Too many consecutive fails seems to block an FTP server.', + stdout=None, + stderr=None, + time=1.31e-05 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageCredentialsTest', + test_name='AreEqualWorksWithDifferentPassword', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.007 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='AreEqualWorksWithNullDate', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.' 
+ 'SerializeableCloudStorageCredentialsTest', + test_name='SerializedXmlCanBeReadBack', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.016 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.OAuth2UtilsTest', + test_name='BuildAuthorizationRequestUrlUsesCodeVerifier', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.SecureStringExtensionsTest', + test_name='CorrectlyConvertsSecureStringToUtf8Bytes', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageProviders.' + 'DropboxCloudStorageClientTest', + test_name='ReallyDoRefreshToken', + result='skipped', + message='Refreshes a real token', + content='Refreshes a real token', + stdout=None, + stderr=None, + time=1.2e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.OAuth2.AuthorizationResponseErrorTest', + test_name='ParsesUnknownErrorCodeCorrectly', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ), + publish.unittestresults.UnitTestCase( + result_file='nunit/SilentNotes.trx', + test_file=None, + line=None, + class_name='VanillaCloudStorageClientTest.CloudStorageTokenTest', + test_name='AreEqualWorksWithSameContent', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1e-07 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/nunit/SilentNotes.trx b/python/test/files/trx/nunit/SilentNotes.trx new file mode 100644 index 0000000..7dbc3c2 --- /dev/null +++ b/python/test/files/trx/nunit/SilentNotes.trx @@ -0,0 +1,609 @@ + + + + + + + + + + + + Opens the authorization page in the system browse, to get a real access-token + + Opens the authorization page in the system browse, to get a real access-token + + + + + + + + + + + + + + + + Refreshes a real token + + Refreshes a real token + + + + + + + + Too many consecutive fails seems to block an FTP server. + + Too many consecutive fails seems to block an FTP server. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Gets a real access-token + + Gets a real access-token + + + + + + Opens the authorization page in the system browse, to get a real authorization-code + + Opens the authorization page in the system browse, to get a real authorization-code + + + + + + + + + + + + + + + Gets a real access-token + + Gets a real access-token + + + + + + + + Gets a real access-token + + Gets a real access-token + + + + + + Refreshes a real token + + Refreshes a real token + + + + + + + + Opens the authorization page in the system browse, to get a real authorization-code + + Opens the authorization page in the system browse, to get a real authorization-code + + + + + + + Too many consecutive fails seems to block an FTP server. + + Too many consecutive fails seems to block an FTP server. + + + + + + + + + + + Too many consecutive fails seems to block an FTP server. 
+ + Too many consecutive fails seems to block an FTP server. + + + + + + + + + + + Refreshes a real token + + Refreshes a real token + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + NUnit Adapter 3.17.0.0: Test execution started +Running all tests in C:\Users\Michal\Workspace\github\SilentNotes\bin\VanillaCloudStorageClientTest\netcoreapp2.1\VanillaCloudStorageClientTest.dll + NUnit3TestExecutor discovered 79 of 79 NUnit test cases +ReallyDoFetchToken: Gets a real access-token +ReallyDoOpenAuthorizationPageInBrowser: Opens the authorization page in the system browse, to get a real access-token +Test 'ReallyDoFetchToken' was skipped in the test run. +Test 'ReallyDoOpenAuthorizationPageInBrowser' was skipped in the test run. +ReallyDoRefreshToken: Refreshes a real token +ThrowsWithInvalidPassword: Too many consecutive fails seems to block an FTP server. +ThrowsWithInvalidUrl: Too many consecutive fails seems to block an FTP server. +ThrowsWithInvalidUsername: Too many consecutive fails seems to block an FTP server. +Test 'ReallyDoRefreshToken' was skipped in the test run. +Test 'ThrowsWithInvalidPassword' was skipped in the test run. +Test 'ThrowsWithInvalidUrl' was skipped in the test run. +Test 'ThrowsWithInvalidUsername' was skipped in the test run. +ReallyDoFetchToken: Gets a real access-token +ReallyDoOpenAuthorizationPageInBrowser: Opens the authorization page in the system browse, to get a real authorization-code +ReallyDoRefreshToken: Refreshes a real token +ReallyDoFetchToken: Gets a real access-token +ReallyDoOpenAuthorizationPageInBrowser: Opens the authorization page in the system browse, to get a real authorization-code +Test 'ReallyDoFetchToken' was skipped in the test run. +Test 'ReallyDoOpenAuthorizationPageInBrowser' was skipped in the test run. +Test 'ReallyDoRefreshToken' was skipped in the test run. +Test 'ReallyDoFetchToken' was skipped in the test run. +Test 'ReallyDoOpenAuthorizationPageInBrowser' was skipped in the test run. +ReallyDoRefreshToken: Refreshes a real token +Test 'ReallyDoRefreshToken' was skipped in the test run. 
+NUnit Adapter 3.17.0.0: Test execution complete + + + + \ No newline at end of file diff --git a/python/test/files/trx/xunit/dotnet-trx.annotations b/python/test/files/trx/xunit/dotnet-trx.annotations new file mode 100644 index 0000000..81e9437 --- /dev/null +++ b/python/test/files/trx/xunit/dotnet-trx.annotations @@ -0,0 +1,139 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '5 fail, 1 skipped, 5 pass in 0s', + 'summary': + '11 tests\u2002\u2003\u20035 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u20035 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLaSw8TKGIMaNfMwClfHuroCK3byZ3XfwBYwOfGSiYzwkiC/MCWUE7wh7QhpinsQDU0' + 'hKUTN8xQZ7/S7FIsH8LjSix2rE5F7hnVtf4U+XubFlbmXKWwuRoCYWVsnPC2b3Tg/fAAA' + 'A\n', + 'annotations': [ + { + 'path': 'DotnetTests.XUnitTests.CalculatorTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/dotnet-trx.trx\u2003[took 0s]', + 'title': + 'Should be even number(i: 3) ' + '(DotnetTests.XUnitTests.CalculatorTests) failed', + 'raw_details': + 'Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int3' + '2 i) in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\Dotnet' + 'Tests.XUnitTests\\CalculatorTests.cs:line 67' + }, + { + 'path': 'DotnetTests.XUnitTests.CalculatorTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/dotnet-trx.trx\u2003[took 0s]', + 'title': + 'Exception_In_TargetTest (DotnetTests.XUnitTests.CalculatorTests) ' + 'failed', + 'raw_details': + 'System.DivideByZeroException : Attempted to divide by zero. 
at ' + 'DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\Dotnet' + 'Tests.Unit\\Calculator.cs:line 9\r\n at ' + 'DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() ' + 'in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\Dotnet' + 'Tests.XUnitTests\\CalculatorTests.cs:line 33' + }, + { + 'path': 'DotnetTests.XUnitTests.CalculatorTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/dotnet-trx.trx\u2003[took 0s]', + 'title': 'Exception_In_Test (DotnetTests.XUnitTests.CalculatorTests) failed', + 'raw_details': + 'System.Exception : Test at ' + 'DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\Dotnet' + 'Tests.XUnitTests\\CalculatorTests.cs:line 39' + }, + { + 'path': 'DotnetTests.XUnitTests.CalculatorTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/dotnet-trx.trx\u2003[took 0s]', + 'title': 'Failing_Test (DotnetTests.XUnitTests.CalculatorTests) failed', + 'raw_details': + 'Assert.Equal() Failure\r\nExpected: 3\r\nActual: 2 at ' + 'DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\Dotnet' + 'Tests.XUnitTests\\CalculatorTests.cs:line 27' + }, + { + 'path': 'DotnetTests.XUnitTests.CalculatorTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/dotnet-trx.trx\u2003[took 0s]', + 'title': + 'Is_Even_Number(i: 3) (DotnetTests.XUnitTests.CalculatorTests) ' + 'failed', + 'raw_details': + 'Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\Dotnet' + 'Tests.XUnitTests\\CalculatorTests.cs:line 59' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'DotnetTests.XUnitTests.CalculatorTests ‑ Skipped_Test' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 11 tests, see "Raw output" for the full list of tests.', + 'title': '11 tests found', + 'raw_details': + 'DotnetTests.XUnitTests.CalculatorTests ‑ Custom Name\n' + 'DotnetTests.XUnitTests.CalculatorTests ‑ Exception_In_TargetTest\n' + 'DotnetTests.XUnitTests.CalculatorTests ‑ Exception_In_Test\n' + 'DotnetTests.XUnitTests.CalculatorTests ‑ Failing_Test\n' + 'DotnetTests.XUnitTests.CalculatorTests ‑ Is_Even_Number(i: 2)\n' + 'DotnetTests.XUnitTests.CalculatorTests ‑ Is_Even_Number(i: 3)\n' + 'DotnetTests.XUnitTests.CalculatorTests ‑ Passing_Test\n' + 'DotnetTests.XUnitTests.CalculatorTests ‑ Should be even number(i: ' + '2)\nDotnetTests.XUnitTests.CalculatorTests ‑ Should be even ' + 'number(i: 3)\nDotnetTests.XUnitTests.CalculatorTests ‑ ' + 'Skipped_Test\nDotnetTests.XUnitTests.CalculatorTests ‑ ' + 'Timeout_Test' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/xunit/dotnet-trx.junit-xml b/python/test/files/trx/xunit/dotnet-trx.junit-xml new file mode 100644 index 0000000..f063282 --- /dev/null +++ b/python/test/files/trx/xunit/dotnet-trx.junit-xml @@ -0,0 +1,35 @@ + + + + + + + Assert.True() 
Failure +Expected: True +Actual: False at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 67 + + + + + + + + System.DivideByZeroException : Attempted to divide by zero. at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs:line 9 + at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 33 + + + System.Exception : Test at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 39 + + + Assert.Equal() Failure +Expected: 3 +Actual: 2 at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 27 + + + + Assert.True() Failure +Expected: True +Actual: False at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 59 + + + diff --git a/python/test/files/trx/xunit/dotnet-trx.results b/python/test/files/trx/xunit/dotnet-trx.results new file mode 100644 index 0000000..8af387d --- /dev/null +++ b/python/test/files/trx/xunit/dotnet-trx.results @@ -0,0 +1,185 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=11, + suite_skipped=1, + suite_failures=5, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=11, + skipped=1, + failures=5, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Timeout_Test', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.1084258 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Passing_Test', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001365 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Should be even number(i: 3)', + result='failure', + message='Assert.True() Failure\r\nExpected: True\r\nActual: False', + content='Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 ' + 'i) in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTe' + 'sts.XUnitTests\\CalculatorTests.cs:line 67', + stdout=None, + stderr=None, + time=0.0006537 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Skipped_Test', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + 
class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Custom Name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0001371 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Should be even number(i: 2)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=9.7e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Exception_In_TargetTest', + result='failure', + message='System.DivideByZeroException : Attempted to divide by zero.', + content='System.DivideByZeroException : Attempted to divide by zero. at ' + 'DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTe' + 'sts.Unit\\Calculator.cs:line 9\r\n at ' + 'DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTe' + 'sts.XUnitTests\\CalculatorTests.cs:line 33', + stdout=None, + stderr=None, + time=0.0008377 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Exception_In_Test', + result='failure', + message='System.Exception : Test', + content='System.Exception : Test at ' + 'DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTe' + 'sts.XUnitTests\\CalculatorTests.cs:line 39', + stdout=None, + stderr=None, + time=0.0025175 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Failing_Test', + result='failure', + message='Assert.Equal() Failure\r\nExpected: 3\r\nActual: 2', + content='Assert.Equal() Failure\r\nExpected: 3\r\nActual: 2 at ' + 'DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTe' + 'sts.XUnitTests\\CalculatorTests.cs:line 27', + stdout=None, + stderr=None, + time=0.0038697 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Is_Even_Number(i: 2)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=7.8e-06 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/dotnet-trx.trx', + test_file=None, + line=None, + class_name='DotnetTests.XUnitTests.CalculatorTests', + test_name='Is_Even_Number(i: 3)', + result='failure', + message='Assert.True() Failure\r\nExpected: True\r\nActual: False', + content='Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in ' + 'C:\\Users\\Michal\\Workspace\\dorny\\test-reporter\\reports\\dotnet\\DotnetTe' + 'sts.XUnitTests\\CalculatorTests.cs:line 59', + stdout=None, + stderr=None, + time=0.0004141 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/xunit/dotnet-trx.trx b/python/test/files/trx/xunit/dotnet-trx.trx new file mode 100644 index 0000000..bf112b8 --- /dev/null +++ 
b/python/test/files/trx/xunit/dotnet-trx.trx @@ -0,0 +1,184 @@ + + + + + + + + + + + + + Assert.True() Failure +Expected: True +Actual: False + at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 67 + + + + + + Skipped test + + + + + + + + System.DivideByZeroException : Attempted to divide by zero. + at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs:line 9 + at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 33 + + + + + + + System.Exception : Test + at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 39 + + + + + + + Assert.Equal() Failure +Expected: 3 +Actual: 2 + at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 27 + + + + + + + + Assert.True() Failure +Expected: True +Actual: False + at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) in C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs:line 59 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + [xUnit.net 00:00:00.00] xUnit.net VSTest Adapter v2.4.0 (64-bit .NET Core 3.1.14) +[xUnit.net 00:00:00.27] Discovering: DotnetTests.XUnitTests +[xUnit.net 00:00:00.32] Discovered: DotnetTests.XUnitTests +[xUnit.net 00:00:00.32] Starting: DotnetTests.XUnitTests +[xUnit.net 00:00:00.39] System.Exception : Test +[xUnit.net 00:00:00.39] Stack Trace: +[xUnit.net 00:00:00.39] C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs(39,0): at DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test() +[xUnit.net 00:00:00.51] Assert.Equal() Failure +[xUnit.net 00:00:00.51] Expected: 3 +[xUnit.net 00:00:00.51] Actual: 2 +[xUnit.net 00:00:00.51] Stack Trace: +[xUnit.net 00:00:00.51] C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs(27,0): at DotnetTests.XUnitTests.CalculatorTests.Failing_Test() +[xUnit.net 00:00:00.51] Assert.True() Failure +[xUnit.net 00:00:00.51] Expected: True +[xUnit.net 00:00:00.51] Actual: False +[xUnit.net 00:00:00.51] Stack Trace: +[xUnit.net 00:00:00.51] C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs(67,0): at DotnetTests.XUnitTests.CalculatorTests.Theory_With_Custom_Name(Int32 i) +[xUnit.net 00:00:00.51] System.DivideByZeroException : Attempted to divide by zero. 
+[xUnit.net 00:00:00.51] Stack Trace: +[xUnit.net 00:00:00.51] C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.Unit\Calculator.cs(9,0): at DotnetTests.Unit.Calculator.Div(Int32 a, Int32 b) +[xUnit.net 00:00:00.51] C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs(33,0): at DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest() +[xUnit.net 00:00:00.51] Assert.True() Failure +[xUnit.net 00:00:00.51] Expected: True +[xUnit.net 00:00:00.51] Actual: False +[xUnit.net 00:00:00.51] Stack Trace: +[xUnit.net 00:00:00.51] C:\Users\Michal\Workspace\dorny\test-reporter\reports\dotnet\DotnetTests.XUnitTests\CalculatorTests.cs(59,0): at DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(Int32 i) +[xUnit.net 00:00:00.54] Skipped test +[xUnit.net 00:00:00.54] Finished: DotnetTests.XUnitTests +Test 'DotnetTests.XUnitTests.CalculatorTests.Skipped_Test' was skipped in the test run. + + + + + [xUnit.net 00:00:00.39] DotnetTests.XUnitTests.CalculatorTests.Exception_In_Test [FAIL] + + + [xUnit.net 00:00:00.51] DotnetTests.XUnitTests.CalculatorTests.Failing_Test [FAIL] + + + [xUnit.net 00:00:00.51] Should be even number(i: 3) [FAIL] + + + [xUnit.net 00:00:00.51] DotnetTests.XUnitTests.CalculatorTests.Exception_In_TargetTest [FAIL] + + + [xUnit.net 00:00:00.51] DotnetTests.XUnitTests.CalculatorTests.Is_Even_Number(i: 3) [FAIL] + + + [xUnit.net 00:00:00.54] DotnetTests.XUnitTests.CalculatorTests.Skipped_Test [SKIP] + + + + \ No newline at end of file diff --git a/python/test/files/trx/xunit/xUnit-net461-sample.annotations b/python/test/files/trx/xunit/xUnit-net461-sample.annotations new file mode 100644 index 0000000..0cbec31 --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-net461-sample.annotations @@ -0,0 +1,102 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '3 fail, 1 skipped, 2 pass in 0s', + 'summary': + '6 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20033 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBAFr' + '0KoLfwkFl7GEJS4UcEsUBnv7spH6N7MZufmCo7V8ol1DePWg/th8SgcGE3YEtLBfacx79' + 'l6KUn0RexwpecolICDxPCLFdFgCqLXuffNOhe51AJXscB1S5rzBEeQFrOb4M8LVJ41VN0' + 'AAAA=\n', + 'annotations': [ + { + 'path': 'SampleProject.xUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/xUnit-net461-sample.trx\u2003[took 0s]', + 'title': + 'TestTheory(expected: False) (SampleProject.xUnit.TestServiceTests) ' + 'failed', + 'raw_details': + 'Assert.Equal() Failure\r\nExpected: False\r\nActual: True at ' + 'SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) ' + 'in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUn' + 'it\\TestServiceTests.cs:line 29' + }, + { + 
'path': 'SampleProject.xUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/xUnit-net461-sample.trx\u2003[took 0s]', + 'title': 'FailTest (SampleProject.xUnit.TestServiceTests) failed', + 'raw_details': + 'Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'SampleProject.xUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUn' + 'it\\TestServiceTests.cs:line 65' + }, + { + 'path': 'SampleProject.xUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/xUnit-net461-sample.trx\u2003[took 0s]', + 'title': 'TestThrowingException (SampleProject.xUnit.TestServiceTests) failed', + 'raw_details': + 'System.Exception : Pretty good exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestServi' + 'ce.cs:line 19\r\n at ' + 'SampleProject.xUnit.TestServiceTests.TestThrowingException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUn' + 'it\\TestServiceTests.cs:line 54' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'SampleProject.xUnit.TestServiceTests ‑ SkipTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 6 tests, see "Raw output" for the full list of tests.', + 'title': '6 tests found', + 'raw_details': + 'SampleProject.xUnit.TestServiceTests ‑ FailTest\n' + 'SampleProject.xUnit.TestServiceTests ‑ PassingTest\n' + 'SampleProject.xUnit.TestServiceTests ‑ SkipTest\n' + 'SampleProject.xUnit.TestServiceTests ‑ TestTheory(expected: False)\n' + 'SampleProject.xUnit.TestServiceTests ‑ TestTheory(expected: True)\n' + 'SampleProject.xUnit.TestServiceTests ‑ TestThrowingException' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/xunit/xUnit-net461-sample.junit-xml b/python/test/files/trx/xunit/xUnit-net461-sample.junit-xml new file mode 100644 index 0000000..794d427 --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-net461-sample.junit-xml @@ -0,0 +1,24 @@ + + + + + Assert.Equal() Failure +Expected: False +Actual: True at SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 29 + + + Assert.True() Failure +Expected: True +Actual: False at SampleProject.xUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 65 + + + + + + System.Exception : Pretty good exception at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.xUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 54 + + + + + diff --git a/python/test/files/trx/xunit/xUnit-net461-sample.results b/python/test/files/trx/xunit/xUnit-net461-sample.results new file mode 100644 index 0000000..5f7ac1f --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-net461-sample.results @@ -0,0 +1,113 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=6, + 
suite_skipped=1, + suite_failures=3, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=6, + skipped=1, + failures=3, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='TestTheory(expected: False)', + result='failure', + message='Assert.Equal() Failure\r\nExpected: False\r\nActual: True', + content='Assert.Equal() Failure\r\nExpected: False\r\nActual: True at ' + 'SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUnit' + '\\TestServiceTests.cs:line 29', + stdout=None, + stderr=None, + time=0.02 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='FailTest', + result='failure', + message='Assert.True() Failure\r\nExpected: True\r\nActual: False', + content='Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'SampleProject.xUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUnit' + '\\TestServiceTests.cs:line 65', + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='SkipTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='TestThrowingException', + result='failure', + message='System.Exception : Pretty good exception', + content='System.Exception : Pretty good exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestService' + '.cs:line 19\r\n at ' + 'SampleProject.xUnit.TestServiceTests.TestThrowingException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUnit' + '\\TestServiceTests.cs:line 54', + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='TestTheory(expected: True)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.025 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-net461-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='PassingTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/xunit/xUnit-net461-sample.trx b/python/test/files/trx/xunit/xUnit-net461-sample.trx new file mode 100644 index 0000000..5371142 --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-net461-sample.trx @@ -0,0 +1,143 @@ + + + + + + + + + + Running SampleProject.Tests.xUnit tests + + Assert.Equal() Failure +Expected: False +Actual: True + at SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) in 
C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 29 + + + + + + Running SampleProject.Tests.xUnit tests +This test will fail + + Assert.True() Failure +Expected: True +Actual: False + at SampleProject.xUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 65 + + + + + + Skipped + + + + + Running SampleProject.Tests.xUnit tests + + System.Exception : Pretty good exception + at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.xUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 54 + + + + + + Running SampleProject.Tests.xUnit tests + + + + + Running SampleProject.Tests.xUnit tests + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + [xUnit.net 00:00:00.00] xUnit.net VSTest Adapter v2.4.3+1b45f5407b (32-bit Desktop .NET 4.0.30319.42000) +[xUnit.net 00:00:00.65] Discovering: SampleProject.xUnit +[xUnit.net 00:00:00.77] Discovered: SampleProject.xUnit +[xUnit.net 00:00:00.78] Starting: SampleProject.xUnit +[xUnit.net 00:00:01.01] Assert.Equal() Failure +[xUnit.net 00:00:01.01] Expected: False +[xUnit.net 00:00:01.01] Actual: True +[xUnit.net 00:00:01.01] Stack Trace: +[xUnit.net 00:00:01.01] TestServiceTests.cs(29,0): at SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) +[xUnit.net 00:00:01.01] Output: +[xUnit.net 00:00:01.01] Running SampleProject.Tests.xUnit tests +[xUnit.net 00:00:01.01] Skipped +[xUnit.net 00:00:01.24] System.Exception : Pretty good exception +[xUnit.net 00:00:01.24] Stack Trace: +[xUnit.net 00:00:01.24] C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs(19,0): at SampleProject.TestService.GetException() +[xUnit.net 00:00:01.24] TestServiceTests.cs(54,0): at SampleProject.xUnit.TestServiceTests.TestThrowingException() +[xUnit.net 00:00:01.24] Output: +[xUnit.net 00:00:01.24] Running SampleProject.Tests.xUnit tests +[xUnit.net 00:00:01.25] Assert.True() Failure +[xUnit.net 00:00:01.25] Expected: True +[xUnit.net 00:00:01.25] Actual: False +[xUnit.net 00:00:01.25] Stack Trace: +[xUnit.net 00:00:01.25] TestServiceTests.cs(65,0): at SampleProject.xUnit.TestServiceTests.FailTest() +[xUnit.net 00:00:01.25] Output: +[xUnit.net 00:00:01.25] Running SampleProject.Tests.xUnit tests +[xUnit.net 00:00:01.25] This test will fail +[xUnit.net 00:00:01.25] Finished: SampleProject.xUnit +Test 'SampleProject.xUnit.TestServiceTests.SkipTest' was skipped in the test run. 
+ + + + + [xUnit.net 00:00:01.01] SampleProject.xUnit.TestServiceTests.TestTheory(expected: False) [FAIL] + + + [xUnit.net 00:00:01.01] SampleProject.xUnit.TestServiceTests.SkipTest [SKIP] + + + [xUnit.net 00:00:01.24] SampleProject.xUnit.TestServiceTests.TestThrowingException [FAIL] + + + [xUnit.net 00:00:01.25] SampleProject.xUnit.TestServiceTests.FailTest [FAIL] + + + + \ No newline at end of file diff --git a/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.annotations b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.annotations new file mode 100644 index 0000000..ac910e7 --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.annotations @@ -0,0 +1,102 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '3 fail, 1 skipped, 2 pass in 0s', + 'summary': + '6 tests\u2002\u2003\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20033 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBAFr' + '0KoLfwkFl7GEJS4UcEsUBnv7spH6N7MZufmCo7V8ol1DePWg/th8SgcGE3YEtLBfacx79' + 'l6KUn0RexwpecolICDxPCLFdFgCqLXuffNOhe51AJXscB1S5rzBEeQFrOb4M8LVJ41VN0' + 'AAAA=\n', + 'annotations': [ + { + 'path': 'SampleProject.xUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/xUnit-netcoreapp3.1-sample.trx\u2003[took 0s]', + 'title': 'FailTest (SampleProject.xUnit.TestServiceTests) failed', + 'raw_details': + 'Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'SampleProject.xUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUn' + 'it\\TestServiceTests.cs:line 65' + }, + { + 'path': 'SampleProject.xUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/xUnit-netcoreapp3.1-sample.trx\u2003[took 0s]', + 'title': + 'TestTheory(expected: False) (SampleProject.xUnit.TestServiceTests) ' + 'failed', + 'raw_details': + 'Assert.Equal() Failure\r\nExpected: False\r\nActual: True at ' + 'SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) ' + 'in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUn' + 'it\\TestServiceTests.cs:line 29' + }, + { + 'path': 'SampleProject.xUnit.TestServiceTests', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'xunit/xUnit-netcoreapp3.1-sample.trx\u2003[took 0s]', + 'title': 'TestThrowingException (SampleProject.xUnit.TestServiceTests) failed', + 'raw_details': + 'System.Exception : Pretty good exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestServi' + 'ce.cs:line 19\r\n at ' + 'SampleProject.xUnit.TestServiceTests.TestThrowingException() in ' + 
'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUn' + 'it\\TestServiceTests.cs:line 54' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'SampleProject.xUnit.TestServiceTests ‑ SkipTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 6 tests, see "Raw output" for the full list of tests.', + 'title': '6 tests found', + 'raw_details': + 'SampleProject.xUnit.TestServiceTests ‑ FailTest\n' + 'SampleProject.xUnit.TestServiceTests ‑ PassingTest\n' + 'SampleProject.xUnit.TestServiceTests ‑ SkipTest\n' + 'SampleProject.xUnit.TestServiceTests ‑ TestTheory(expected: False)\n' + 'SampleProject.xUnit.TestServiceTests ‑ TestTheory(expected: True)\n' + 'SampleProject.xUnit.TestServiceTests ‑ TestThrowingException' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.junit-xml b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.junit-xml new file mode 100644 index 0000000..78f5e1b --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.junit-xml @@ -0,0 +1,24 @@ + + + + + Assert.True() Failure +Expected: True +Actual: False at SampleProject.xUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 65 + + + Assert.Equal() Failure +Expected: False +Actual: True at SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 29 + + + + + + + + System.Exception : Pretty good exception at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.xUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 54 + + + diff --git a/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.results b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.results new file mode 100644 index 0000000..f90b82d --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.results @@ -0,0 +1,113 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=6, + suite_skipped=1, + suite_failures=3, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=6, + skipped=1, + failures=3, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='FailTest', + result='failure', + message='Assert.True() Failure\r\nExpected: True\r\nActual: False', + content='Assert.True() Failure\r\nExpected: True\r\nActual: False at ' + 'SampleProject.xUnit.TestServiceTests.FailTest() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUnit' + '\\TestServiceTests.cs:line 65', + stdout=None, + stderr=None, + time=0.0011482 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + 
test_name='TestTheory(expected: False)', + result='failure', + message='Assert.Equal() Failure\r\nExpected: False\r\nActual: True', + content='Assert.Equal() Failure\r\nExpected: False\r\nActual: True at ' + 'SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUnit' + '\\TestServiceTests.cs:line 29', + stdout=None, + stderr=None, + time=0.0024407 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='SkipTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='PassingTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0008798 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='TestTheory(expected: True)', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0056919 + ), + publish.unittestresults.UnitTestCase( + result_file='xunit/xUnit-netcoreapp3.1-sample.trx', + test_file=None, + line=None, + class_name='SampleProject.xUnit.TestServiceTests', + test_name='TestThrowingException', + result='failure', + message='System.Exception : Pretty good exception', + content='System.Exception : Pretty good exception at ' + 'SampleProject.TestService.GetException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject\\TestService' + '.cs:line 19\r\n at ' + 'SampleProject.xUnit.TestServiceTests.TestThrowingException() in ' + 'C:\\Dev\\LiquidTestReports\\test\\SampleProject\\SampleProject.Tests.xUnit' + '\\TestServiceTests.cs:line 54', + stdout=None, + stderr=None, + time=0.000522 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.trx b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.trx new file mode 100644 index 0000000..a2bb57b --- /dev/null +++ b/python/test/files/trx/xunit/xUnit-netcoreapp3.1-sample.trx @@ -0,0 +1,143 @@ + + + + + + + + + + Running SampleProject.Tests.xUnit tests +This test will fail + + Assert.True() Failure +Expected: True +Actual: False + at SampleProject.xUnit.TestServiceTests.FailTest() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 65 + + + + + + Running SampleProject.Tests.xUnit tests + + Assert.Equal() Failure +Expected: False +Actual: True + at SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 29 + + + + + + Skipped + + + + + Running SampleProject.Tests.xUnit tests + + + + + Running SampleProject.Tests.xUnit tests + + + + + Running SampleProject.Tests.xUnit tests + + System.Exception : Pretty good exception + at SampleProject.TestService.GetException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs:line 19 + at SampleProject.xUnit.TestServiceTests.TestThrowingException() in C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs:line 54 + + + + + + + + + + + + + + + + + + + + + + + 
+ + + + + + + + + + + + + + + + + + + + + + + [xUnit.net 00:00:00.00] xUnit.net VSTest Adapter v2.4.3+1b45f5407b (64-bit .NET Core 3.1.13) +[xUnit.net 00:00:00.38] Discovering: SampleProject.xUnit +[xUnit.net 00:00:00.44] Discovered: SampleProject.xUnit +[xUnit.net 00:00:00.44] Starting: SampleProject.xUnit +[xUnit.net 00:00:00.55] Assert.Equal() Failure +[xUnit.net 00:00:00.55] Expected: False +[xUnit.net 00:00:00.55] Actual: True +[xUnit.net 00:00:00.55] Stack Trace: +[xUnit.net 00:00:00.56] C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs(29,0): at SampleProject.xUnit.TestServiceTests.TestTheory(Boolean expected) +[xUnit.net 00:00:00.56] Output: +[xUnit.net 00:00:00.56] Running SampleProject.Tests.xUnit tests +[xUnit.net 00:00:00.56] Skipped +[xUnit.net 00:00:00.56] System.Exception : Pretty good exception +[xUnit.net 00:00:00.56] Stack Trace: +[xUnit.net 00:00:00.56] C:\Dev\LiquidTestReports\test\SampleProject\SampleProject\TestService.cs(19,0): at SampleProject.TestService.GetException() +[xUnit.net 00:00:00.56] C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs(54,0): at SampleProject.xUnit.TestServiceTests.TestThrowingException() +[xUnit.net 00:00:00.56] Output: +[xUnit.net 00:00:00.56] Running SampleProject.Tests.xUnit tests +[xUnit.net 00:00:00.56] Assert.True() Failure +[xUnit.net 00:00:00.56] Expected: True +[xUnit.net 00:00:00.56] Actual: False +[xUnit.net 00:00:00.56] Stack Trace: +[xUnit.net 00:00:00.56] C:\Dev\LiquidTestReports\test\SampleProject\SampleProject.Tests.xUnit\TestServiceTests.cs(65,0): at SampleProject.xUnit.TestServiceTests.FailTest() +[xUnit.net 00:00:00.56] Output: +[xUnit.net 00:00:00.56] Running SampleProject.Tests.xUnit tests +[xUnit.net 00:00:00.56] This test will fail +[xUnit.net 00:00:00.56] Finished: SampleProject.xUnit +Test 'SampleProject.xUnit.TestServiceTests.SkipTest' was skipped in the test run. 
+ + + + + [xUnit.net 00:00:00.55] SampleProject.xUnit.TestServiceTests.TestTheory(expected: False) [FAIL] + + + [xUnit.net 00:00:00.56] SampleProject.xUnit.TestServiceTests.SkipTest [SKIP] + + + [xUnit.net 00:00:00.56] SampleProject.xUnit.TestServiceTests.TestThrowingException [FAIL] + + + [xUnit.net 00:00:00.56] SampleProject.xUnit.TestServiceTests.FailTest [FAIL] + + + + \ No newline at end of file diff --git a/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.annotations b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.annotations new file mode 100644 index 0000000..f707256 --- /dev/null +++ b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.annotations @@ -0,0 +1,140 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '2 fail, 21 skipped, 2 pass in 26s', + 'summary': + '25 tests\u2002\u2003\u2003\u205f\u20042 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '26s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '\u205f\u20041 suites\u2003\u200321 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n\u205f\u2004' + '1 files\u2004\u2002\u2003\u2003\u205f\u20042 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLcRECy9jCErciGAWqIx3d/GD2O3M28zONZjJ856JinEfIWQYI8oAzhI2HTEtIW1N+8' + 'Lgo1LJfGKBLQmRjZZgfi8TokMyNRmMNhfTXQZvLnqXKHIXlzXl1hUCwXMxP0t+nB5bCu/' + 'iAAAA\n', + 'annotations': [ + { + 'path': 'Prueba_Sistema.SIARAlgorithmTest', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'yami_YAMILEX 2015-10-24 04_18_59.trx\u2003[took 19s]', + 'title': 'EST_INTER_TEMPTest (Prueba_Sistema.SIARAlgorithmTest) failed', + 'raw_details': + 'El método de prueba ' + 'Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest produjo la ' + 'excepción: \nSystem.ArgumentOutOfRangeException: El índice estaba ' + 'fuera del intervalo. Debe ser un valor no negativo e inferior al ' + 'tamaño de la colección.\nNombre del parámetro: index en ' + 'System.ThrowHelper.ThrowArgumentOutOfRangeException()\n en ' + 'System.Collections.Generic.List`1.set_Item(Int32 index, T value)\n ' + ' en ' + 'TMC.Components.TemperatureControl.SIARAlgorithm.EST_INTER_TEMP(Doub' + 'le TAMP, Int32 TEXTERN) en ' + 'E:\\TMC\\TMC\\Components\\TemperatureControl\\SIARAlgorithms.cs:línea' + ' 363\n en Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest() ' + 'en E:\\TMC\\Prueba_Sistema\\SIARAlgorithmTest.cs:línea 199' + }, + { + 'path': 'Prueba_Sistema.SIARAlgorithmTest', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'yami_YAMILEX 2015-10-24 04_18_59.trx\u2003[took 2s]', + 'title': 'EST_LAMB_TEMPTest (Prueba_Sistema.SIARAlgorithmTest) failed', + 'raw_details': + 'El método de prueba ' + 'Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest produjo la ' + 'excepción: \nSystem.ArgumentOutOfRangeException: El índice estaba ' + 'fuera del intervalo. 
Debe ser un valor no negativo e inferior al ' + 'tamaño de la colección.\nNombre del parámetro: index en ' + 'System.ThrowHelper.ThrowArgumentOutOfRangeException()\n en ' + 'System.Collections.Generic.List`1.get_Item(Int32 index)\n en ' + 'TMC.Components.TemperatureControl.SIARAlgorithm.EST_LAMB_TEMP(Doubl' + 'e TINTPP, Double TAMP, Int32 TEXTERN, Int32 CRPBuscar) en ' + 'E:\\TMC\\TMC\\Components\\TemperatureControl\\SIARAlgorithms.cs:línea' + ' 474\n en Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest() ' + 'en E:\\TMC\\Prueba_Sistema\\SIARAlgorithmTest.cs:línea 217' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There are 21 skipped tests, see "Raw output" for the full list of ' + 'skipped tests.', + 'title': '21 skipped tests found', + 'raw_details': + 'Prueba_Sistema.SIARAlgorithmTest ‑ CompareTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ CostoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ EST_SEG_TEMPTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ EspesorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ FlujoCaloricoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetAlphaExternoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetComposicionTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetDiseñoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetSpesorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ LamdaTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ MaterialesTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ PerdidaTotalTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ PesoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ ProjectObjectTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ ProjectTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ SIARAlgorithmConstructorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ SiSParcialTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ SpesorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ TDataSetTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ TempExternaTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ VolumenTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 25 tests, see "Raw output" for the full list of tests.', + 'title': '25 tests found', + 'raw_details': + 'Prueba_Sistema.SIARAlgorithmTest ‑ ClearListTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ CompareTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ CostoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ EST_INTER_TEMPTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ EST_LAMB_TEMPTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ EST_SEG_TEMPTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ EspesorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ FlujoCaloricoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetAlphaExternoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetComposicionTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetDiseñoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ GetSpesorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ LamdaTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ MaterialesTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ PerdidaTotalTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ PesoTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ ProjectObjectTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ ProjectTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ SIARAlgorithmConstructorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ SiSParcialTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ SpesorTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ TDataSetTest\n' + 'Prueba_Sistema.SIARAlgorithmTest ‑ TempExternaTest\n' + 
'Prueba_Sistema.SIARAlgorithmTest ‑ VolumenTest\n' + 'Prueba_Sistema.UnitTest1 ‑ TestMethod1' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.junit-xml b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.junit-xml new file mode 100644 index 0000000..6ca34d1 --- /dev/null +++ b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.junit-xml @@ -0,0 +1,88 @@ + + + + + + UTA007: la firma del método CompareTest definido en la clase Prueba_Sistema.SIARAlgorithmTest no es correcta. El método de prueba marcado con el atributo [TestMethod] debe ser no estático, público, no debe devolver ningún valor ni tomar ningún parámetro. Por ejemplo: public void Test.Class1.Test(). + + + + El método de prueba Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest produjo la excepción: +System.ArgumentOutOfRangeException: El índice estaba fuera del intervalo. Debe ser un valor no negativo e inferior al tamaño de la colección. +Nombre del parámetro: index en System.ThrowHelper.ThrowArgumentOutOfRangeException() + en System.Collections.Generic.List`1.set_Item(Int32 index, T value) + en TMC.Components.TemperatureControl.SIARAlgorithm.EST_INTER_TEMP(Double TAMP, Int32 TEXTERN) en E:\TMC\TMC\Components\TemperatureControl\SIARAlgorithms.cs:línea 363 + en Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest() en E:\TMC\Prueba_Sistema\SIARAlgorithmTest.cs:línea 199 + + + + El método de prueba Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest produjo la excepción: +System.ArgumentOutOfRangeException: El índice estaba fuera del intervalo. Debe ser un valor no negativo e inferior al tamaño de la colección. +Nombre del parámetro: index en System.ThrowHelper.ThrowArgumentOutOfRangeException() + en System.Collections.Generic.List`1.get_Item(Int32 index) + en TMC.Components.TemperatureControl.SIARAlgorithm.EST_LAMB_TEMP(Double TINTPP, Double TAMP, Int32 TEXTERN, Int32 CRPBuscar) en E:\TMC\TMC\Components\TemperatureControl\SIARAlgorithms.cs:línea 474 + en Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest() en E:\TMC\Prueba_Sistema\SIARAlgorithmTest.cs:línea 217 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.results b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.results new file mode 100644 index 0000000..f7c764c --- /dev/null +++ b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.results @@ -0,0 +1,387 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=25, + suite_skipped=21, + suite_failures=2, + suite_errors=0, + suite_time=26, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='MSTestSuite', + tests=25, + skipped=21, + failures=2, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.UnitTest1', + test_name='TestMethod1', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0099709 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='CompareTest', + result='skipped', + message='UTA007: la firma del método CompareTest definido en la clase ' + 'Prueba_Sistema.SIARAlgorithmTest no es correcta. 
El método de prueba ' + 'marcado con el atributo [TestMethod] debe ser no estático, público, ' + 'no debe devolver ningún valor ni tomar ningún parámetro. Por ' + 'ejemplo: public void Test.Class1.Test().', + content='UTA007: la firma del método CompareTest definido en la clase ' + 'Prueba_Sistema.SIARAlgorithmTest no es correcta. El método de prueba ' + 'marcado con el atributo [TestMethod] debe ser no estático, público, ' + 'no debe devolver ningún valor ni tomar ningún parámetro. Por ' + 'ejemplo: public void Test.Class1.Test().', + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='ClearListTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.4619928 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='EST_INTER_TEMPTest', + result='failure', + message='El método de prueba ' + 'Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest produjo la ' + 'excepción: \nSystem.ArgumentOutOfRangeException: El índice estaba ' + 'fuera del intervalo. Debe ser un valor no negativo e inferior al ' + 'tamaño de la colección.\nNombre del parámetro: index', + content='El método de prueba ' + 'Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest produjo la ' + 'excepción: \nSystem.ArgumentOutOfRangeException: El índice estaba ' + 'fuera del intervalo. Debe ser un valor no negativo e inferior al ' + 'tamaño de la colección.\nNombre del parámetro: index en ' + 'System.ThrowHelper.ThrowArgumentOutOfRangeException()\n en ' + 'System.Collections.Generic.List`1.set_Item(Int32 index, T value)\n ' + 'en ' + 'TMC.Components.TemperatureControl.SIARAlgorithm.EST_INTER_TEMP(Double' + ' TAMP, Int32 TEXTERN) en ' + 'E:\\TMC\\TMC\\Components\\TemperatureControl\\SIARAlgorithms.cs:línea' + ' 363\n en Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest() en ' + 'E:\\TMC\\Prueba_Sistema\\SIARAlgorithmTest.cs:línea 199\n', + stdout=None, + stderr=None, + time=19.039413 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='EST_LAMB_TEMPTest', + result='failure', + message='El método de prueba ' + 'Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest produjo la ' + 'excepción: \nSystem.ArgumentOutOfRangeException: El índice estaba ' + 'fuera del intervalo. Debe ser un valor no negativo e inferior al ' + 'tamaño de la colección.\nNombre del parámetro: index', + content='El método de prueba ' + 'Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest produjo la ' + 'excepción: \nSystem.ArgumentOutOfRangeException: El índice estaba ' + 'fuera del intervalo. 
Debe ser un valor no negativo e inferior al ' + 'tamaño de la colección.\nNombre del parámetro: index en ' + 'System.ThrowHelper.ThrowArgumentOutOfRangeException()\n en ' + 'System.Collections.Generic.List`1.get_Item(Int32 index)\n en ' + 'TMC.Components.TemperatureControl.SIARAlgorithm.EST_LAMB_TEMP(Double ' + 'TINTPP, Double TAMP, Int32 TEXTERN, Int32 CRPBuscar) en ' + 'E:\\TMC\\TMC\\Components\\TemperatureControl\\SIARAlgorithms.cs:línea' + ' 474\n en Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest() en ' + 'E:\\TMC\\Prueba_Sistema\\SIARAlgorithmTest.cs:línea 217\n', + stdout=None, + stderr=None, + time=2.5091335 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='EST_SEG_TEMPTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.6492659 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='GetAlphaExternoTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='GetComposicionTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='GetDiseñoTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='GetSpesorTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='LamdaTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='SiSParcialTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='CostoTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='EspesorTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + 
test_name='FlujoCaloricoTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='MaterialesTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='PerdidaTotalTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='PesoTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='ProjectTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='ProjectObjectTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='SpesorTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='TDataSetTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='TempExternaTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='VolumenTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ), + publish.unittestresults.UnitTestCase( + result_file='yami_YAMILEX 2015-10-24 04_18_59.trx', + test_file=None, + line=None, + class_name='Prueba_Sistema.SIARAlgorithmTest', + test_name='SIARAlgorithmConstructorTest', + result='skipped', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.trx b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.trx new file mode 100755 index 0000000..7967f27 --- /dev/null +++ b/python/test/files/trx/yami_YAMILEX 2015-10-24 04_18_59.trx @@ -0,0 +1,249 @@ + + + + Esta es la configuración de pruebas predeterminada para una ejecución de pruebas local. 
+ + + + + + + + + + + + + + + + + + El proceso del host de pruebas terminó inesperadamente. + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + UTA007: la firma del método CompareTest definido en la clase Prueba_Sistema.SIARAlgorithmTest no es correcta. El método de prueba marcado con el atributo [TestMethod] debe ser no estático, público, no debe devolver ningún valor ni tomar ningún parámetro. Por ejemplo: public void Test.Class1.Test(). + + + + + + + + + El método de prueba Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest produjo la excepción: +System.ArgumentOutOfRangeException: El índice estaba fuera del intervalo. Debe ser un valor no negativo e inferior al tamaño de la colección. +Nombre del parámetro: index + en System.ThrowHelper.ThrowArgumentOutOfRangeException() + en System.Collections.Generic.List`1.set_Item(Int32 index, T value) + en TMC.Components.TemperatureControl.SIARAlgorithm.EST_INTER_TEMP(Double TAMP, Int32 TEXTERN) en E:\TMC\TMC\Components\TemperatureControl\SIARAlgorithms.cs:línea 363 + en Prueba_Sistema.SIARAlgorithmTest.EST_INTER_TEMPTest() en E:\TMC\Prueba_Sistema\SIARAlgorithmTest.cs:línea 199 + + + + + + + + El método de prueba Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest produjo la excepción: +System.ArgumentOutOfRangeException: El índice estaba fuera del intervalo. Debe ser un valor no negativo e inferior al tamaño de la colección. +Nombre del parámetro: index + en System.ThrowHelper.ThrowArgumentOutOfRangeException() + en System.Collections.Generic.List`1.get_Item(Int32 index) + en TMC.Components.TemperatureControl.SIARAlgorithm.EST_LAMB_TEMP(Double TINTPP, Double TAMP, Int32 TEXTERN, Int32 CRPBuscar) en E:\TMC\TMC\Components\TemperatureControl\SIARAlgorithms.cs:línea 474 + en Prueba_Sistema.SIARAlgorithmTest.EST_LAMB_TEMPTest() en E:\TMC\Prueba_Sistema\SIARAlgorithmTest.cs:línea 217 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/update_expectations.sh b/python/test/files/update_expectations.sh new file mode 100755 index 0000000..d9670b6 --- /dev/null +++ b/python/test/files/update_expectations.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +base=$(dirname "$0") + +python3 $base/../test_junit.py +python3 $base/../test_nunit.py +python3 $base/../test_xunit.py +python3 $base/../test_trx.py +python3 $base/../test_mocha.py +python3 $base/../test_dart.py diff --git a/python/test/files/xml/empty.exception b/python/test/files/xml/empty.exception new file mode 100644 index 0000000..f1984e0 --- /dev/null +++ b/python/test/files/xml/empty.exception @@ -0,0 +1 @@ +ParseError: file='files/xml/empty.xml', message='File is empty.', line=None, column=None, exception=Exception('File is empty.') \ No newline at end of file diff --git a/python/test/files/xml/empty.xml b/python/test/files/xml/empty.xml new file mode 100644 index 0000000..e69de29 diff --git a/python/test/files/xml/non-xml.exception b/python/test/files/xml/non-xml.exception new file mode 100644 index 0000000..8cb5579 --- /dev/null +++ b/python/test/files/xml/non-xml.exception @@ -0,0 +1 @@ +ParseError: file='files/xml/non-xml.xml', message="Start tag expected, '<' not found, line 1, column 1 (non-xml.xml, line 1)", line=None, column=None, 
exception=XMLSyntaxError("Start tag expected, '<' not found, line 1, column 1") \ No newline at end of file diff --git a/python/test/files/xml/non-xml.xml b/python/test/files/xml/non-xml.xml new file mode 100644 index 0000000..0372789 --- /dev/null +++ b/python/test/files/xml/non-xml.xml @@ -0,0 +1 @@ +this is not an xml file \ No newline at end of file diff --git a/python/test/files/xml/not-existing.exception b/python/test/files/xml/not-existing.exception new file mode 100644 index 0000000..b586ffe --- /dev/null +++ b/python/test/files/xml/not-existing.exception @@ -0,0 +1 @@ +ParseError: file='files/xml/not-existing.xml', message='File does not exist.', line=None, column=None, exception=FileNotFoundError('File does not exist.') \ No newline at end of file diff --git a/python/test/files/xunit/README.md b/python/test/files/xunit/README.md new file mode 100644 index 0000000..c83fd6a --- /dev/null +++ b/python/test/files/xunit/README.md @@ -0,0 +1 @@ +[mstest/fixie.xml](https://raw.githubusercontent.com/fixie/fixie/42b43dc6cc57476958eea8b507aa9d0d72cedae6/src/Fixie.Tests/Reports/XUnitXmlReport.xml) diff --git a/python/test/files/xunit/mstest/fixie.annotations b/python/test/files/xunit/mstest/fixie.annotations new file mode 100644 index 0000000..2e136db --- /dev/null +++ b/python/test/files/xunit/mstest/fixie.annotations @@ -0,0 +1,98 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '3 fail, 1 skipped, 1 pass in 8s', + 'summary': + '1 files\u2004\u20032 suites\u2004\u2003\u20028s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '5 tests\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '1 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '3 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n7 runs\u2006\u2003' + '3 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '1 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '3 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMQQqAIBBFr' + 'yKuW1QSRZcJsaKh0hh1Fd29Scxs998b5p18hm2yvGdVwbj14ALUBKNH6cBowo6QDu45Ne' + '8erFcqPkaxwvETs4SNhEhiQjRIpiSDXj+9Ns43JxJ/tcBZLHDeUmbfwRHExewi+XUDvcl' + 'Zbt0AAAA=\n', + 'annotations': [ + { + 'path': '[genericTestClass]', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/fixie.xml\u2003[took 1s]', + 'title': '1 out of 3 runs failed: ShouldBeString ([genericTestClass])', + 'raw_details': + 'Expected: System.String\nActual: System.Int32 at ' + '[genericTestClassForStackTrace].ShouldBeString[T](T ' + 'genericArgument) in [fileLocation]:line #' + }, + { + 'path': '[testClass]', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/fixie.xml\u2003[took 1s]', + 'title': 
'Fail ([testClass]) failed', + 'raw_details': + "'Fail' failed! at [testClassForStackTrace].Fail() in " + "[fileLocation]:line #" + }, + { + 'path': '[testClass]', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/fixie.xml\u2003[took 1s]', + 'title': 'FailByAssertion ([testClass]) failed', + 'raw_details': + 'Expected: 2\nActual: 1 at ' + '[testClassForStackTrace].FailByAssertion() in [fileLocation]:line ' + '#' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': '[testClass] ‑ Skip' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 5 tests, see "Raw output" for the full list of tests.', + 'title': '5 tests found', + 'raw_details': + '[genericTestClass] ‑ ShouldBeString\n[testClass] ‑ Fail\n' + '[testClass] ‑ FailByAssertion\n[testClass] ‑ Pass\n[testClass] ‑ ' + 'Skip' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/fixie.junit-xml b/python/test/files/xunit/mstest/fixie.junit-xml new file mode 100644 index 0000000..dc08dec --- /dev/null +++ b/python/test/files/xunit/mstest/fixie.junit-xml @@ -0,0 +1,26 @@ + + + + + + + + Expected: System.String +Actual: System.Int32 at [genericTestClassForStackTrace].ShouldBeString[T](T genericArgument) in [fileLocation]:line # + + + + + 'Fail' failed! at [testClassForStackTrace].Fail() in [fileLocation]:line # + + + Expected: 2 +Actual: 1 at [testClassForStackTrace].FailByAssertion() in [fileLocation]:line # + + + + + + + + diff --git a/python/test/files/xunit/mstest/fixie.results b/python/test/files/xunit/mstest/fixie.results new file mode 100644 index 0000000..ceaad37 --- /dev/null +++ b/python/test/files/xunit/mstest/fixie.results @@ -0,0 +1,127 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=2, + suite_tests=7, + suite_skipped=1, + suite_failures=3, + suite_errors=0, + suite_time=8, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='[genericTestClass]', + tests=3, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ), + publish.unittestresults.UnitTestSuite( + name='[testClass]', + tests=4, + skipped=1, + failures=2, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/fixie.xml', + test_file=None, + line=None, + class_name='[genericTestClass]', + test_name='ShouldBeString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.234 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/fixie.xml', + test_file=None, + line=None, + class_name='[genericTestClass]', + test_name='ShouldBeString', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.234 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/fixie.xml', + test_file=None, + line=None, + class_name='[genericTestClass]', + test_name='ShouldBeString', + result='failure', + message='Expected: System.String\nActual: System.Int32', + content='Expected: System.String\nActual: System.Int32 at ' + '[genericTestClassForStackTrace].ShouldBeString[T](T genericArgument) ' + 'in [fileLocation]:line #', + stdout=None, + stderr=None, + time=1.234 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/fixie.xml', + 
test_file=None, + line=None, + class_name='[testClass]', + test_name='Fail', + result='failure', + message="'Fail' failed!", + content="'Fail' failed! at [testClassForStackTrace].Fail() in " + "[fileLocation]:line #", + stdout=None, + stderr=None, + time=1.234 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/fixie.xml', + test_file=None, + line=None, + class_name='[testClass]', + test_name='FailByAssertion', + result='failure', + message='Expected: 2\nActual: 1', + content='Expected: 2\nActual: 1 at ' + '[testClassForStackTrace].FailByAssertion() in [fileLocation]:line #', + stdout=None, + stderr=None, + time=1.234 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/fixie.xml', + test_file=None, + line=None, + class_name='[testClass]', + test_name='Pass', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.234 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/fixie.xml', + test_file=None, + line=None, + class_name='[testClass]', + test_name='Skip', + result='skipped', + message='⚠ Skipped with attribute.', + content=None, + stdout=None, + stderr=None, + time=1.234 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/fixie.xml b/python/test/files/xunit/mstest/fixie.xml new file mode 100644 index 0000000..77715ea --- /dev/null +++ b/python/test/files/xunit/mstest/fixie.xml @@ -0,0 +1,35 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase1.annotations b/python/test/files/xunit/mstest/jenkinsci/testcase1.annotations new file mode 100644 index 0000000..d54efcc --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase1.annotations @@ -0,0 +1,66 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 1 skipped, 1 pass in 0s', + 'summary': + '3 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLTR2XsYQhLiRj1mgMt5dQFDo5s1M3kUlKOHoQqaBUBfAf7AFZB6siThGjINP01zz6g' + 'Ln5VuKA86ukAxUVwhEi0WIwVRfiq3u5d+WuZFlbl3cag0+QknE7YzeD2gV0DndAAAA\n', + 'annotations': [ + { + 'path': 'MyProject.Tests.SampleFact', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/jenkinsci/testcase1.xml\u2003[took 0s]', + 'title': 'FailedTest (MyProject.Tests.SampleFact) failed', + 'raw_details': + 'Assert.True() Failure at MyProject.Tests.SampleFact.FailedTest() ' + 'in ' + 'c:\\Jenkins\\jobs\\my-project\\workspace\\MyProject\\MyProject.Tests\\Samp' + 'leFact.cs:line 16' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" 
for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'MyProject.Tests.SampleFact ‑ SkippedTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': + 'MyProject.Tests.SampleFact ‑ FailedTest\n' + 'MyProject.Tests.SampleFact ‑ SkippedTest\n' + 'MyProject.Tests.SampleFact ‑ SuccessfulTest' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase1.junit-xml b/python/test/files/xunit/mstest/jenkinsci/testcase1.junit-xml new file mode 100644 index 0000000..6a0d81a --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase1.junit-xml @@ -0,0 +1,14 @@ + + + + + + Assert.True() Failure at MyProject.Tests.SampleFact.FailedTest() in c:\Jenkins\jobs\my-project\workspace\MyProject\MyProject.Tests\SampleFact.cs:line 16 + + + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase1.results b/python/test/files/xunit/mstest/jenkinsci/testcase1.results new file mode 100644 index 0000000..8d2f248 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase1.results @@ -0,0 +1,65 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=3, + suite_skipped=1, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Test collection for MyProject.Tests.SampleFact', + tests=3, + skipped=1, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase1.xml', + test_file=None, + line=None, + class_name='MyProject.Tests.SampleFact', + test_name='FailedTest', + result='failure', + message='Assert.True() Failure', + content='Assert.True() Failure at MyProject.Tests.SampleFact.FailedTest() ' + 'in ' + 'c:\\Jenkins\\jobs\\my-project\\workspace\\MyProject\\MyProject.Tests\\Sample' + 'Fact.cs:line 16', + stdout=None, + stderr=None, + time=0.014 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase1.xml', + test_file=None, + line=None, + class_name='MyProject.Tests.SampleFact', + test_name='SuccessfulTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.01 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase1.xml', + test_file=None, + line=None, + class_name='MyProject.Tests.SampleFact', + test_name='SkippedTest', + result='skipped', + message='On Purpose', + content=None, + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase1.xml b/python/test/files/xunit/mstest/jenkinsci/testcase1.xml new file mode 100644 index 0000000..cc9cab4 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase1.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase2.annotations b/python/test/files/xunit/mstest/jenkinsci/testcase2.annotations new file mode 100644 index 0000000..2f3a6d0 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase2.annotations @@ -0,0 +1,48 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': 'All 1 tests pass in 0s', + 'summary': + '1 files\u2004\u20031 
suites\u2004\u2003\u20020s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 tests\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '0 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n3 runs\u2006\u2003' + '1 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '1 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '1 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBBEr' + '0KoLTR2XoYQhLiRj1mWynh3UZBgN29m8k5uwOrIFzYNjMcE1GBNKAmCzzhmzAN905tFTE' + 'r9ix2O/i2MBPsrNGLA2mDyj2+usdcVLrbGVda4d6ngHFCGmljcJL9uwvXP6N0AAAA=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There is 1 test, see "Raw output" for the name of the test.', + 'title': '1 test found', + 'raw_details': 'MyProject.Tests.SampleFact ‑ SuccessfulTestWithTrait' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase2.junit-xml b/python/test/files/xunit/mstest/jenkinsci/testcase2.junit-xml new file mode 100644 index 0000000..7f95608 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase2.junit-xml @@ -0,0 +1,8 @@ + + + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase2.results b/python/test/files/xunit/mstest/jenkinsci/testcase2.results new file mode 100644 index 0000000..23e4e49 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase2.results @@ -0,0 +1,36 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=3, + suite_skipped=1, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Test collection for MyProject.Tests.SampleFact', + tests=1, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase2.xml', + test_file=None, + line=None, + class_name='MyProject.Tests.SampleFact', + test_name='SuccessfulTestWithTrait', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=35.5236617 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase2.xml b/python/test/files/xunit/mstest/jenkinsci/testcase2.xml new file mode 100644 index 0000000..ef72019 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase2.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase3.annotations b/python/test/files/xunit/mstest/jenkinsci/testcase3.annotations new file mode 100644 index 0000000..c8a0e31 --- /dev/null +++ 
b/python/test/files/xunit/mstest/jenkinsci/testcase3.annotations @@ -0,0 +1,67 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 1 skipped, 1 pass in 4m 48s', + 'summary': + '3 tests\u2002\u2003\u20031 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '4m 48s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20031 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0KoLfw0xMsYgho3opgFKuPdXRAUuzezmzn5DHqyvGdNxbj14F4YPUoHZidshSBBJxeOXd' + '6D9Uql7yRWOH5ilqB/YkI0SKYmg37PvTDL3MNfLXIRi1y2lNk2cARpMbtIft14m53n3wA' + 'AAA==\n', + 'annotations': [ + { + 'path': 'MyProject.Tests.SampleFact', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/jenkinsci/testcase3.xml\u2003[took 14s]', + 'title': 'FailedTest (MyProject.Tests.SampleFact) failed', + 'raw_details': + 'Assert.True() Failure at MyProject.Tests.SampleFact.FailedTest() ' + 'in ' + 'c:\\Jenkins\\jobs\\my-project\\workspace\\MyProject\\MyProject.Tests\\Samp' + 'leFact.cs:line 16' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': + 'There is 1 skipped test, see "Raw output" for the name of the ' + 'skipped test.', + 'title': '1 skipped test found', + 'raw_details': 'MyProject.Tests.SampleFact ‑ SkippedTest' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': + 'MyProject.Tests.SampleFact ‑ FailedTest\n' + 'MyProject.Tests.SampleFact ‑ SkippedTest\n' + 'MyProject.Tests.SampleFact ‑ SuccessfulTest' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase3.junit-xml b/python/test/files/xunit/mstest/jenkinsci/testcase3.junit-xml new file mode 100644 index 0000000..d4596ac --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase3.junit-xml @@ -0,0 +1,14 @@ + + + + + + Assert.True() Failure at MyProject.Tests.SampleFact.FailedTest() in c:\Jenkins\jobs\my-project\workspace\MyProject\MyProject.Tests\SampleFact.cs:line 16 + + + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase3.results b/python/test/files/xunit/mstest/jenkinsci/testcase3.results new file mode 100644 index 0000000..b09af88 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase3.results @@ -0,0 +1,65 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=3, + suite_skipped=1, + suite_failures=1, + suite_errors=0, + suite_time=288, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Test collection for MyProject.Tests.SampleFact', + tests=3, + skipped=1, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + 
publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase3.xml', + test_file=None, + line=None, + class_name='MyProject.Tests.SampleFact', + test_name='FailedTest', + result='failure', + message='Assert.True() Failure', + content='Assert.True() Failure at MyProject.Tests.SampleFact.FailedTest() ' + 'in ' + 'c:\\Jenkins\\jobs\\my-project\\workspace\\MyProject\\MyProject.Tests\\Sample' + 'Fact.cs:line 16', + stdout=None, + stderr=None, + time=14.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase3.xml', + test_file=None, + line=None, + class_name='MyProject.Tests.SampleFact', + test_name='SuccessfulTest', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=10.0 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase3.xml', + test_file=None, + line=None, + class_name='MyProject.Tests.SampleFact', + test_name='SkippedTest', + result='skipped', + message='On Purpose', + content=None, + stdout=None, + stderr=None, + time=0.0 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase3.xml b/python/test/files/xunit/mstest/jenkinsci/testcase3.xml new file mode 100644 index 0000000..21a9c53 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase3.xml @@ -0,0 +1,18 @@ + + + + + + + + + + + + + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase4.annotations b/python/test/files/xunit/mstest/jenkinsci/testcase4.annotations new file mode 100644 index 0000000..e49dc25 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase4.annotations @@ -0,0 +1,29 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'No tests found', + 'summary': + '0 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMOw6AIBAFr' + '0K2ttDWyxiCEDfyMbtQGe8uQaNL92ZeMic49JZhVtOggAvmD9ZCOmOKFceK9cgs98LFmF' + '7seHTCafSdsESJXkMlspgy9/BfayxijWXLpBAwV3iX4k3DdQOuuvQ/3QAAAA==\n', + 'annotations': [] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase4.junit-xml b/python/test/files/xunit/mstest/jenkinsci/testcase4.junit-xml new file mode 100644 index 0000000..80b072f --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase4.junit-xml @@ -0,0 +1,4 @@ + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase4.results b/python/test/files/xunit/mstest/jenkinsci/testcase4.results new file mode 100644 index 0000000..fe3178a --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase4.results @@ -0,0 +1,23 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=0, + suite_skipped=0, + suite_failures=0, 
+ suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='E:\\Jenkins2\\workspace\\KundM.Api.Csv\\KundM.Api.Csv-Build\\build\\' + 'bin\\Debug\\KundM.Api.Csv.UnitTest.DLL', + tests=0, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase4.xml b/python/test/files/xunit/mstest/jenkinsci/testcase4.xml new file mode 100644 index 0000000..c43c2bc --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase4.xml @@ -0,0 +1,8 @@ + + + + + + \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase5.annotations b/python/test/files/xunit/mstest/jenkinsci/testcase5.annotations new file mode 100644 index 0000000..6e4d68d --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase5.annotations @@ -0,0 +1,44 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'success', + 'output': { + 'title': 'All 5 tests pass in 1m 32s', + 'summary': + '5 tests\u2002\u2003\u20035 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '1m 32s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20030 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMwQqAIBAFf' + '0U8d6igQ/1MiBktpcaqp+jf20pLb2/mwRx8hk05PrCmYtwF8B9MAYUHawj7lpgef39d2q' + 'MLUpZihZ1E/YlZwFYIhWgxGgwm9e6Z517+aw9nsYfzlrRagyeIi7lF8PMCmAJ3I94AAAA' + '=\n', + 'annotations': [ + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 5 tests, see "Raw output" for the full list of tests.', + 'title': '5 tests found', + 'raw_details': + 'UnitTest.UnitTest ‑ CheckIfAllFilesAreThere\nUnitTest.UnitTest ‑ ' + 'CheckIfAppIsRunning\nUnitTest.UnitTest ‑ CheckIfFilesWereSinged\n' + 'UnitTest.UnitTest ‑ InstallApplication\nUnitTest.UnitTest ‑ ' + 'UninstallApplicationIfInstalled' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase5.junit-xml b/python/test/files/xunit/mstest/jenkinsci/testcase5.junit-xml new file mode 100644 index 0000000..d5f5b09 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase5.junit-xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase5.results b/python/test/files/xunit/mstest/jenkinsci/testcase5.results new file mode 100644 index 0000000..fae84be --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase5.results @@ -0,0 +1,88 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=5, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=92, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Test collection for UnitTest.UnitTest', + tests=5, + skipped=0, + failures=0, + errors=0, + stdout=None, + stderr=None + ) + ], + 
cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase5.xml', + test_file=None, + line=None, + class_name='UnitTest.UnitTest', + test_name='UninstallApplicationIfInstalled', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=48.3914131 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase5.xml', + test_file=None, + line=None, + class_name='UnitTest.UnitTest', + test_name='InstallApplication', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=33.1446488 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase5.xml', + test_file=None, + line=None, + class_name='UnitTest.UnitTest', + test_name='CheckIfAllFilesAreThere', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.0399824 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase5.xml', + test_file=None, + line=None, + class_name='UnitTest.UnitTest', + test_name='CheckIfFilesWereSinged', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=4.7233194 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase5.xml', + test_file=None, + line=None, + class_name='UnitTest.UnitTest', + test_name='CheckIfAppIsRunning', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=3.0279026 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase5.xml b/python/test/files/xunit/mstest/jenkinsci/testcase5.xml new file mode 100644 index 0000000..3ab35ec --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase5.xml @@ -0,0 +1,44 @@ + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase6.annotations b/python/test/files/xunit/mstest/jenkinsci/testcase6.annotations new file mode 100644 index 0000000..d706dea --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase6.annotations @@ -0,0 +1,50 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail in 0s', + 'summary': + '1 tests\u2002\u2003\u20030 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003\u2003' + '0s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '1 suites\u2003\u20030 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\n1 ' + 'files\u2004\u2002\u2003\u20031 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/1WMSw6AIAxEr' + '0JYu9CtlzEEITbyMS2sjHcXERR28zrTd3INRhGf2TQwThHCB2tEEcC7hGPCVIRa5bxQlL' + 'ItF9rh6A5agOleFKLHMsHoqu+Jre7l35a5kWVuXdJbCyFBSYw2wa8bniF3vN0AAAA=\n', + 'annotations': [ + { + 'path': '/', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/jenkinsci/testcase6.xml\u2003[took 36s]', + 'title': 'Unknown test failed', + 'raw_details': + 'OK: TestProcess.Execution.ExitCode == 0\r\nFAIL: Output differs ' + 
'from reference.\r\n Left: data\\ref\r\n Right: results\\out' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There is 1 test, see "Raw output" for the name of the test.', + 'title': '1 test found', + 'raw_details': 'Unknown test' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase6.junit-xml b/python/test/files/xunit/mstest/jenkinsci/testcase6.junit-xml new file mode 100644 index 0000000..a16cb19 --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase6.junit-xml @@ -0,0 +1,15 @@ + + + + + + OK: TestProcess.Execution.ExitCode == 0 +FAIL: Output differs from reference. + Left: data\ref + Right: results\out + + + + + + diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase6.results b/python/test/files/xunit/mstest/jenkinsci/testcase6.results new file mode 100644 index 0000000..8a5c78e --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase6.results @@ -0,0 +1,40 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=1, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='collection1', + tests=1, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/jenkinsci/testcase6.xml', + test_file=None, + line=None, + class_name='', + test_name='', + result='failure', + message='OK: TestProcess.Execution.ExitCode == 0\r\nFAIL: Output differs from ' + 'reference.\r\n Left: data\\ref\r\n Right: results\\out\r\n \r\n ' + ' ', + content='OK: TestProcess.Execution.ExitCode == 0\r\nFAIL: Output differs from ' + 'reference.\r\n Left: data\\ref\r\n Right: results\\out\r\n \r\n ' + ' ', + stdout=None, + stderr=None, + time=36.7820544 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/jenkinsci/testcase6.xml b/python/test/files/xunit/mstest/jenkinsci/testcase6.xml new file mode 100644 index 0000000..30f598a --- /dev/null +++ b/python/test/files/xunit/mstest/jenkinsci/testcase6.xml @@ -0,0 +1,17 @@ + + + + + + + OK: TestProcess.Execution.ExitCode == 0 +FAIL: Output differs from reference. 
+ Left: data\ref + Right: results\out + + + + + + + \ No newline at end of file diff --git a/python/test/files/xunit/mstest/pickles.annotations b/python/test/files/xunit/mstest/pickles.annotations new file mode 100644 index 0000000..d2f61b8 --- /dev/null +++ b/python/test/files/xunit/mstest/pickles.annotations @@ -0,0 +1,89 @@ +[ + { + 'name': 'Test Results', + 'head_sha': 'commit sha', + 'status': 'completed', + 'conclusion': 'failure', + 'output': { + 'title': '1 fail, 2 pass in 0s', + 'summary': + '1 files\u2004\u20031 suites\u2004\u2003\u20020s ' + '[:stopwatch:](https://github.com/step-security/publish-unit-test-resu' + 'lt-action/blob/VERSION/README.md#the-symbols "duration of all tests")\n' + '3 tests\u20032 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '1 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n4 runs\u2006\u2003' + '3 ' + '[:heavy_check_mark:](https://github.com/step-security/publish-unit-te' + 'st-result-action/blob/VERSION/README.md#the-symbols "passed tests")\u2003' + '0 ' + '[:zzz:](https://github.com/step-security/publish-unit-test-result-act' + 'ion/blob/VERSION/README.md#the-symbols "skipped / disabled tests")\u2003' + '1 ' + '[:x:](https://github.com/step-security/publish-unit-test-result-actio' + 'n/blob/VERSION/README.md#the-symbols "failed tests")\n\nResults for ' + 'commit commit s.\n\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MOw6AIBBEr' + '0KoLVSsvIwhqHEjH7NAZby7qwLSzZuZvJOvoBfPR9Y1jPsIocAcUQZwlrAlpCE8k8h58l' + 'EpKvq/2OGo39MqQSfbVyyIDtMFo318Q4pZJwr/tpcr2cu1SzljIBCkxPwm+XUDYSIL8t0' + 'AAAA=\n', + 'annotations': [ + { + 'path': 'Pickles.TestHarness.xUnit.AdditionFeature', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'warning', + 'message': 'mstest/pickles.xml\u2003[took 0s]', + 'title': + 'FailToAddTwoNumbers (Pickles.TestHarness.xUnit.AdditionFeature) ' + 'failed', + 'raw_details': + '\n System.InvalidOperationException : This is a fake ' + 'failure message\n \n at ' + 'Pickles.TestHarness.xUnit.Steps.ThenTheResultShouldBePass(Int32 ' + 'result) in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.xUnit\\Steps.cs:line 26\n at lambda_method(Closure , ' + 'IContextManager , Int32 )\n at ' + 'TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManag' + 'er contextManager, Object[] arguments, ITestTracer testTracer, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextMan' + 'ager contextManager, ITestTracer testTracer, Object[] arguments, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMat' + 'ch(BindingMatch match, Object[] arguments)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(St' + 'epArgs stepArgs)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastSte' + 'p()\n at ' + 'TechTalk.SpecFlow.TestRunner.CollectScenarioErrors()\n at ' + 'Pickles.TestHarness.xUnit.AdditionFeature.ScenarioCleanup() in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.xUnit\\Addition.feature.cs:line 0\n at ' + 'Pickles.TestHarness.xUnit.AdditionFeature.FailToAddTwoNumbers() in 
' + 'c:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarn' + 'ess.xUnit\\Addition.feature:line 18' + }, + { + 'path': '.github', + 'start_line': 0, + 'end_line': 0, + 'annotation_level': 'notice', + 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', + 'title': '3 tests found', + 'raw_details': + 'Pickles.TestHarness.xUnit.AdditionFeature ‑ AddTwoNumbers\n' + 'Pickles.TestHarness.xUnit.AdditionFeature ‑ AddingSeveralNumbers\n' + 'Pickles.TestHarness.xUnit.AdditionFeature ‑ FailToAddTwoNumbers' + } + ] + } + } +] \ No newline at end of file diff --git a/python/test/files/xunit/mstest/pickles.junit-xml b/python/test/files/xunit/mstest/pickles.junit-xml new file mode 100644 index 0000000..28faba4 --- /dev/null +++ b/python/test/files/xunit/mstest/pickles.junit-xml @@ -0,0 +1,26 @@ + + + + + + + + + + System.InvalidOperationException : This is a fake failure message + + at Pickles.TestHarness.xUnit.Steps.ThenTheResultShouldBePass(Int32 result) in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.xUnit\Steps.cs:line 26 + at lambda_method(Closure , IContextManager , Int32 ) + at TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager contextManager, Object[] arguments, ITestTracer testTracer, TimeSpan& duration) + at TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManager contextManager, ITestTracer testTracer, Object[] arguments, TimeSpan& duration) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch(BindingMatch match, Object[] arguments) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(StepArgs stepArgs) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep() + at TechTalk.SpecFlow.TestRunner.CollectScenarioErrors() + at Pickles.TestHarness.xUnit.AdditionFeature.ScenarioCleanup() in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.xUnit\Addition.feature.cs:line 0 + at Pickles.TestHarness.xUnit.AdditionFeature.FailToAddTwoNumbers() in c:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.xUnit\Addition.feature:line 18 + + + + + diff --git a/python/test/files/xunit/mstest/pickles.results b/python/test/files/xunit/mstest/pickles.results new file mode 100644 index 0000000..b977bfd --- /dev/null +++ b/python/test/files/xunit/mstest/pickles.results @@ -0,0 +1,101 @@ +publish.unittestresults.ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=0, + suite_failures=1, + suite_errors=0, + suite_time=0, + suite_details=[ + publish.unittestresults.UnitTestSuite( + name='Pickles.TestHarness.xUnit.AdditionFeature', + tests=4, + skipped=0, + failures=1, + errors=0, + stdout=None, + stderr=None + ) + ], + cases=[ + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='Pickles.TestHarness.xUnit.AdditionFeature', + test_name='AddTwoNumbers', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.153 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='Pickles.TestHarness.xUnit.AdditionFeature', + test_name='AddingSeveralNumbers', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.006 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='Pickles.TestHarness.xUnit.AdditionFeature', + 
test_name='AddingSeveralNumbers', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.003 + ), + publish.unittestresults.UnitTestCase( + result_file='mstest/pickles.xml', + test_file=None, + line=None, + class_name='Pickles.TestHarness.xUnit.AdditionFeature', + test_name='FailToAddTwoNumbers', + result='failure', + message='\n System.InvalidOperationException : This is a fake ' + 'failure message\n ', + content='\n System.InvalidOperationException : This is a fake ' + 'failure message\n \n at ' + 'Pickles.TestHarness.xUnit.Steps.ThenTheResultShouldBePass(Int32 ' + 'result) in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.xUnit\\Steps.cs:line 26\n at lambda_method(Closure , ' + 'IContextManager , Int32 )\n at ' + 'TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager' + ' contextManager, Object[] arguments, ITestTracer testTracer, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManag' + 'er contextManager, ITestTracer testTracer, Object[] arguments, ' + 'TimeSpan& duration)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch' + '(BindingMatch match, Object[] arguments)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(Step' + 'Args stepArgs)\n at ' + 'TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep(' + ')\n at TechTalk.SpecFlow.TestRunner.CollectScenarioErrors()\n' + ' at ' + 'Pickles.TestHarness.xUnit.AdditionFeature.ScenarioCleanup() in ' + 'C:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.xUnit\\Addition.feature.cs:line 0\n at ' + 'Pickles.TestHarness.xUnit.AdditionFeature.FailToAddTwoNumbers() in ' + 'c:\\dev\\pickles-results-harness\\Pickles.TestHarness\\Pickles.TestHarnes' + 's.xUnit\\Addition.feature:line 18\n ', + stdout=None, + stderr=None, + time=0.023 + ) + ] +) \ No newline at end of file diff --git a/python/test/files/xunit/mstest/pickles.xml b/python/test/files/xunit/mstest/pickles.xml new file mode 100644 index 0000000..a90f339 --- /dev/null +++ b/python/test/files/xunit/mstest/pickles.xml @@ -0,0 +1,92 @@ + + + + + + + + + + Given I have entered 50 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(50) (0.0s) + And I have entered 70 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(70) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be 120 on the screen + -> done: Steps.ThenTheResultShouldBePass(120) (0.0s) + + + + + + + + + Given I have entered 40 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(40) (0.0s) + And I have entered 50 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(50) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be 90 on the screen + -> done: Steps.ThenTheResultShouldBePass(90) (0.0s) + + + + + + + + + Given I have entered 60 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(60) (0.0s) + And I have entered 70 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(70) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be 130 on the screen + -> done: Steps.ThenTheResultShouldBePass(130) (0.0s) + + + + + + + + + Given I have entered 50 into the calculator + -> done: 
Steps.GivenIHaveEnteredSomethingIntoTheCalculator(50) (0.0s) + And I have entered -1 into the calculator + -> done: Steps.GivenIHaveEnteredSomethingIntoTheCalculator(-1) (0.0s) + When I press add + -> done: Steps.WhenIPressAdd() (0.0s) + Then the result should be -50 on the screen + -> error: This is a fake failure message + + + + System.InvalidOperationException : This is a fake failure message + + + at Pickles.TestHarness.xUnit.Steps.ThenTheResultShouldBePass(Int32 result) in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.xUnit\Steps.cs:line 26 + at lambda_method(Closure , IContextManager , Int32 ) + at TechTalk.SpecFlow.Bindings.MethodBinding.InvokeAction(IContextManager contextManager, Object[] arguments, ITestTracer testTracer, TimeSpan& duration) + at TechTalk.SpecFlow.Bindings.StepDefinitionBinding.Invoke(IContextManager contextManager, ITestTracer testTracer, Object[] arguments, TimeSpan& duration) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStepMatch(BindingMatch match, Object[] arguments) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.ExecuteStep(StepArgs stepArgs) + at TechTalk.SpecFlow.Infrastructure.TestExecutionEngine.OnAfterLastStep() + at TechTalk.SpecFlow.TestRunner.CollectScenarioErrors() + at Pickles.TestHarness.xUnit.AdditionFeature.ScenarioCleanup() in C:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.xUnit\Addition.feature.cs:line 0 + at Pickles.TestHarness.xUnit.AdditionFeature.FailToAddTwoNumbers() in c:\dev\pickles-results-harness\Pickles.TestHarness\Pickles.TestHarness.xUnit\Addition.feature:line 18 + + + + + diff --git a/python/test/requirements.txt b/python/test/requirements.txt new file mode 100644 index 0000000..a509062 --- /dev/null +++ b/python/test/requirements.txt @@ -0,0 +1,6 @@ +flask +mock +prettyprinter +pytest +pyyaml>=5.1 +requests diff --git a/python/test/test_action_script.py b/python/test/test_action_script.py new file mode 100644 index 0000000..ea0f6a9 --- /dev/null +++ b/python/test/test_action_script.py @@ -0,0 +1,1311 @@ +import io +import json +import os +import pathlib +import platform +import re +import sys +import tempfile +import unittest +from typing import Optional, Union, List, Type + +import mock +from packaging.version import Version + +from publish import __version__, pull_request_build_mode_merge, fail_on_mode_failures, fail_on_mode_errors, \ + fail_on_mode_nothing, comment_modes, comment_mode_always, report_suite_out_log, report_suite_err_log, \ + report_suite_logs, report_no_suite_logs, default_report_suite_logs, \ + default_annotations, all_tests_list, skipped_tests_list, none_annotations, \ + pull_request_build_modes, punctuation_space +from publish.github_action import GithubAction +from publish.unittestresults import UnitTestSuite, ParsedUnitTestResults, ParseError +from publish_test_results import action_fail_required, get_conclusion, get_commit_sha, get_var, \ + check_var, check_var_condition, deprecate_var, deprecate_val, log_parse_errors, \ + get_settings, get_annotations_config, Settings, get_files, is_float, parse_files, \ + main, prettify_glob_pattern +from test_utils import chdir + +test_files_path = pathlib.Path(__file__).resolve().parent / 'files' + +event = dict(pull_request=dict(head=dict(sha='event_sha'))) + + +class Test(unittest.TestCase): + details = [UnitTestSuite('suite', 7, 3, 2, 1, 'std-out', 'std-err')] + + def test_get_conclusion_success(self): + for fail_on_errors in [True, False]: + for fail_on_failures in [True, False]: + 
with self.subTest(fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures): + actual = get_conclusion(ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=1, + suite_failures=0, + suite_errors=0, + suite_time=10, + suite_details=self.details, + cases=[] + ), fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures) + self.assertEqual('success', actual) + + def test_get_conclusion_failures(self): + for fail_on_errors in [True, False]: + for fail_on_failures in [True, False]: + with self.subTest(fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures): + actual = get_conclusion(ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=1, + suite_failures=1, + suite_errors=0, + suite_time=10, + suite_details=self.details, + cases=[] + ), fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures) + self.assertEqual('failure' if fail_on_failures else 'success', actual) + + def test_get_conclusion_errors(self): + for fail_on_errors in [True, False]: + for fail_on_failures in [True, False]: + with self.subTest(fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures): + actual = get_conclusion(ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=4, + suite_skipped=1, + suite_failures=0, + suite_errors=1, + suite_time=10, + suite_details=self.details, + cases=[] + ), fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures) + self.assertEqual('failure' if fail_on_errors else 'success', actual) + + def test_get_conclusion_no_files(self): + for fail_on_errors in [True, False]: + for fail_on_failures in [True, False]: + with self.subTest(fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures): + actual = get_conclusion(ParsedUnitTestResults( + files=0, + errors=[], + suites=0, + suite_tests=0, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=self.details, + cases=[] + ), fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures) + self.assertEqual('neutral', actual) + + def test_get_conclusion_parse_errors(self): + for fail_on_errors in [True, False]: + for fail_on_failures in [True, False]: + with self.subTest(fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures): + actual = get_conclusion(ParsedUnitTestResults( + files=2, + errors=[ParseError(file='file', message='error', exception=ValueError("Invalid value"))], + suites=1, + suite_tests=4, + suite_skipped=1, + suite_failures=0, + suite_errors=0, + suite_time=10, + suite_details=self.details, + cases=[] + ), fail_on_errors=fail_on_errors, fail_on_failures=fail_on_failures) + self.assertEqual('failure' if fail_on_errors else 'success', actual) + + def test_env_sha_events(self): + options = dict(GITHUB_SHA='env_sha') + for event_name in ['push', 'workflow_dispatch', 'repository_dispatch', + 'release', 'schedule', 'future_event']: + actual = get_commit_sha(event, event_name, options) + self.assertEqual('env_sha', actual) + + def test_event_sha(self): + options = dict(GITHUB_SHA='env_sha') + for event_name in ['pull_request', 'pull_request_target', + 'pull_request_review', 'pull_request_review_comment', + 'pull_request_future_event']: + actual = get_commit_sha(event, event_name, options) + self.assertEqual('event_sha', actual) + + def test_get_var(self): + self.assertIsNone(get_var('NAME', dict())) + self.assertIsNone(get_var('NAME', dict(name='case sensitive'))) + self.assertEqual(get_var('NAME', dict(NAME='value')), 'value') + 
self.assertEqual(get_var('NAME', dict(INPUT_NAME='precedence', NAME='value')), 'precedence') + self.assertIsNone(get_var('NAME', dict(NAME=''))) + + @classmethod + def get_settings_no_default_files(cls, + files_glob=None, + junit_files_glob=None, + nunit_files_glob=None, + xunit_files_glob=None, + trx_files_glob=None) -> Settings: + return cls.get_settings(files_glob=files_glob, + junit_files_glob=junit_files_glob, + nunit_files_glob=nunit_files_glob, + xunit_files_glob=xunit_files_glob, + trx_files_glob=trx_files_glob) + + @staticmethod + def get_settings(token='token', + actor='actor', + api_url='http://github.api.url/', + graphql_url='http://github.graphql.url/', + retries=2, + event={}, + event_file=None, + event_name='event name', + is_fork=False, + repo='repo', + commit='commit', + fail_on_errors=True, + fail_on_failures=True, + action_fail=False, + action_fail_on_inconclusive=False, + files_glob='all-files', + junit_files_glob='junit-files', + nunit_files_glob='nunit-files', + xunit_files_glob='xunit-files', + trx_files_glob='trx-files', + time_factor=1.0, + test_file_prefix=None, + check_name='check name', + comment_title='title', + comment_mode=comment_mode_always, + job_summary=True, + compare_earlier=True, + test_changes_limit=10, + pull_request_build=pull_request_build_mode_merge, + report_individual_runs=True, + report_suite_out_logs=False, + report_suite_err_logs=False, + dedup_classes_by_file_name=True, + large_files=False, + ignore_runs=False, + check_run_annotation=default_annotations, + seconds_between_github_reads=1.5, + seconds_between_github_writes=2.5, + secondary_rate_limit_wait_seconds=6.0, + json_file=None, + json_thousands_separator=punctuation_space, + json_suite_details=False, + json_test_case_results=False, + search_pull_requests=False) -> Settings: + return Settings( + token=token, + actor=actor, + api_url=api_url, + graphql_url=graphql_url, + api_retries=retries, + event=event.copy(), + event_file=event_file, + event_name=event_name, + is_fork=is_fork, + repo=repo, + commit=commit, + json_file=json_file, + json_thousands_separator=json_thousands_separator, + json_suite_details=json_suite_details, + json_test_case_results=json_test_case_results, + fail_on_errors=fail_on_errors, + fail_on_failures=fail_on_failures, + action_fail=action_fail, + action_fail_on_inconclusive=action_fail_on_inconclusive, + files_glob=files_glob, + junit_files_glob=junit_files_glob, + nunit_files_glob=nunit_files_glob, + xunit_files_glob=xunit_files_glob, + trx_files_glob=trx_files_glob, + time_factor=time_factor, + test_file_prefix=test_file_prefix, + check_name=check_name, + comment_title=comment_title, + comment_mode=comment_mode, + job_summary=job_summary, + compare_earlier=compare_earlier, + pull_request_build=pull_request_build, + test_changes_limit=test_changes_limit, + report_individual_runs=report_individual_runs, + report_suite_out_logs=report_suite_out_logs, + report_suite_err_logs=report_suite_err_logs, + dedup_classes_by_file_name=dedup_classes_by_file_name, + large_files=large_files, + ignore_runs=ignore_runs, + check_run_annotation=check_run_annotation.copy(), + seconds_between_github_reads=seconds_between_github_reads, + seconds_between_github_writes=seconds_between_github_writes, + secondary_rate_limit_wait_seconds=secondary_rate_limit_wait_seconds, + search_pull_requests=search_pull_requests, + ) + + def test_get_settings(self): + options = self.do_test_get_settings() + options = {f'INPUT_{key}': value + for key, value in options.items() + if key not in 
{'GITHUB_API_URL', 'GITHUB_GRAPHQL_URL', 'GITHUB_SHA', 'GITHUB_EVENT_PATH'}} + self.do_test_get_settings(**options) + + def test_get_settings_event_file(self): + self.do_test_get_settings(expected=self.get_settings(event_file=None)) + self.do_test_get_settings(EVENT_FILE='', expected=self.get_settings(event_file=None)) + self.do_test_get_settings(EVENT_FILE=None, expected=self.get_settings(event_file=None)) + + with tempfile.TemporaryDirectory() as path: + event = {"key": "val"} + + filepath = os.path.join(path, 'event.json') + with open(filepath, 'wt', encoding='utf-8') as w: + w.write(json.dumps(event, ensure_ascii=False)) + + self.do_test_get_settings(EVENT_FILE=filepath, expected=self.get_settings(event=event, event_file=filepath)) + + def test_get_settings_github_token(self): + self.do_test_get_settings(GITHUB_TOKEN='token-one', expected=self.get_settings(token='token-one')) + self.do_test_get_settings(GITHUB_TOKEN='token-two', expected=self.get_settings(token='token-two')) + # see test_get_settings_missing_github_vars + + def test_get_settings_github_token_actor(self): + self.do_test_get_settings(GITHUB_TOKEN_ACTOR='other-actor', expected=self.get_settings(actor='other-actor')) + self.do_test_get_settings(GITHUB_TOKEN_ACTOR=None, expected=self.get_settings(actor='github-actions')) + + def test_get_settings_github_api_url(self): + self.do_test_get_settings(GITHUB_API_URL='https://api.github.onpremise.com', expected=self.get_settings(api_url='https://api.github.onpremise.com')) + self.do_test_get_settings(GITHUB_API_URL=None, expected=self.get_settings(api_url='https://api.github.com')) + + def test_get_settings_github_graphql_url(self): + self.do_test_get_settings(GITHUB_GRAPHQL_URL='https://api.github.onpremise.com/graphql', expected=self.get_settings(graphql_url='https://api.github.onpremise.com/graphql')) + self.do_test_get_settings(GITHUB_GRAPHQL_URL=None, expected=self.get_settings(graphql_url='https://api.github.com/graphql')) + + def test_get_settings_github_retries(self): + self.do_test_get_settings(GITHUB_RETRIES='0', expected=self.get_settings(retries=0)) + self.do_test_get_settings(GITHUB_RETRIES='1', expected=self.get_settings(retries=1)) + self.do_test_get_settings(GITHUB_RETRIES='123', expected=self.get_settings(retries=123)) + self.do_test_get_settings(GITHUB_RETRIES=None, expected=self.get_settings(retries=10)) + + for retries in ['-1', '12e', 'none']: + with self.subTest(retries=retries): + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(GITHUB_RETRIES=retries, expected=None) + self.assertIn(f'GITHUB_RETRIES must be a positive integer or 0: {retries}', re.exception.args) + + def test_get_settings_any_files(self): + for files in [None, 'file']: + for junit in [None, 'junit-file']: + for nunit in [None, 'nunit-file']: + for xunit in [None, 'xunit-file']: + for trx in [None, 'trx-file']: + with self.subTest(files=files, junit=junit, nunit=nunit, xunit=xunit, trx=trx): + any_flavour_set = any([flavour is not None for flavour in [files, junit, nunit, xunit, trx]]) + expected = self.get_settings(files_glob=files if any_flavour_set else '*.xml', + junit_files_glob=junit, + nunit_files_glob=nunit, + xunit_files_glob=xunit, + trx_files_glob=trx) + warnings = None if any_flavour_set else 'At least one of the FILES, JUNIT_FILES, NUNIT_FILES, ' \ + 'XUNIT_FILES, or TRX_FILES options has to be set! 
' \ + 'Falling back to deprecated default "*.xml"' + + self.do_test_get_settings(FILES=files, JUNIT_FILES=junit, NUNIT_FILES=nunit, XUNIT_FILES=xunit, TRX_FILES=trx, + expected=expected, warning=warnings) + + def test_get_settings_files(self): + self.do_test_get_settings_no_default_files(FILES='file', expected=self.get_settings_no_default_files(files_glob='file')) + self.do_test_get_settings_no_default_files(FILES='file\nfile2', expected=self.get_settings_no_default_files(files_glob='file\nfile2')) + self.do_test_get_settings_no_default_files(FILES=None, expected=self.get_settings_no_default_files(files_glob='*.xml'), warning='At least one of the FILES, JUNIT_FILES, NUNIT_FILES, XUNIT_FILES, or TRX_FILES options has to be set! Falling back to deprecated default "*.xml"') + + def test_get_settings_junit_files(self): + self.do_test_get_settings_no_default_files(JUNIT_FILES='file', expected=self.get_settings_no_default_files(junit_files_glob='file')) + self.do_test_get_settings_no_default_files(JUNIT_FILES='file\nfile2', expected=self.get_settings_no_default_files(junit_files_glob='file\nfile2')) + self.do_test_get_settings_no_default_files(JUNIT_FILES=None, expected=self.get_settings_no_default_files(files_glob='*.xml'), warning='At least one of the FILES, JUNIT_FILES, NUNIT_FILES, XUNIT_FILES, or TRX_FILES options has to be set! Falling back to deprecated default "*.xml"') + + def test_get_settings_nunit_files(self): + self.do_test_get_settings_no_default_files(NUNIT_FILES='file', expected=self.get_settings_no_default_files(nunit_files_glob='file')) + self.do_test_get_settings_no_default_files(NUNIT_FILES='file\nfile2', expected=self.get_settings_no_default_files(nunit_files_glob='file\nfile2')) + self.do_test_get_settings_no_default_files(NUNIT_FILES=None, expected=self.get_settings_no_default_files(files_glob='*.xml'), warning='At least one of the FILES, JUNIT_FILES, NUNIT_FILES, XUNIT_FILES, or TRX_FILES options has to be set! Falling back to deprecated default "*.xml"') + + def test_get_settings_xunit_files(self): + self.do_test_get_settings_no_default_files(XUNIT_FILES='file', expected=self.get_settings_no_default_files(xunit_files_glob='file')) + self.do_test_get_settings_no_default_files(XUNIT_FILES='file\nfile2', expected=self.get_settings_no_default_files(xunit_files_glob='file\nfile2')) + self.do_test_get_settings_no_default_files(XUNIT_FILES=None, expected=self.get_settings_no_default_files(files_glob='*.xml'), warning='At least one of the FILES, JUNIT_FILES, NUNIT_FILES, XUNIT_FILES, or TRX_FILES options has to be set! Falling back to deprecated default "*.xml"') + + def test_get_settings_trx_files(self): + self.do_test_get_settings_no_default_files(TRX_FILES='file', expected=self.get_settings_no_default_files(trx_files_glob='file')) + self.do_test_get_settings_no_default_files(TRX_FILES='file\nfile2', expected=self.get_settings_no_default_files(trx_files_glob='file\nfile2')) + self.do_test_get_settings_no_default_files(TRX_FILES=None, expected=self.get_settings_no_default_files(files_glob='*.xml'), warning='At least one of the FILES, JUNIT_FILES, NUNIT_FILES, XUNIT_FILES, or TRX_FILES options has to be set! 
Falling back to deprecated default "*.xml"') + + def test_get_settings_time_unit(self): + self.do_test_get_settings(TIME_UNIT=None, expected=self.get_settings(time_factor=1.0)) + self.do_test_get_settings(TIME_UNIT='milliseconds', expected=self.get_settings(time_factor=0.001)) + self.do_test_get_settings(TIME_UNIT='seconds', expected=self.get_settings(time_factor=1.0)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(TIME_UNIT='minutes', expected=None) + self.assertIn('TIME_UNIT minutes is not supported. It is optional, ' + 'but when given must be one of these values: seconds, milliseconds', re.exception.args) + + def test_get_settings_test_file_prefix(self): + self.do_test_get_settings(TEST_FILE_PREFIX=None, expected=self.get_settings(test_file_prefix=None)) + self.do_test_get_settings(TEST_FILE_PREFIX='', expected=self.get_settings(test_file_prefix=None)) + self.do_test_get_settings(TEST_FILE_PREFIX='+src/', expected=self.get_settings(test_file_prefix='+src/')) + self.do_test_get_settings(TEST_FILE_PREFIX='-./', expected=self.get_settings(test_file_prefix='-./')) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(TEST_FILE_PREFIX='path/', expected=None) + self.assertIn("TEST_FILE_PREFIX is optional, but when given, it must start with '-' or '+': path/", re.exception.args) + + def test_get_settings_commit(self): + event = {'pull_request': {'head': {'sha': 'sha2'}}} + self.do_test_get_settings(INPUT_COMMIT='sha', GITHUB_EVENT_NAME='pull_request', event=event, GITHUB_SHA='default', expected=self.get_settings(commit='sha', event=event, event_name='pull_request', is_fork=True)) + self.do_test_get_settings(COMMIT='sha', GITHUB_EVENT_NAME='pull_request', event=event, GITHUB_SHA='default', expected=self.get_settings(commit='sha', event=event, event_name='pull_request', is_fork=True)) + self.do_test_get_settings(COMMIT=None, GITHUB_EVENT_NAME='pull_request', event=event, GITHUB_SHA='default', expected=self.get_settings(commit='sha2', event=event, event_name='pull_request', is_fork=True)) + self.do_test_get_settings(COMMIT=None, INPUT_GITHUB_EVENT_NAME='pull_request', event=event, GITHUB_SHA='default', expected=self.get_settings(commit='sha2', event=event, event_name='pull_request', is_fork=True)) + self.do_test_get_settings(COMMIT=None, GITHUB_EVENT_NAME='push', event=event, GITHUB_SHA='default', expected=self.get_settings(commit='default', event=event, event_name='push', is_fork=False)) + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(COMMIT=None, GITHUB_EVENT_NAME='pull_request', event={}, GITHUB_SHA='default', expected=None) + self.assertIn('Commit SHA must be provided via action input or environment variable COMMIT, GITHUB_SHA or event file', re.exception.args) + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(COMMIT=None, GITHUB_EVENT_NAME='push', event=event, GITHUB_SHA=None, expected=None) + self.assertIn('Commit SHA must be provided via action input or environment variable COMMIT, GITHUB_SHA or event file', re.exception.args) + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(COMMIT=None, expected=None) + self.assertEqual('Commit SHA must be provided via action input or environment variable COMMIT, GITHUB_SHA or event file', str(re.exception)) + + def test_get_settings_fail_on(self): + self.do_test_get_settings(FAIL_ON=None, expected=self.get_settings(fail_on_errors=True, fail_on_failures=True)) + self.do_test_get_settings(FAIL_ON=fail_on_mode_failures, 
expected=self.get_settings(fail_on_errors=True, fail_on_failures=True)) + self.do_test_get_settings(FAIL_ON=fail_on_mode_errors, expected=self.get_settings(fail_on_errors=True, fail_on_failures=False)) + self.do_test_get_settings(FAIL_ON=fail_on_mode_nothing, expected=self.get_settings(fail_on_errors=False, fail_on_failures=False)) + + def test_get_settings_action_fail_on(self): + warning = 'Option action_fail has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(ACTION_FAIL='true', expected=self.get_settings(action_fail=True)) + self.do_test_get_settings(ACTION_FAIL='True', expected=self.get_settings(action_fail=True)) + self.do_test_get_settings(ACTION_FAIL='false', expected=self.get_settings(action_fail=False)) + self.do_test_get_settings(ACTION_FAIL='False', expected=self.get_settings(action_fail=False)) + self.do_test_get_settings(ACTION_FAIL='foo', expected=self.get_settings(action_fail=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(ACTION_FAIL=None, expected=self.get_settings(action_fail=False)) + + def test_get_settings_action_fail_on_inconclusive(self): + warning = 'Option action_fail_on_inconclusive has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(ACTION_FAIL_ON_INCONCLUSIVE='true', expected=self.get_settings(action_fail_on_inconclusive=True)) + self.do_test_get_settings(ACTION_FAIL_ON_INCONCLUSIVE='True', expected=self.get_settings(action_fail_on_inconclusive=True)) + self.do_test_get_settings(ACTION_FAIL_ON_INCONCLUSIVE='false', expected=self.get_settings(action_fail_on_inconclusive=False)) + self.do_test_get_settings(ACTION_FAIL_ON_INCONCLUSIVE='False', expected=self.get_settings(action_fail_on_inconclusive=False)) + self.do_test_get_settings(ACTION_FAIL_ON_INCONCLUSIVE='foo', expected=self.get_settings(action_fail_on_inconclusive=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(ACTION_FAIL_ON_INCONCLUSIVE=None, expected=self.get_settings(action_fail_on_inconclusive=False)) + + def test_get_settings_pull_request_build(self): + for mode in pull_request_build_modes: + with self.subTest(mode=mode): + self.do_test_get_settings(PULL_REQUEST_BUILD=mode, expected=self.get_settings(pull_request_build=mode)) + self.do_test_get_settings(PULL_REQUEST_BUILD=None, expected=self.get_settings(pull_request_build=pull_request_build_mode_merge)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(PULL_REQUEST_BUILD='build') + self.assertEqual("Value 'build' is not supported for variable PULL_REQUEST_BUILD, expected: commit, merge", str(re.exception)) + + def test_get_settings_test_changes_limit(self): + self.do_test_get_settings(TEST_CHANGES_LIMIT='0', expected=self.get_settings(test_changes_limit=0)) + self.do_test_get_settings(TEST_CHANGES_LIMIT='1', expected=self.get_settings(test_changes_limit=1)) + self.do_test_get_settings(TEST_CHANGES_LIMIT=None, expected=self.get_settings(test_changes_limit=10)) + + for limit in ['-1', '1.0', '12e', 'string']: + with self.subTest(limit=limit): + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(TEST_CHANGES_LIMIT=limit, expected=self.get_settings(test_changes_limit=10)) + self.assertIn(f'TEST_CHANGES_LIMIT must be a positive integer or 0: {limit}', re.exception.args) + + def test_get_settings_check_name(self): + self.do_test_get_settings(CHECK_NAME='name', expected=self.get_settings(check_name='name')) + self.do_test_get_settings(CHECK_NAME=None, expected=self.get_settings(check_name='Test 
Results')) + + def test_get_settings_comment_title(self): + self.do_test_get_settings(COMMENT_TITLE=None, CHECK_NAME=None, expected=self.get_settings(comment_title='Test Results', check_name='Test Results')) + self.do_test_get_settings(COMMENT_TITLE='title', CHECK_NAME=None, expected=self.get_settings(comment_title='title', check_name='Test Results')) + self.do_test_get_settings(COMMENT_TITLE='title', CHECK_NAME='name', expected=self.get_settings(comment_title='title', check_name='name')) + self.do_test_get_settings(COMMENT_TITLE=None, CHECK_NAME='name', expected=self.get_settings(comment_title='name', check_name='name')) + + def test_get_settings_comment_mode(self): + for mode in comment_modes: + with self.subTest(mode=mode): + self.do_test_get_settings(COMMENT_MODE=mode, expected=self.get_settings(comment_mode=mode)) + self.do_test_get_settings(COMMENT_MODE=None, expected=self.get_settings(comment_mode=comment_mode_always)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(COMMENT_MODE='mode') + self.assertEqual("Value 'mode' is not supported for variable COMMENT_MODE, expected: off, always, changes, changes in failures, changes in errors, failures, errors", str(re.exception)) + + def test_get_settings_compare_to_earlier_commit(self): + warning = 'Option compare_to_earlier_commit has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(COMPARE_TO_EARLIER_COMMIT='false', expected=self.get_settings(compare_earlier=False)) + self.do_test_get_settings(COMPARE_TO_EARLIER_COMMIT='False', expected=self.get_settings(compare_earlier=False)) + self.do_test_get_settings(COMPARE_TO_EARLIER_COMMIT='true', expected=self.get_settings(compare_earlier=True)) + self.do_test_get_settings(COMPARE_TO_EARLIER_COMMIT='True', expected=self.get_settings(compare_earlier=True)) + self.do_test_get_settings(COMPARE_TO_EARLIER_COMMIT='foo', expected=self.get_settings(compare_earlier=True), warning=warning, exception=RuntimeError) + self.do_test_get_settings(COMPARE_TO_EARLIER_COMMIT=None, expected=self.get_settings(compare_earlier=True)) + + def test_get_settings_job_summary(self): + warning = 'Option job_summary has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(JOB_SUMMARY='false', expected=self.get_settings(job_summary=False)) + self.do_test_get_settings(JOB_SUMMARY='False', expected=self.get_settings(job_summary=False)) + self.do_test_get_settings(JOB_SUMMARY='true', expected=self.get_settings(job_summary=True)) + self.do_test_get_settings(JOB_SUMMARY='True', expected=self.get_settings(job_summary=True)) + self.do_test_get_settings(JOB_SUMMARY='foo', expected=self.get_settings(job_summary=True), warning=warning, exception=RuntimeError) + self.do_test_get_settings(JOB_SUMMARY=None, expected=self.get_settings(job_summary=True)) + + def test_get_settings_report_individual_runs(self): + warning = 'Option report_individual_runs has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(REPORT_INDIVIDUAL_RUNS='false', expected=self.get_settings(report_individual_runs=False)) + self.do_test_get_settings(REPORT_INDIVIDUAL_RUNS='False', expected=self.get_settings(report_individual_runs=False)) + self.do_test_get_settings(REPORT_INDIVIDUAL_RUNS='true', expected=self.get_settings(report_individual_runs=True)) + self.do_test_get_settings(REPORT_INDIVIDUAL_RUNS='True', expected=self.get_settings(report_individual_runs=True)) + self.do_test_get_settings(REPORT_INDIVIDUAL_RUNS='foo', 
expected=self.get_settings(report_individual_runs=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(REPORT_INDIVIDUAL_RUNS=None, expected=self.get_settings(report_individual_runs=False)) + + def test_get_settings_report_suite_logs(self): + self.do_test_get_settings(REPORT_SUITE_LOGS=None, expected=self.get_settings(report_suite_out_logs=False, report_suite_err_logs=False)) + self.do_test_get_settings(REPORT_SUITE_LOGS=default_report_suite_logs, expected=self.get_settings(report_suite_out_logs=False, report_suite_err_logs=False)) + self.do_test_get_settings(REPORT_SUITE_LOGS=report_no_suite_logs, expected=self.get_settings(report_suite_out_logs=False, report_suite_err_logs=False)) + self.do_test_get_settings(REPORT_SUITE_LOGS=report_suite_out_log, expected=self.get_settings(report_suite_out_logs=True, report_suite_err_logs=False)) + self.do_test_get_settings(REPORT_SUITE_LOGS=report_suite_err_log, expected=self.get_settings(report_suite_out_logs=False, report_suite_err_logs=True)) + self.do_test_get_settings(REPORT_SUITE_LOGS=report_suite_logs, expected=self.get_settings(report_suite_out_logs=True, report_suite_err_logs=True)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(REPORT_SUITE_LOGS='logs') + self.assertEqual("Value 'logs' is not supported for variable REPORT_SUITE_LOGS, expected: info, error, any, none", str(re.exception)) + + def test_get_settings_dedup_classes_by_file_name(self): + warning = 'Option deduplicate_classes_by_file_name has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(DEDUPLICATE_CLASSES_BY_FILE_NAME='false', expected=self.get_settings(dedup_classes_by_file_name=False)) + self.do_test_get_settings(DEDUPLICATE_CLASSES_BY_FILE_NAME='False', expected=self.get_settings(dedup_classes_by_file_name=False)) + self.do_test_get_settings(DEDUPLICATE_CLASSES_BY_FILE_NAME='true', expected=self.get_settings(dedup_classes_by_file_name=True)) + self.do_test_get_settings(DEDUPLICATE_CLASSES_BY_FILE_NAME='True', expected=self.get_settings(dedup_classes_by_file_name=True)) + self.do_test_get_settings(DEDUPLICATE_CLASSES_BY_FILE_NAME='foo', expected=self.get_settings(dedup_classes_by_file_name=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(DEDUPLICATE_CLASSES_BY_FILE_NAME=None, expected=self.get_settings(dedup_classes_by_file_name=False)) + + def test_get_settings_large_files(self): + warning = 'Option large_files has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(LARGE_FILES='false', expected=self.get_settings(large_files=False, ignore_runs=False)) + self.do_test_get_settings(LARGE_FILES='False', expected=self.get_settings(large_files=False, ignore_runs=False)) + self.do_test_get_settings(LARGE_FILES='true', expected=self.get_settings(large_files=True, ignore_runs=False)) + self.do_test_get_settings(LARGE_FILES='True', expected=self.get_settings(large_files=True, ignore_runs=False)) + self.do_test_get_settings(LARGE_FILES='foo', expected=self.get_settings(large_files=False, ignore_runs=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(LARGE_FILES=None, expected=self.get_settings(large_files=False, ignore_runs=False)) + + def test_get_settings_ignore_runs(self): + warning = 'Option ignore_runs has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(IGNORE_RUNS='false', expected=self.get_settings(ignore_runs=False, large_files=False)) + self.do_test_get_settings(IGNORE_RUNS='False', 
expected=self.get_settings(ignore_runs=False, large_files=False)) + self.do_test_get_settings(IGNORE_RUNS='true', expected=self.get_settings(ignore_runs=True, large_files=True)) + self.do_test_get_settings(IGNORE_RUNS='True', expected=self.get_settings(ignore_runs=True, large_files=True)) + self.do_test_get_settings(IGNORE_RUNS='true', LARGE_FILES='false', expected=self.get_settings(ignore_runs=True, large_files=False)) + self.do_test_get_settings(IGNORE_RUNS='foo', expected=self.get_settings(ignore_runs=False, large_files=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(IGNORE_RUNS=None, expected=self.get_settings(ignore_runs=False, large_files=False)) + + def test_get_settings_check_run_annotations(self): + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS=None, expected=self.get_settings(check_run_annotation=default_annotations)) + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS=','.join(default_annotations), expected=self.get_settings(check_run_annotation=default_annotations)) + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS=all_tests_list, expected=self.get_settings(check_run_annotation=[all_tests_list])) + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS=skipped_tests_list, expected=self.get_settings(check_run_annotation=[skipped_tests_list])) + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS=none_annotations, expected=self.get_settings(check_run_annotation=[none_annotations])) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS=','.join([all_tests_list, skipped_tests_list, none_annotations]), expected=self.get_settings(check_run_annotation=[])) + self.assertEqual("CHECK_RUN_ANNOTATIONS 'none' cannot be combined with other annotations: all tests, skipped tests, none", str(re.exception)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS='annotations') + self.assertEqual("Some values in 'annotations' are not supported for variable CHECK_RUN_ANNOTATIONS, allowed: all tests, skipped tests, none", str(re.exception)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(CHECK_RUN_ANNOTATIONS=','.join([all_tests_list, skipped_tests_list, "more"])) + self.assertEqual("Some values in 'all tests, skipped tests, more' are not supported for variable CHECK_RUN_ANNOTATIONS, allowed: all tests, skipped tests, none", str(re.exception)) + + def test_get_settings_seconds_between_github_reads(self): + self.do_test_get_settings_seconds('SECONDS_BETWEEN_GITHUB_READS', 'seconds_between_github_reads', 1.0) + + def test_get_settings_seconds_between_github_writes(self): + self.do_test_get_settings_seconds('SECONDS_BETWEEN_GITHUB_WRITES', 'seconds_between_github_writes', 2.0) + + def test_get_settings_secondary_rate_limit_wait_seconds(self): + self.do_test_get_settings_seconds('SECONDARY_RATE_LIMIT_WAIT_SECONDS', 'secondary_rate_limit_wait_seconds', 60) + + def do_test_get_settings_seconds(self, env_var_name: str, settings_var_name: str, default: float): + self.do_test_get_settings(**{env_var_name: '0.001', 'expected': self.get_settings(**{settings_var_name: 0.001})}) + self.do_test_get_settings(**{env_var_name: '1', 'expected': self.get_settings(**{settings_var_name: 1.0})}) + self.do_test_get_settings(**{env_var_name: '1.0', 'expected': self.get_settings(**{settings_var_name: 1.0})}) + self.do_test_get_settings(**{env_var_name: '2.5', 'expected': self.get_settings(**{settings_var_name: 2.5})}) + self.do_test_get_settings(**{env_var_name: None, 'expected': 
self.get_settings(**{settings_var_name: default})}) + + for val in ['none', '12e']: + with self.subTest(reads=val): + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(**{env_var_name: val, 'expected': None}) + self.assertIn(f'{env_var_name} must be an integer or float number: {val}', re.exception.args) + + for val in ['0', '0.0', '-1']: + with self.subTest(reads=val): + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(**{env_var_name: val, 'expected': None}) + self.assertIn(f'{env_var_name} must be a positive number: {val}', re.exception.args) + + def test_get_settings_json_file(self): + for json_file in [None, 'file.json', '/path/file.json']: + with self.subTest(json_file=json_file): + self.do_test_get_settings(JSON_FILE=json_file, expected=self.get_settings(json_file=json_file)) + + def test_get_settings_json_thousands_separator(self): + self.do_test_get_settings(JSON_THOUSANDS_SEPARATOR=None, expected=self.get_settings(json_thousands_separator=punctuation_space)) + self.do_test_get_settings(JSON_THOUSANDS_SEPARATOR=',', expected=self.get_settings(json_thousands_separator=',')) + self.do_test_get_settings(JSON_THOUSANDS_SEPARATOR='.', expected=self.get_settings(json_thousands_separator='.')) + self.do_test_get_settings(JSON_THOUSANDS_SEPARATOR=' ', expected=self.get_settings(json_thousands_separator=' ')) + + def test_get_settings_json_test_case_results(self): + warning = 'Option json_test_case_results has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(JSON_TEST_CASE_RESULTS='false', expected=self.get_settings(json_test_case_results=False)) + self.do_test_get_settings(JSON_TEST_CASE_RESULTS='False', expected=self.get_settings(json_test_case_results=False)) + self.do_test_get_settings(JSON_TEST_CASE_RESULTS='true', expected=self.get_settings(json_test_case_results=True)) + self.do_test_get_settings(JSON_TEST_CASE_RESULTS='True', expected=self.get_settings(json_test_case_results=True)) + self.do_test_get_settings(JSON_TEST_CASE_RESULTS='foo', expected=self.get_settings(json_test_case_results=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(JSON_TEST_CASE_RESULTS=None, expected=self.get_settings(json_test_case_results=False)) + + def test_get_settings_json_suite_details(self): + warning = 'Option json_suite_details has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(JSON_SUITE_DETAILS='false', expected=self.get_settings(json_suite_details=False)) + self.do_test_get_settings(JSON_SUITE_DETAILS='False', expected=self.get_settings(json_suite_details=False)) + self.do_test_get_settings(JSON_SUITE_DETAILS='true', expected=self.get_settings(json_suite_details=True)) + self.do_test_get_settings(JSON_SUITE_DETAILS='True', expected=self.get_settings(json_suite_details=True)) + self.do_test_get_settings(JSON_SUITE_DETAILS='foo', expected=self.get_settings(json_suite_details=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(JSON_SUITE_DETAILS=None, expected=self.get_settings(json_suite_details=False)) + + def test_get_settings_search_pull_requests(self): + warning = 'Option search_pull_requests has to be boolean, so either "true" or "false": foo' + self.do_test_get_settings(SEARCH_PULL_REQUESTS='false', expected=self.get_settings(search_pull_requests=False)) + self.do_test_get_settings(SEARCH_PULL_REQUESTS='False', expected=self.get_settings(search_pull_requests=False)) + self.do_test_get_settings(SEARCH_PULL_REQUESTS='true', 
expected=self.get_settings(search_pull_requests=True)) + self.do_test_get_settings(SEARCH_PULL_REQUESTS='True', expected=self.get_settings(search_pull_requests=True)) + self.do_test_get_settings(SEARCH_PULL_REQUESTS='foo', expected=self.get_settings(search_pull_requests=False), warning=warning, exception=RuntimeError) + self.do_test_get_settings(SEARCH_PULL_REQUESTS=None, expected=self.get_settings(search_pull_requests=False)) + + def test_get_settings_missing_github_vars(self): + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(GITHUB_EVENT_PATH=None) + self.assertEqual('GitHub event file path must be provided via action input or environment variable GITHUB_EVENT_PATH', str(re.exception)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(GITHUB_TOKEN=None) + self.assertEqual('GitHub token must be provided via action input or environment variable GITHUB_TOKEN', str(re.exception)) + + with self.assertRaises(RuntimeError) as re: + self.do_test_get_settings(GITHUB_REPOSITORY=None) + self.assertEqual('GitHub repository must be provided via action input or environment variable GITHUB_REPOSITORY', str(re.exception)) + + def test_get_settings_fork(self): + event = {"pull_request": {"head": {"repo": {"full_name": "fork/repo"}}}} + self.do_test_get_settings(event=event, + EVENT_NAME='pull_request', + GITHUB_REPOSITORY='repo', + expected=self.get_settings(is_fork=True, event=event, event_name='pull_request'), + warning=[]) + + def do_test_get_settings_no_default_files(self, + event: dict = {}, + gha: Optional[GithubAction] = None, + warning: Optional[Union[str, List[str]]] = None, + expected: Settings = get_settings.__func__(), + **kwargs): + options = dict(**kwargs) + if 'FILES' not in kwargs: + options[f'FILES'] = None + for flavour in ['JUNIT', 'NUNIT', 'XUNIT', 'TRX']: + if f'{flavour}_FILES' not in kwargs: + options[f'{flavour}_FILES'] = None + + self.do_test_get_settings(event, gha, warning=warning, expected=expected, **options) + + def do_test_get_settings(self, + event: Optional[dict] = None, + gha: Optional[GithubAction] = None, + warning: Optional[Union[str, List[str]]] = None, + exception: Optional[Type[Exception]] = None, + expected: Settings = get_settings.__func__(), + **kwargs): + if event is None: + event = {} + + with tempfile.TemporaryDirectory() as path: + # default options + options = dict( + GITHUB_EVENT_NAME='event name', + GITHUB_API_URL='http://github.api.url/', #defaults to github + GITHUB_GRAPHQL_URL='http://github.graphql.url/', #defaults to github + GITHUB_RETRIES='2', + TEST_CHANGES_LIMIT='10', # not an int + CHECK_NAME='check name', # defaults to 'Test Results' + GITHUB_TOKEN='token', + GITHUB_TOKEN_ACTOR='actor', + GITHUB_REPOSITORY='repo', + COMMIT='commit', # defaults to get_commit_sha(event, event_name) + FILES='all-files', + JUNIT_FILES='junit-files', + NUNIT_FILES='nunit-files', + XUNIT_FILES='xunit-files', + TRX_FILES='trx-files', + COMMENT_TITLE='title', # defaults to check name + COMMENT_MODE='always', + JOB_SUMMARY='true', + REPORT_INDIVIDUAL_RUNS='true', # false unless 'true' + DEDUPLICATE_CLASSES_BY_FILE_NAME='true', # false unless 'true' + # annotations config tested in test_get_annotations_config* + SECONDS_BETWEEN_GITHUB_READS='1.5', + SECONDS_BETWEEN_GITHUB_WRITES='2.5', + SECONDARY_RATE_LIMIT_WAIT_SECONDS='6.0', + ) + + # provide event via GITHUB_EVENT_PATH when there is no EVENT_FILE given + if 'EVENT_FILE' not in kwargs or not kwargs['EVENT_FILE']: + filepath = os.path.join(path, 'event.json') + with 
open(filepath, 'wt', encoding='utf-8') as w: + w.write(json.dumps(event, ensure_ascii=False)) + options.update(GITHUB_EVENT_PATH=filepath) + + # overwrite default options + options.update(**kwargs) + for arg in kwargs: + if arg.startswith('INPUT_'): + del options[arg[6:]] + + # Note: functionality of get_annotations_config is simplified here, + # its true behaviour is tested in get_annotations_config* + annotations_config = options.get('CHECK_RUN_ANNOTATIONS').split(',') \ + if options.get('CHECK_RUN_ANNOTATIONS') is not None else default_annotations + with mock.patch('publish_test_results.get_annotations_config', return_value=annotations_config) as m: + if gha is None: + gha = mock.MagicMock() + + if exception: + with self.assertRaises(exception) as e: + get_settings(options, gha) + self.assertEqual((warning, ), e.exception.args) + return None + + actual = get_settings(options, gha) + m.assert_called_once_with(options, expected.event) + if warning: + if isinstance(warning, list): + gha.warning.assert_has_calls([mock.call(w) for w in warning], any_order=False) + else: + gha.warning.assert_called_once_with(warning) + else: + gha.warning.assert_not_called() + + self.assertEqual(expected, actual) + + return options + + def test_get_annotations_config(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three' + } + config = get_annotations_config(options, None) + self.assertEqual(['one', 'two', 'three'], config) + + def test_get_annotations_config_in_specific_branch(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three', + 'CHECK_RUN_ANNOTATIONS_BRANCH': 'release, develop', + 'GITHUB_REF': 'refs/heads/release' + } + config = get_annotations_config(options, None) + self.assertEqual(['one', 'two', 'three'], config) + + def test_get_annotations_config_not_in_specific_branch(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three', + 'CHECK_RUN_ANNOTATIONS_BRANCH': 'release, develop', + 'GITHUB_REF': 'refs/heads/branch' + } + config = get_annotations_config(options, None) + self.assertEqual([], config) + + def test_get_annotations_config_in_default_branch(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three', + 'GITHUB_REF': 'refs/heads/develop' + } + event = {'repository': {'default_branch': 'develop'}} + config = get_annotations_config(options, event) + self.assertEqual(['one', 'two', 'three'], config) + + def test_get_annotations_config_not_in_default_branch(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three', + 'GITHUB_REF': 'refs/heads/branch' + } + event = {'repository': {'default_branch': 'develop'}} + config = get_annotations_config(options, event) + self.assertEqual([], config) + + def test_get_annotations_config_in_standard_default_branch(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three', + 'GITHUB_REF': 'refs/heads/main' + } + config = get_annotations_config(options, None) + self.assertEqual(['one', 'two', 'three'], config) + + def test_get_annotations_config_not_in_standard_default_branch(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three', + 'GITHUB_REF': 'refs/heads/branch' + } + config = get_annotations_config(options, None) + self.assertEqual([], config) + + def test_get_annotations_config_in_all_branches(self): + options = { + 'CHECK_RUN_ANNOTATIONS': 'one,two, three', + 'CHECK_RUN_ANNOTATIONS_BRANCH': '*', + 'GITHUB_REF': 'refs/heads/release' + } + config = get_annotations_config(options, None) + self.assertEqual(['one', 'two', 'three'], config) + + def test_get_annotations_config_default(self): + config 
= get_annotations_config({}, None) + self.assertEqual(['all tests', 'skipped tests'], config) + + def test_get_files_single(self): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files('file1.txt') + self.assertEqual(['file1.txt'], sorted(files)) + + def test_get_files_multi(self): + for sep in ['\n', '\r\n', '\n\r', '\n\n', '\r\n\r\n']: + with self.subTest(sep=sep): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files(f'file1.txt{sep}file2.txt') + self.assertEqual(['file1.txt', 'file2.txt'], sorted(files)) + + def test_get_files_single_wildcard(self): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + for wildcard in ['*.txt', 'file?.txt']: + with self.subTest(wildcard=wildcard): + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files(wildcard) + self.assertEqual(['file1.txt', 'file2.txt'], sorted(files)) + + def test_get_files_multi_wildcard(self): + for sep in ['\n', '\r\n', '\n\r', '\n\n', '\r\n\r\n']: + with self.subTest(sep=sep): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, absolute = get_files(f'*1.txt{sep}*3.bin') + self.assertEqual(['file1.txt', 'file3.bin'], sorted(files)) + self.assertFalse(absolute) + + def test_get_files_subdir_and_wildcard(self): + filenames = [os.path.join('sub', 'file1.txt'), + os.path.join('sub', 'file2.txt'), + os.path.join('sub', 'file3.bin'), + os.path.join('sub2', 'file4.txt'), + 'file5.txt'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + os.mkdir('sub') + os.mkdir('sub2') + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files('sub/*.txt') + self.assertEqual([os.path.join('sub', 'file1.txt'), + os.path.join('sub', 'file2.txt')], sorted(files)) + + def test_get_files_recursive_wildcard(self): + for pattern, expected in [('**/*.txt', ['file6.txt', os.path.join('sub', 'file1.txt'), os.path.join('sub', 'file2.txt'), os.path.join('sub2', 'file4.txt'), os.path.join('sub2', 'sub3', 'sub4', 'file5.txt')]), + ('./**/*.txt', [os.path.join('.', 'file6.txt'), os.path.join('.', 'sub', 'file1.txt'), os.path.join('.', 'sub', 'file2.txt'), os.path.join('.', 'sub2', 'file4.txt'), os.path.join('.', 'sub2', 'sub3', 'sub4', 'file5.txt')]), + ('*/**/*.txt', [os.path.join('sub', 'file1.txt'), os.path.join('sub', 'file2.txt'), os.path.join('sub2', 'file4.txt'), os.path.join('sub2', 'sub3', 'sub4', 'file5.txt')])]: + with self.subTest(pattern=pattern): + filenames = [os.path.join('sub', 'file1.txt'), + os.path.join('sub', 'file2.txt'), + os.path.join('sub', 'file3.bin'), + os.path.join('sub2', 'file4.txt'), + os.path.join('sub2', 'sub3', 'sub4', 'file5.txt'), + 'file6.txt'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + os.mkdir('sub') + os.mkdir('sub2') + os.mkdir(os.path.join('sub2', 'sub3')) + os.mkdir(os.path.join('sub2', 'sub3', 'sub4')) + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files(pattern) + self.assertEqual(sorted(expected), sorted(files)) + + def 
test_get_files_symlinks(self): + for pattern, expected in [('**/*.txt', [os.path.join('sub1', 'file1.txt'), os.path.join('sub2', 'file2.txt'), os.path.join('sub1', 'sub2', 'file2.txt')]), + ('./**/*.txt', [os.path.join('.', 'sub1', 'file1.txt'), os.path.join('.', 'sub2', 'file2.txt'), os.path.join('.', 'sub1', 'sub2', 'file2.txt')]), + ('*/*.txt', [os.path.join('sub1', 'file1.txt'), os.path.join('sub2', 'file2.txt')])]: + with self.subTest(pattern=pattern): + with tempfile.TemporaryDirectory() as path: + filenames = [os.path.join('sub1', 'file1.txt'), + os.path.join('sub2', 'file2.txt')] + with chdir(path): + os.mkdir('sub1') + os.mkdir('sub2') + for filename in filenames: + with open(filename, mode='w'): + pass + os.symlink(os.path.join(path, 'sub2'), os.path.join(path, 'sub1', 'sub2'), target_is_directory=True) + + files, _ = get_files(pattern) + self.assertEqual(sorted(expected), sorted(files)) + + def test_get_files_character_range(self): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files('file[0-2].*') + self.assertEqual(['file1.txt', 'file2.txt'], sorted(files)) + + def test_get_files_multi_match(self): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files('*.txt\nfile*.txt\nfile2.*') + self.assertEqual(['file1.txt', 'file2.txt'], sorted(files)) + + def test_get_files_absolute_path_and_wildcard(self): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, absolute = get_files(os.path.join(path, '*')) + self.assertEqual([os.path.join(path, file) for file in filenames], sorted(files)) + self.assertTrue(absolute) + + def test_get_files_exclude_only(self): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files('!file*.txt') + self.assertEqual([], sorted(files)) + + def test_get_files_include_and_exclude(self): + filenames = ['file1.txt', 'file2.txt', 'file3.bin'] + with tempfile.TemporaryDirectory() as path: + with chdir(path): + for filename in filenames: + with open(filename, mode='w'): + pass + + files, _ = get_files('*.txt\n!file1.txt') + self.assertEqual(['file2.txt'], sorted(files)) + + def test_get_files_with_mock(self): + with mock.patch('publish_test_results.glob') as m: + files, _ = get_files('*.txt\n!file1.txt') + self.assertEqual([], files) + self.assertEqual([mock.call('*.txt', recursive=True), mock.call('file1.txt', recursive=True)], m.call_args_list) + + def test_prettify_glob_pattern(self): + self.assertEqual(None, prettify_glob_pattern(None)) + self.assertEqual('', prettify_glob_pattern('')) + self.assertEqual('*.xml', prettify_glob_pattern('*.xml')) + self.assertEqual('*.xml, *.trx', prettify_glob_pattern('*.xml\n*.trx')) + self.assertEqual('*.xml, *.trx', prettify_glob_pattern(' *.xml\n *.trx\n ')) + + def test_parse_files(self): + gha = mock.MagicMock() + settings = self.get_settings(files_glob='\n'.join([str(test_files_path / '**' / '*.xml'), str(test_files_path / '**' / '*.trx'), str(test_files_path / '**' / '*.json')]), + 
junit_files_glob=str(test_files_path / 'junit-xml' / '**' / '*.xml'), + nunit_files_glob=str(test_files_path / 'nunit' / '**' / '*.xml'), + xunit_files_glob=str(test_files_path / 'xunit' / '**' / '*.xml'), + trx_files_glob=str(test_files_path / 'trx' / '**' / '*.trx')) + with mock.patch('publish_test_results.logger') as l: + actual = parse_files(settings, gha) + + for call in l.info.call_args_list: + print(call.args[0]) + + self.assertEqual(17, len(l.info.call_args_list)) + self.assertTrue(any([call.args[0].startswith(f"Reading files {prettify_glob_pattern(settings.files_glob)} (76 files, ") for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Reading JUnit XML files {prettify_glob_pattern(settings.junit_files_glob)} (28 files, ') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Reading NUnit XML files {prettify_glob_pattern(settings.nunit_files_glob)} (24 files, ') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Reading XUnit XML files {prettify_glob_pattern(settings.xunit_files_glob)} (8 files, ') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Reading TRX files {prettify_glob_pattern(settings.trx_files_glob)} (9 files, ') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Detected 27 JUnit XML files (') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Detected 24 NUnit XML files (') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Detected 8 XUnit XML files (') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Detected 9 TRX files (') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Detected 1 Dart JSON file (') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Detected 1 Mocha JSON file (') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Detected 4 unsupported files (') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Unsupported file: ') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].endswith(f'python{os.sep}test{os.sep}files{os.sep}xml{os.sep}non-xml.xml') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].endswith(f'python{os.sep}test{os.sep}files{os.sep}junit-xml{os.sep}non-junit.xml') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].endswith(f'python{os.sep}test{os.sep}files{os.sep}json{os.sep}non-json.json') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].endswith(f'python{os.sep}test{os.sep}files{os.sep}json{os.sep}malformed-json.json') for call in l.info.call_args_list])) + self.assertTrue(any([call.args[0].startswith(f'Finished reading 145 files in ') for call in l.info.call_args_list])) + + for call in l.debug.call_args_list: + print(call.args[0]) + + self.assertEqual(11, len(l.debug.call_args_list)) + self.assertTrue(any([call.args[0].startswith('reading files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('reading JUnit XML files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('reading NUnit XML files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('reading XUnit XML files [') for call in l.debug.call_args_list])) + 
self.assertTrue(any([call.args[0].startswith('reading TRX files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('detected JUnit XML files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('detected NUnit XML files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('detected XUnit XML files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('detected TRX files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('detected Dart JSON files [') for call in l.debug.call_args_list])) + self.assertTrue(any([call.args[0].startswith('detected Mocha JSON files [') for call in l.debug.call_args_list])) + + self.assertEqual([], gha.method_calls) + + self.assertEqual(145, actual.files) + if Version(sys.version.split(' ')[0]) < Version('3.9.0') and sys.platform.startswith('darwin') and \ + (platform.mac_ver()[0].startswith("11.") or platform.mac_ver()[0].startswith("12.")): + # on macOS and below Python 3.9 we see one particular error + self.assertEqual(17, len(actual.errors)) + self.assertEqual(731, actual.suites) + self.assertEqual(4109, actual.suite_tests) + self.assertEqual(214, actual.suite_skipped) + self.assertEqual(450, actual.suite_failures) + self.assertEqual(21, actual.suite_errors) + self.assertEqual(7956, actual.suite_time) + self.assertEqual(0, len(actual.suite_details)) + self.assertEqual(4085, len(actual.cases)) + else: + self.assertEqual(13, len(actual.errors)) + self.assertEqual(735, actual.suites) + self.assertEqual(4117, actual.suite_tests) + self.assertEqual(214, actual.suite_skipped) + self.assertEqual(454, actual.suite_failures) + self.assertEqual(21, actual.suite_errors) + self.assertEqual(7957, actual.suite_time) + self.assertEqual(0, len(actual.suite_details)) + self.assertEqual(4093, len(actual.cases)) + self.assertEqual('commit', actual.commit) + + with io.StringIO() as string: + gha = GithubAction(file=string) + with mock.patch('publish.github_action.logger') as m: + log_parse_errors(actual.errors, gha) + expected = [ + # these occur twice, once from FILES and once from *_FILES options + "::error::lxml.etree.XMLSyntaxError: Premature end of data in tag skipped line 9, line 11, column 22", + "::error file=corrupt-xml.xml::Error processing result file: Premature end of data in tag skipped line 9, line 11, column 22 (corrupt-xml.xml, line 11)", + "::error::lxml.etree.XMLSyntaxError: Char 0x0 out of allowed range, line 33, column 16", + "::error file=NUnit-issue17521.xml::Error processing result file: Char 0x0 out of allowed range, line 33, column 16 (NUnit-issue17521.xml, line 33)", + "::error::lxml.etree.XMLSyntaxError: attributes construct error, line 5, column 109", + "::error file=NUnit-issue47367.xml::Error processing result file: attributes construct error, line 5, column 109 (NUnit-issue47367.xml, line 5)" + ] * 2 + [ + # these occur once, either from FILES and or from *_FILES options + "::error::Exception: File is empty.", + '::error::Exception: File is empty.', # once from xml, once from json + '::error::RuntimeError: Unsupported file format: malformed-json.json', + '::error::RuntimeError: Unsupported file format: non-json.json', + "::error file=empty.xml::Error processing result file: File is empty.", + "::error file=non-junit.xml::Error processing result file: Unsupported file format: non-junit.xml", + "::error file=non-junit.xml::Error processing result file: Invalid format.", 
+ "::error file=non-xml.xml::Error processing result file: Unsupported file format: non-xml.xml", + "::error::junitparser.junitparser.JUnitXmlError: Invalid format.", + "::error::RuntimeError: Unsupported file format: non-junit.xml", + '::error::RuntimeError: Unsupported file format: non-xml.xml', + '::error file=empty.json::Error processing result file: File is empty.', + '::error file=malformed-json.json::Error processing result file: Unsupported file format: malformed-json.json', + '::error file=non-json.json::Error processing result file: Unsupported file format: non-json.json', + ] + if Version(sys.version.split(' ')[0]) < Version('3.9.0') and sys.platform.startswith('darwin') and \ + (platform.mac_ver()[0].startswith("11.") or platform.mac_ver()[0].startswith("12.")): + expected.extend([ + '::error::lxml.etree.XMLSyntaxError: Failure to process entity xxe, line 17, column 51', + '::error file=NUnit-sec1752-file.xml::Error processing result file: Failure to process entity xxe, line 17, column 51 (NUnit-sec1752-file.xml, line 17)', + '::error::lxml.etree.XMLSyntaxError: Failure to process entity xxe, line 17, column 51', + '::error file=NUnit-sec1752-https.xml::Error processing result file: Failure to process entity xxe, line 17, column 51 (NUnit-sec1752-https.xml, line 17)', + ] * 2) + self.assertEqual( + sorted(expected), + sorted([re.sub(r'file=.*[/\\]', 'file=', re.sub(r'[(]file:.*/', '(', re.sub(r'format: .*[/\\]', 'format: ', line))) + for line in string.getvalue().split(os.linesep) if line]) + ) + # self.assertEqual([], m.method_calls) + + def test_parse_files_with_suite_details(self): + for options in [ + {'report_suite_out_logs': True, 'report_suite_err_logs': False}, + {'report_suite_out_logs': False, 'report_suite_err_logs': True}, + {'report_suite_out_logs': True, 'report_suite_err_logs': True}, + {'json_suite_details': True} + ]: + with self.subTest(**options): + gha = mock.MagicMock() + settings = self.get_settings(junit_files_glob=str(test_files_path / 'junit-xml' / '**' / '*.xml'), + nunit_files_glob=str(test_files_path / 'nunit' / '**' / '*.xml'), + xunit_files_glob=str(test_files_path / 'xunit' / '**' / '*.xml'), + trx_files_glob=str(test_files_path / 'trx' / '**' / '*.trx'), + **options) + actual = parse_files(settings, gha) + + if Version(sys.version.split(' ')[0]) < Version('3.9.0') and sys.platform.startswith('darwin') and \ + (platform.mac_ver()[0].startswith("11.") or platform.mac_ver()[0].startswith("12.")): + # on macOS (below macOS 13) and Python below 3.9 we see one particular error + self.assertEqual(363, len(actual.suite_details)) + else: + self.assertEqual(365, len(actual.suite_details)) + + def test_parse_files_no_matches(self): + gha = mock.MagicMock() + with tempfile.TemporaryDirectory() as path: + missing_junit = str(pathlib.Path(path) / 'junit-not-there') + missing_nunit = str(pathlib.Path(path) / 'nunit-not-there') + missing_xunit = str(pathlib.Path(path) / 'xunit-not-there') + missing_trx = str(pathlib.Path(path) / 'trx-not-there') + settings = self.get_settings(junit_files_glob=missing_junit, + nunit_files_glob=missing_nunit, + xunit_files_glob=missing_xunit, + trx_files_glob=missing_trx) + actual = parse_files(settings, gha) + + gha.warning.assert_has_calls([ + mock.call(f'Could not find any JUnit XML files for {missing_junit}'), + mock.call(f'Your file pattern contains absolute paths, please read the notes on absolute paths:'), + 
mock.call(f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#running-with-absolute-paths'), + mock.call(f'Could not find any NUnit XML files for {missing_nunit}'), + mock.call(f'Your file pattern contains absolute paths, please read the notes on absolute paths:'), + mock.call(f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#running-with-absolute-paths'), + mock.call(f'Could not find any XUnit XML files for {missing_xunit}'), + mock.call(f'Your file pattern contains absolute paths, please read the notes on absolute paths:'), + mock.call(f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#running-with-absolute-paths'), + mock.call(f'Could not find any TRX files for {missing_trx}'), + mock.call(f'Your file pattern contains absolute paths, please read the notes on absolute paths:'), + mock.call(f'https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#running-with-absolute-paths'), + ]) + gha.error.assert_not_called() + + self.assertEqual(0, actual.files) + self.assertEqual(0, len(actual.errors)) + self.assertEqual(0, actual.suites) + self.assertEqual(0, actual.suite_tests) + self.assertEqual(0, actual.suite_skipped) + self.assertEqual(0, actual.suite_failures) + self.assertEqual(0, actual.suite_errors) + self.assertEqual(0, actual.suite_time) + self.assertEqual(0, len(actual.cases)) + self.assertEqual('commit', actual.commit) + + def test_is_float(self): + for value, expected in [ + ('0', True), ('0.0', True), ('.0', True), ('0.', True), + ('1.2', True), ('-2.3', True), ('+1.3', True), + ('.', False), ('+1', True), ('-2', True), ('+1.', True), ('-2.', True), ('+.1', True), ('-.2', True), + ('a1', False), ('1a', False), ('1a2', False), ('12e45', False), + ]: + with self.subTest(value=value): + self.assertEqual(expected, is_float(value), value) + + def test_main(self): + with tempfile.TemporaryDirectory() as path: + filepath = os.path.join(path, 'file') + with open(filepath, 'wt', encoding='utf-8') as file: + file.write('{}') + + gha = mock.MagicMock() + settings = get_settings(dict( + COMMIT='commit', + GITHUB_TOKEN='********', + GITHUB_EVENT_PATH=file.name, + GITHUB_EVENT_NAME='push', + GITHUB_REPOSITORY='repo', + EVENT_FILE=None, + FILES='\n'.join(str(path) for path in [test_files_path / '**' / '*.xml', + test_files_path / '**' / '*.trx', + test_files_path / '**' / '*.json']), + JUNIT_FILES=str(test_files_path / 'junit-xml' / '**' / '*.xml'), + NUNIT_FILES=str(test_files_path / 'nunit' / '**' / '*.xml'), + XUNIT_FILES=str(test_files_path / 'xunit' / '**' / '*.xml'), + TRX_FILES=str(test_files_path / 'trx' / '**' / '*.trx'), + REPORT_SUITE_LOGS='info' + ), gha) + + with mock.patch('publish_test_results.get_github'), \ + mock.patch('publish.publisher.Publisher.publish') as m: + main(settings, gha) + + # Publisher.publish is expected to have been called once + self.assertEqual(1, len(m.call_args_list)) + self.assertEqual(3, len(m.call_args_list[0].args)) + + # Publisher.publish is expected to have been called with these arguments + results, cases, conclusion = m.call_args_list[0].args + self.assertEqual(145, results.files) + if Version(sys.version.split(' ')[0]) < Version('3.9.0') and sys.platform.startswith('darwin') and \ + (platform.mac_ver()[0].startswith("11.") or platform.mac_ver()[0].startswith("12.")): + # on macOS and below Python 3.9 we see one particular error + self.assertEqual(731, results.suites) + 
self.assertEqual(731, len(results.suite_details)) + self.assertEqual(1811, len(cases)) + else: + self.assertEqual(735, results.suites) + self.assertEqual(735, len(results.suite_details)) + self.assertEqual(1811, len(cases)) + self.assertEqual('failure', conclusion) + + def test_main_fork_pr_check_wo_summary(self): + with tempfile.TemporaryDirectory() as path: + filepath = os.path.join(path, 'file') + with open(filepath, 'wt', encoding='utf-8') as file: + file.write('{ "pull_request": { "head": { "repo": { "full_name": "fork/repo" } } } }') + + gha = mock.MagicMock() + settings = get_settings(dict( + COMMIT='commit', + GITHUB_TOKEN='********', + GITHUB_EVENT_PATH=file.name, + GITHUB_EVENT_NAME='pull_request', + GITHUB_REPOSITORY='repo', + EVENT_FILE=None, + JOB_SUMMARY='false' + ), gha) + + def do_raise(*args): + # if this is raised, the tested main method did not return where expected but continued + raise RuntimeError('This is not expected to be called') + + with mock.patch('publish_test_results.get_files') as m: + m.side_effect = do_raise + main(settings, gha) + + gha.warning.assert_has_calls([ + mock.call('This action is running on a pull_request event for a fork repository. ' + 'The only useful thing it can do in this situation is creating a job summary, ' + 'which is disabled in settings. To fully run the action on fork repository pull requests, ' + f'see https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}' + '/README.md#support-fork-repositories-and-dependabot-branches'), + mock.call('At least one of the FILES, JUNIT_FILES, NUNIT_FILES, XUNIT_FILES, ' + 'or TRX_FILES options has to be set! ' + 'Falling back to deprecated default "*.xml"') + ], any_order=True) + + def test_check_var(self): + with self.assertRaises(RuntimeError) as e: + check_var(None, 'var', 'Option') + self.assertEqual(('Option must be provided via action input or environment variable var', ), e.exception.args) + + check_var('value', 'var', 'Option', ['value', 'val']) + check_var('value', 'var', 'Option', ['value', 'val'], ['deprecated', 'dep']) + + with self.assertRaises(RuntimeError) as e: + check_var('deprecated', 'var', 'Option', ['value', 'val']) + self.assertEqual(("Value 'deprecated' is not supported for variable var, expected: value, val", ), e.exception.args) + + with self.assertRaises(RuntimeError) as e: + check_var(['value', 'deprecated', 'dep', 'val'], 'var', 'Option', ['value', 'val']) + self.assertEqual(("Some values in 'value, deprecated, dep, val' are not supported for variable var, " + "allowed: value, val", ), e.exception.args) + + def test_check_var_condition(self): + check_var_condition(True, 'message') + + with self.assertRaises(RuntimeError) as e: + check_var_condition(False, 'message') + self.assertEqual(("message", ), e.exception.args) + + def test_deprecate_var(self): + gha = mock.MagicMock() + deprecate_var(None, 'deprecated_var', 'replacement', gha) + gha.assert_not_called() + + deprecate_var('set', 'deprecated_var', 'replacement', gha) + gha.warning.assert_called_once_with('Option deprecated_var is deprecated! replacement') + + with mock.patch('publish_test_results.logger') as l: + deprecate_var('set', 'deprecated_var', 'replacement', None) + l.warning.assert_called_once_with('Option deprecated_var is deprecated! 
replacement') + + def test_deprecate_val(self): + gha = mock.MagicMock() + deprecate_val(None, 'deprecated_var', {}, gha) + gha.assert_not_called() + + deprecate_val('set', 'deprecated_var', {'deprecated': 'replace'}, gha) + gha.assert_not_called() + + deprecate_val('deprecated', 'deprecated_var', {'deprecated': 'replace'}, gha) + gha.warning.assert_called_once_with('Value "deprecated" for option deprecated_var is deprecated! Instead, use value "replace".') + + with mock.patch('publish_test_results.logger') as l: + deprecate_val('deprecated', 'deprecated_var', {'deprecated': 'replace'}, gha) + l.assert_not_called() + + deprecate_val('deprecated', 'deprecated_var', {'deprecated': 'replace'}, None) + l.warning.assert_called_once_with('Value "deprecated" for option deprecated_var is deprecated! Instead, use value "replace".') + + def test_action_fail(self): + for action_fail, action_fail_on_inconclusive, expecteds in [ + (False, False, [False] * 3), + (False, True, [True, False, False]), + (True, False, [False, False, True]), + (True, True, [True, False, True]), + ]: + for expected, conclusion in zip(expecteds, ['inconclusive', 'success', 'failure']): + with self.subTest(action_fail=action_fail, action_fail_on_inconclusive=action_fail_on_inconclusive, conclusion=conclusion): + actual = action_fail_required(conclusion, action_fail, action_fail_on_inconclusive) + self.assertEqual(expected, actual) diff --git a/python/test/test_action_yml.py b/python/test/test_action_yml.py new file mode 100644 index 0000000..23529e9 --- /dev/null +++ b/python/test/test_action_yml.py @@ -0,0 +1,68 @@ +import hashlib +import pathlib +import sys +import unittest + +import yaml + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) + +from publish import __version__ + +project_root = pathlib.Path(__file__).resolve().parent.parent.parent + + +class TestActionYml(unittest.TestCase): + + def test_action_version(self): + with open(project_root / 'action.yml', encoding='utf-8') as r: + dockerfile_action = yaml.safe_load(r) + + image = dockerfile_action.get('runs', {}).get('image', '') + self.assertTrue(image.startswith('docker://'), image) + version = image.split(':')[-1] + self.assertEqual(__version__, version, 'version in action.yml must match __version__ in python/publish/__init__.py') + + def test_composite_action(self): + with open(project_root / 'action.yml', encoding='utf-8') as r: + dockerfile_action = yaml.safe_load(r) + + with open(project_root / 'composite/action.yml', encoding='utf-8') as r: + composite_action = yaml.safe_load(r) + + self.assertIn('runs', dockerfile_action) + self.assertIn('runs', composite_action) + dockerfile_action_wo_runs = {k: v for k, v in dockerfile_action.items() if k != 'runs'} + composite_action_wo_runs = {k: v for k, v in composite_action.items() if k != 'runs'} + + # composite action has outputs.json.value, which does not exist for dockerfile action + self.assertIn('value', composite_action.get('outputs', {}).get('json', {})) + del composite_action.get('outputs', {}).get('json', {})['value'] + + # compare dockerfile action with composite action + self.assertEqual(dockerfile_action_wo_runs, composite_action_wo_runs) + self.assertIn(('using', 'composite'), composite_action.get('runs', {}).items()) + + # check cache key hash is up-to-date in composite action + # this md5 is linux-based (on Windows, git uses different newlines, which changes the hash) + if sys.platform != 'win32': + with open(project_root / 'python' / 'requirements.txt', mode='rb') as r: + 
expected_hash = hashlib.md5(r.read()).hexdigest() + cache_hash = next(step.get('with', {}).get('key', '').split('-')[-1] + for step in composite_action.get('runs', {}).get('steps', []) + if step.get('uses', '').startswith('actions/cache/restore@')) + self.assertEqual(expected_hash, cache_hash, msg='Changing python/requirements.txt requires ' + 'to update the MD5 hash in composite/action.yaml') + + def test_composite_inputs(self): + with open(project_root / 'composite/action.yml', encoding='utf-8') as r: + action = yaml.safe_load(r) + + # these are not documented in the action.yml files but still needs to be forwarded + extra_inputs = ['files', 'root_log_level', 'log_level'] + expected = {key.upper(): f'${{{{ inputs.{key} }}}}' for key in list(action.get('inputs', {}).keys()) + extra_inputs} + + steps = action.get('runs', {}).get('steps', []) + step = next((step for step in steps if step.get('name') == 'Publish Test Results'), {}) + inputs = {key.upper(): value for key, value in step.get('env', {}).items()} + self.assertEqual(expected, inputs) diff --git a/python/test/test_cicd_yml.py b/python/test/test_cicd_yml.py new file mode 100644 index 0000000..6e695db --- /dev/null +++ b/python/test/test_cicd_yml.py @@ -0,0 +1,31 @@ +import pathlib +import unittest + +import yaml + +project_root = pathlib.Path(__file__).resolve().parent.parent.parent + + +class TestActionYml(unittest.TestCase): + + def test_cicd_workflow(self): + with open(project_root / 'action.yml', encoding='utf-8') as r: + action = yaml.safe_load(r) + + with open(project_root / '.github/workflows/publish.yml', encoding='utf-8') as r: + cicd = yaml.safe_load(r) + + docker_image_steps = cicd.get('jobs', []).get('publish-docker-image', {}).get('steps', []) + docker_image_step = [step + for step in docker_image_steps + if step.get('name') == 'Publish Test Results'] + self.assertEqual(1, len(docker_image_step)) + docker_image_run = docker_image_step[0].get('run') + self.assertTrue(docker_image_run) + vars = [var[7:-1].lower() if var.startswith('"') and var.endswith('"') else var[6:].lower() + for line in docker_image_run.split('\n') + for part in line.split(' ') + for var in [part.strip()] + if var.startswith('INPUT_') or var.startswith('"INPUT_')] + + self.assertEqual(sorted(list(action.get('inputs', {}).keys()) + ['log_level', 'root_log_level']), sorted(vars)) diff --git a/python/test/test_dart.py b/python/test/test_dart.py new file mode 100644 index 0000000..44ec617 --- /dev/null +++ b/python/test/test_dart.py @@ -0,0 +1,65 @@ +import json +import os +import pathlib +import sys +import tempfile +import unittest +from glob import glob +from typing import List + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) +sys.path.append(str(pathlib.Path(__file__).resolve().parent)) + +from publish.junit import JUnitTreeOrParseError, safe_parse_xml_file +from publish.dart import parse_dart_json_file, is_dart_json +from test_junit import JUnitXmlParseTest + +test_path = pathlib.Path(__file__).resolve().parent +test_files_path = test_path / 'files' / 'dart' + + +class TestDartJson(unittest.TestCase, JUnitXmlParseTest): + maxDiff = None + + @property + def test(self): + return self + + @staticmethod + def unsupported_files() -> List[str]: + return [ + str(test_path / 'files' / 'json' / 'not-existing.json'), + str(test_path / 'files' / 'json' / 'empty.json'), + str(test_path / 'files' / 'json' / 'malformed-json.json'), + ] + + def is_supported(self, path: str) -> bool: + return is_dart_json(path) + + @staticmethod + def 
_test_files_path() -> pathlib.Path:
+        return test_files_path
+
+    @staticmethod
+    def get_test_files() -> List[str]:
+        return glob(str(test_files_path / '**' / '*.json'), recursive=True)
+
+    @staticmethod
+    def parse_file(filename) -> JUnitTreeOrParseError:
+        return safe_parse_xml_file(filename, parse_dart_json_file)
+
+    def test_is_dart_json(self):
+        with tempfile.TemporaryDirectory() as path:
+            self.assertFalse(is_dart_json(os.path.join(path, 'file')))
+
+            filepath = os.path.join(path, 'file.json')
+            with open(filepath, mode='wt') as w:
+                json.dump({"protocolVersion": "0.1.1", "type": "start"}, w)
+            self.assertTrue(is_dart_json(filepath))
+
+            os.rename(filepath, os.path.join(path, 'file.xml'))
+            self.assertFalse(is_dart_json(os.path.join(path, 'file.xml')))
+
+
+if __name__ == "__main__":
+    TestDartJson.update_expectations() diff --git a/python/test/test_github.py b/python/test/test_github.py new file mode 100644 index 0000000..5e4f4b0 --- /dev/null +++ b/python/test/test_github.py @@ -0,0 +1,392 @@
+import contextlib
+import logging
+import sys
+import time
+import unittest
+from datetime import datetime, timezone
+from json import JSONDecodeError
+from multiprocessing import Process
+from typing import Union, Tuple, Optional
+
+import github.GithubException
+import mock
+import requests.exceptions
+from flask import Flask, Response
+
+from publish_test_results import get_github
+
+
+@unittest.skipIf(sys.platform != 'linux', 'Pickling the mock REST endpoint only works on Linux')
+class TestGitHub(unittest.TestCase):
+
+    base_url = f'http://localhost:12380/api'
+    auth = github.Auth.Token('login or token')
+    gh = get_github(auth, base_url, retries=1, backoff_factor=0.1, seconds_between_requests=None, seconds_between_writes=None, secondary_rate_wait=3)
+
+    @classmethod
+    def start_api(cls, app: Flask) -> Process:
+        def run():
+            app.run(host='localhost', port=12380)
+
+        server = Process(target=run)
+        server.start()
+        attempt = 0
+        while attempt < 100:
+            try:
+                attempt += 1
+                requests.get('http://localhost:12380/health')
+                return server
+            except requests.exceptions.ConnectionError as e:
+                if attempt % 10 == 0:
+                    logging.warning(f'mock api server is not up yet, tried {attempt} times: {str(e)}')
+                time.sleep(0.01)
+        cls.stop_api(server)
+        raise RuntimeError('Failed to start mock api server, could not connect to health endpoint')
+
+    @staticmethod
+    def stop_api(server: Process) -> None:
+        server.terminate()
+        server.join(2)
+
+    @contextlib.contextmanager
+    def api_server(self,
+                   app_name: str,
+                   repo_response: Optional[Union[Tuple[str, int], Response]] = None,
+                   check_runs_response: Optional[Union[Tuple[str, int], Response]] = None,
+                   pulls_response: Optional[Union[Tuple[str, int], Response]] = None,
+                   issues_response: Optional[Union[Tuple[str, int], Response]] = None,
+                   graphql_response: Optional[Union[Tuple[str, int], Response]] = None):
+        app = Flask(app_name)
+
+        @app.route('/health')
+        def health():
+            return {'health': 'alive'}
+
+        @app.route('/api/repos/<owner>/<repo>')
+        def repo(owner: str, repo: str):
+            if repo_response is None:
+                return {'id': 1234, 'name': repo, 'full_name': '/'.join([owner, repo]), 'url': '/'.join([self.base_url, 'repos', owner, repo])}
+            return repo_response
+
+        @app.route('/api/repos/<owner>/<repo>/check-runs', methods=['POST'])
+        def check_runs(owner: str, repo: str):
+            return check_runs_response
+
+        @app.route('/api/repos/<owner>/<repo>/pulls/<int:number>')
+        def pull(owner: str, repo: str, number: int):
+            if pulls_response is None:
+                return {'id': 12345, 'number': number, 'issue_url': '/'.join([self.base_url,
'repos', owner, repo, 'issues', str(number)])}
+            return pulls_response
+
+        @app.route('/api/repos/<owner>/<repo>/issues/<int:number>/comments', methods=['POST'])
+        def comment(owner: str, repo: str, number: int):
+            return issues_response
+
+        @app.route('/api/graphql', methods=['POST'])
+        def graphql():
+            return graphql_response
+
+        server = self.start_api(app)
+        try:
+            yield server
+        finally:
+            self.stop_api(server)
+
+    test_http_status_to_retry = [500, 502, 503, 504]
+    test_http_status_to_not_retry = [400, 401, 404, 429]
+
+    def test_github_get_retry(self):
+        for status in self.test_http_status_to_retry:
+            with self.subTest(status=status):
+                with self.api_server(self.test_github_get_retry.__name__,
+                                     repo_response=(f'{{"message": "{status}"}}', status)):
+                    with mock.patch('github.GithubRetry._GithubRetry__log') as log:
+                        with self.assertRaises(requests.exceptions.RetryError) as context:
+                            self.gh.get_repo('owner/repo')
+                        self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo", context.exception.args[0].args[0])
+                        self.assertIn(f"Caused by ResponseError('too many {status} error responses'", context.exception.args[0].args[0])
+                        # 5xx http errors are retried by base Retry class, so no logging expected
+                        log.assert_not_called()
+
+    def test_github_get_retry_403_with_retry_after(self):
+        with self.api_server(self.test_github_get_retry_403_with_retry_after.__name__,
+                             repo_response=Response(response='{"message": "403"}', status=403, headers={'Retry-After': '1'})):
+            with mock.patch('github.GithubRetry._GithubRetry__log') as log:
+                with self.assertRaises(requests.exceptions.RetryError) as context:
+                    self.gh.get_repo('owner/repo')
+                self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo", context.exception.args[0].args[0])
+                self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0])
+                self.assertEqual(log.call_args_list, [mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'),
+                                                      mock.call(logging.INFO, 'Retrying after 1 seconds'),
+                                                      mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'),
+                                                      mock.call(logging.INFO, 'Retrying after 1 seconds')])
+
+    def test_github_get_retry_403_with_primary_error_rate_retry_message(self):
+        for message in ['api rate limit exceeded, please be gentle']:
+            with self.subTest(message=message):
+                with self.api_server(self.test_github_get_retry_403_with_primary_error_rate_retry_message.__name__,
+                                     repo_response=(f'{{"message": "{message}"}}', 403)):
+                    with mock.patch('github.GithubRetry._GithubRetry__log') as log, \
+                            mock.patch('github.GithubRetry._GithubRetry__datetime') as dt, \
+                            mock.patch('time.sleep') as sleep:
+                        dt.now = mock.Mock(return_value=datetime.fromtimestamp(1644768000, timezone.utc))
+                        dt.fromtimestamp = datetime.fromtimestamp
+                        with self.assertRaises(requests.exceptions.RetryError) as context:
+                            self.gh.get_repo('owner/repo')
+                        self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo", context.exception.args[0].args[0])
+                        self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0])
+                        self.assertListEqual(log.call_args_list, [
+                            mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'),
+                            mock.call(logging.DEBUG, f'Response body indicates retry-able primary rate limit error: {message}'),
+                            # base Retry class backoff
+                            mock.call(logging.INFO, f'Setting next backoff to 0s'),
+                            mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: 
FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able primary rate limit error: {message}') + ]) + sleep.assert_not_called() + + def test_github_get_retry_403_with_secondary_error_rate_retry_message(self): + for message in ['You have exceeded a secondary rate limit and have been temporarily blocked from content creation.', + 'You are not gentle, please wait a few minutes before you try again.']: + with self.subTest(message=message): + with self.api_server(self.test_github_get_retry_403_with_secondary_error_rate_retry_message.__name__, + repo_response=(f'{{"message": "{message}"}}', 403)): + with mock.patch('github.GithubRetry._GithubRetry__log') as log, \ + mock.patch('github.GithubRetry._GithubRetry__datetime') as dt, \ + mock.patch('time.sleep') as sleep: + dt.now = mock.Mock(return_value=datetime.fromtimestamp(1644768000, timezone.utc)) + dt.fromtimestamp = datetime.fromtimestamp + with self.assertRaises(requests.exceptions.RetryError) as context: + self.gh.get_repo('owner/repo') + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0]) + self.assertListEqual(log.call_args_list, [ + mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able secondary rate limit error: {message}'), + # secondary rate wait + mock.call(logging.INFO, f'Setting next backoff to 3s'), + mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able secondary rate limit error: {message}') + ]) + sleep.assert_has_calls([mock.call(3)]) + + def test_github_get_retry_403_with_retry_message_and_reset_time(self): + message = 'api rate limit exceeded, please be gentle' + with self.api_server(self.test_github_get_retry_403_with_retry_message_and_reset_time.__name__, + repo_response=Response(response=f'{{"message": "{message}"}}', status=403, headers={'X-RateLimit-Reset': '1644768030'})): + with mock.patch('github.GithubRetry._GithubRetry__log') as log,\ + mock.patch('github.GithubRetry._GithubRetry__datetime') as dt, \ + mock.patch('time.sleep') as sleep: + dt.now = mock.Mock(return_value=datetime.fromtimestamp(1644768000, timezone.utc)) + dt.fromtimestamp = datetime.fromtimestamp + with self.assertRaises(requests.exceptions.RetryError) as context: + self.gh.get_repo('owner/repo') + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0]) + self.assertListEqual(log.call_args_list, [mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able primary rate limit error: {message}'), + mock.call(logging.DEBUG, 'Reset occurs in 0:00:30 (1644768030 / 2022-02-13 16:00:30+00:00)'), + mock.call(logging.INFO, 'Setting next backoff to 31s'), + mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able primary rate limit error: {message}')]) + self.assertEqual(sleep.call_args_list, [mock.call(30.0 + 1)]) # we sleep one extra second + + def test_github_get_retry_403_with_retry_message_and_invalid_reset_time(self): + # reset time is 
expected to be int, what happens if it is not? + message = 'api rate limit exceeded, please be gentle' + with self.api_server(self.test_github_get_retry_403_with_retry_message_and_invalid_reset_time.__name__, + repo_response=Response(response=f'{{"message": "{message}"}}', status=403, headers={'X-RateLimit-Reset': 'in a few minutes'})): + with mock.patch('github.GithubRetry._GithubRetry__log') as log: + with self.assertRaises(requests.exceptions.RetryError) as context: + self.gh.get_repo('owner/repo') + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0]) + self.assertListEqual(log.call_args_list, [mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, 'Response body indicates retry-able primary rate limit error: api rate limit exceeded, please be gentle'), + mock.call(logging.INFO, 'Setting next backoff to 0s'), + mock.call(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, 'Response body indicates retry-able primary rate limit error: api rate limit exceeded, please be gentle')]) + + def test_github_get_retry_403_without_message(self): + for content in ["{'info': 'here is no message'}", 'here is no json']: + with self.subTest(content=content): + with self.api_server(self.test_github_get_retry_403_without_message.__name__, + repo_response=(content, 403)): + with mock.patch('github.GithubRetry._GithubRetry__log') as log: + with self.assertRaises(github.GithubException) as context: + self.gh.get_repo('owner/repo') + self.assertEqual(403, context.exception.args[0]) + self.assertEqual(content.encode('utf-8'), context.exception.args[1]) + self.assertIsInstance(context.exception.__cause__, RuntimeError) + self.assertEqual(('Failed to inspect response message', ), context.exception.__cause__.args) + log.assert_called_once_with(logging.INFO, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN') + + def test_github_get_no_retry(self): + # 403 does not get retried without special header field or body message + for status in self.test_http_status_to_not_retry + [403]: + with self.subTest(status=status): + with self.api_server(self.test_github_get_no_retry.__name__, + repo_response=(f'{{"message": "{status}"}}', status)): + with mock.patch('github.GithubRetry._GithubRetry__log') as log: + with self.assertRaises(github.GithubException) as context: + self.gh.get_repo('owner/repo') + self.assertEqual(status, context.exception.args[0]) + self.assertEqual({'message': f'{status}'}, context.exception.args[1]) + if status == 403: + self.assertListEqual(log.call_args_list, [mock.call(20, 'Request GET /api/repos/owner/repo failed with 403: FORBIDDEN'), + mock.call(10, 'Response message does not indicate retry-able error')]) + else: + log.assert_not_called() + + def test_github_post_retry(self): + for status in self.test_http_status_to_retry: + with self.subTest(status=status): + response = (f'{{"message": "{status}"}}', status) + with self.api_server(self.test_github_post_retry.__name__, + check_runs_response=response, + issues_response=response, + graphql_response=response): + repo = self.gh.get_repo('owner/repo') + expected = {'full_name': 'owner/repo', 'id': 1234, 'name': 'repo', 'url': 'http://localhost:12380/api/repos/owner/repo'} + self.assertEqual(expected, repo.raw_data) + + with 
mock.patch('github.GithubRetry._GithubRetry__log') as log: + with self.assertRaises(requests.exceptions.RetryError) as context: + repo.create_check_run(name='check_name', + head_sha='sha', + status='completed', + conclusion='success', + output={}) + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo/check-runs", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many {status} error responses'", context.exception.args[0].args[0]) + + pr = repo.get_pull(1) + expected = {'id': 12345, 'number': 1, 'issue_url': 'http://localhost:12380/api/repos/owner/repo/issues/1'} + self.assertEqual(expected, pr.raw_data) + + with self.assertRaises(requests.exceptions.RetryError) as context: + pr.create_issue_comment('issue comment body') + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo/issues/1/comments", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many {status} error responses'", context.exception.args[0].args[0]) + + with self.assertRaises(requests.exceptions.RetryError) as context: + self.gh._Github__requester.requestJsonAndCheck( + "POST", '/'.join([self.base_url, 'graphql']), input={} + ) + self.assertIn(f"Max retries exceeded with url: /api/graphql", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many {status} error responses'", context.exception.args[0].args[0]) + + log.assert_not_called() + + def test_github_post_retry_403_with_retry_after(self): + with self.api_server(self.test_github_post_retry_403_with_retry_after.__name__, + check_runs_response=Response(response='{"message": "403"}', status=403, headers={'Retry-After': '1'})): + with mock.patch('github.GithubRetry._GithubRetry__log') as log: + repo = self.gh.get_repo('owner/repo') + + with self.assertRaises(requests.exceptions.RetryError) as context: + repo.create_check_run(name='check_name', + head_sha='sha', + status='completed', + conclusion='success', + output={}) + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo/check-runs", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0]) + self.assertListEqual(log.call_args_list, [mock.call(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs failed with 403: FORBIDDEN'), + mock.call(logging.INFO, 'Retrying after 1 seconds'), + mock.call(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs failed with 403: FORBIDDEN'), + mock.call(logging.INFO, 'Retrying after 1 seconds')]) + + def test_github_post_retry_403_with_primary_error_retry_message(self): + for message in ['api rate limit exceeded, please be gentle']: + with self.subTest(message=message): + with self.api_server(self.test_github_post_retry_403_with_primary_error_retry_message.__name__, + check_runs_response=(f'{{"message": "{message}"}}', 403)): + repo = self.gh.get_repo('owner/repo') + + with mock.patch('github.GithubRetry._GithubRetry__log') as log: + with self.assertRaises(requests.exceptions.RetryError) as context: + repo.create_check_run(name='check_name', + head_sha='sha', + status='completed', + conclusion='success', + output={}) + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo/check-runs", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0]) + self.assertListEqual(log.call_args_list, [mock.call(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs 
failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able primary rate limit error: {message}'), + # base Retry class backoff + mock.call(logging.INFO, f'Setting next backoff to 0s'), + mock.call(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able primary rate limit error: {message}')]) + + def test_github_post_retry_403_with_secondary_error_retry_message(self): + for message in ['You have exceeded a secondary rate limit and have been temporarily blocked from content creation.', + 'You are not gentle, please wait a few minutes before you try again.']: + with self.subTest(message=message): + with self.api_server(self.test_github_post_retry_403_with_secondary_error_retry_message.__name__, + check_runs_response=(f'{{"message": "{message}"}}', 403)): + repo = self.gh.get_repo('owner/repo') + + with mock.patch('github.GithubRetry._GithubRetry__log') as log, \ + mock.patch('time.sleep') as sleep: + + with self.assertRaises(requests.exceptions.RetryError) as context: + repo.create_check_run(name='check_name', + head_sha='sha', + status='completed', + conclusion='success', + output={}) + self.assertIn(f"Max retries exceeded with url: /api/repos/owner/repo/check-runs", context.exception.args[0].args[0]) + self.assertIn(f"Caused by ResponseError('too many 403 error responses'", context.exception.args[0].args[0]) + self.assertListEqual(log.call_args_list, [mock.call(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able secondary rate limit error: {message}'), + # secondary rate wait + mock.call(logging.INFO, f'Setting next backoff to 3s'), + mock.call(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, f'Response body indicates retry-able secondary rate limit error: {message}')]) + sleep.assert_has_calls([mock.call(3)]) + + def test_github_post_retry_403_without_message(self): + for content in ["{'info': 'here is no message'}", 'here is no json']: + with self.subTest(content=content): + with self.api_server(self.test_github_post_retry_403_without_message.__name__, + check_runs_response=(content, 403)): + with mock.patch('github.GithubRetry._GithubRetry__log') as log: + repo = self.gh.get_repo('owner/repo') + + with self.assertRaises(github.GithubException) as context: + repo.create_check_run(name='check_name', + head_sha='sha', + status='completed', + conclusion='success', + output={}) + self.assertEqual(403, context.exception.args[0]) + self.assertEqual(content.encode('utf-8'), context.exception.args[1]) + self.assertIsInstance(context.exception.__cause__, RuntimeError) + self.assertEqual(('Failed to inspect response message', ), context.exception.__cause__.args) + log.assert_called_once_with(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs failed with 403: FORBIDDEN') + + def test_github_post_no_retry(self): + # 403 does not get retried without special header field or body message + for status in self.test_http_status_to_not_retry + [403]: + with self.subTest(status=status): + with self.api_server(self.test_github_post_no_retry.__name__, + check_runs_response=(f'{{"message": "{status}"}}', status)): + repo = self.gh.get_repo('owner/repo') + + with mock.patch('github.GithubRetry._GithubRetry__log') as log: + with self.assertRaises(github.GithubException) as context: + 
repo.create_check_run(name='check_name', + head_sha='sha', + status='completed', + conclusion='success', + output={}) + self.assertEqual(status, context.exception.args[0]) + if status == 403: + self.assertListEqual(log.call_args_list, [ + mock.call(logging.INFO, 'Request POST /api/repos/owner/repo/check-runs failed with 403: FORBIDDEN'), + mock.call(logging.DEBUG, 'Response message does not indicate retry-able error'), + ]) + else: + log.assert_not_called() diff --git a/python/test/test_github_action.py b/python/test/test_github_action.py new file mode 100644 index 0000000..d8bb7d0 --- /dev/null +++ b/python/test/test_github_action.py @@ -0,0 +1,268 @@ +import io +import os +import re +import tempfile +import unittest +from contextlib import contextmanager +from typing import Optional + +import mock + +from publish.github_action import GithubAction + + +@contextmanager +def gh_action_command_test(test: unittest.TestCase, expected: Optional[str]) -> GithubAction: + with io.StringIO() as string: + yield GithubAction(file=string) + if expected is None: + test.assertEqual('', string.getvalue()) + else: + test.assertEqual(f'{expected}{os.linesep}', string.getvalue()) + + +@contextmanager +def gh_action_env_file_test(test: unittest.TestCase, env_file_var_name: str, expected: Optional[str]) -> GithubAction: + with tempfile.TemporaryDirectory() as path: + filepath = os.path.join(path, 'file') + with mock.patch.dict(os.environ, {env_file_var_name: filepath}): + with gh_action_command_test(test, None) as gha: + yield gha + + test.assertEqual(expected is not None, os.path.exists(filepath), 'Is the file expected to exit now?') + if expected is not None: + with open(filepath, 'r', encoding='utf-8') as file: + content = file.read() + test.assertEqual(expected, content) + + +class TestGithubAction(unittest.TestCase): + + env_file_var_name = None + output_file_var_name = None + path_file_var_name = None + job_summary_file_var_name = None + + @classmethod + def setUpClass(cls) -> None: + cls.env_file_var_name = GithubAction.ENV_FILE_VAR_NAME + cls.output_file_var_name = GithubAction.OUTPUT_FILE_VAR_NAME + cls.path_file_var_name = GithubAction.PATH_FILE_VAR_NAME + cls.job_summary_file_var_name = GithubAction.JOB_SUMMARY_FILE_VAR_NAME + + GithubAction.ENV_FILE_VAR_NAME = 'TEST_' + cls.env_file_var_name + GithubAction.OUTPUT_FILE_VAR_NAME = 'TEST_' + cls.output_file_var_name + GithubAction.PATH_FILE_VAR_NAME = 'TEST_' + cls.path_file_var_name + GithubAction.JOB_SUMMARY_FILE_VAR_NAME = 'TEST_' + cls.job_summary_file_var_name + + @classmethod + def tearDownClass(cls) -> None: + GithubAction.ENV_FILE_VAR_NAME = cls.env_file_var_name + GithubAction.OUTPUT_FILE_VAR_NAME = cls.output_file_var_name + GithubAction.PATH_FILE_VAR_NAME = cls.path_file_var_name + GithubAction.JOB_SUMMARY_FILE_VAR_NAME = cls.job_summary_file_var_name + + def test_add_mask(self): + with gh_action_command_test(self, '::add-mask::the mask') as gha: + gha.add_mask('the mask') + + def test_stop_commands(self): + with gh_action_command_test(self, '::stop-commands::the end token') as gha: + gha.stop_commands('the end token') + + def test_continue_commands(self): + with gh_action_command_test(self, '::the end token::') as gha: + gha.continue_commands('the end token') + + def test_group(self): + with gh_action_command_test(self, '::group::group title') as gha: + gha.group('group title') + + def test_group_end(self): + with gh_action_command_test(self, '::endgroup::') as gha: + gha.group_end() + + def test_debug(self): + with 
gh_action_command_test(self, '::debug::the message') as gha: + gha.debug('the message') + + def test_warning(self): + with gh_action_command_test(self, '::warning::the message') as gha: + gha.warning('the message') + with gh_action_command_test(self, '::warning file=the file::the message') as gha: + gha.warning('the message', file='the file') + with gh_action_command_test(self, '::warning line=1::the message') as gha: + gha.warning('the message', line=1) + with gh_action_command_test(self, '::warning col=2::the message') as gha: + gha.warning('the message', column=2) + with gh_action_command_test(self, '::warning file=the file,line=1,col=2::the message') as gha: + gha.warning('the message', file='the file', line=1, column=2) + + def test_notice(self): + with gh_action_command_test(self, '::notice::the message') as gha: + gha.notice('the message') + with gh_action_command_test(self, '::notice title=a title,file=the file,col=3,endColumn=4,line=1,endLine=2::the message') as gha: + gha.notice('the message', file='the file', line=1, end_line=2, column=3, end_column=4, title='a title') + + def test_error(self): + with gh_action_command_test(self, '::error::the message') as gha: + gha.error('the message') + with gh_action_command_test(self, '::error file=the file::the message') as gha: + gha.error('the message', file='the file') + with gh_action_command_test(self, '::error line=1::the message') as gha: + gha.error('the message', line=1) + with gh_action_command_test(self, '::error col=2::the message') as gha: + gha.error('the message', column=2) + with gh_action_command_test(self, '::error file=the file,line=1,col=2::the message') as gha: + gha.error('the message', file='the file', line=1, column=2) + + # log exception + with gh_action_command_test(self, f'::error::RuntimeError: failure{os.linesep}' + f'::error file=the file,line=1,col=2::the message') as gha: + try: + raise RuntimeError('failure') + except RuntimeError as e: + error = e + + with mock.patch('publish.github_action.logger') as m: + gha.error('the message', file='the file', line=1, column=2, exception=error) + + self.assertEqual( + [(call[0], re.sub(r'File ".*[/\\]', 'File "', re.sub(r'line \d+', 'line X', call.args[0]))) + for call in m.method_calls], + [ + ('error', 'RuntimeError: failure'), + ('debug', 'Traceback (most recent call last):'), + ('debug', ' File "test_github_action.py", line X, in test_error'), + ('debug', " raise RuntimeError('failure')"), + ('debug', 'RuntimeError: failure') + ] + ) + + # log exceptions related via cause + with gh_action_command_test(self, f'::error::RuntimeError: failed except caused by ValueError: invalid value{os.linesep}' + f'::error::ValueError: invalid value{os.linesep}' + f'::error file=the file,line=1,col=2::the message') as gha: + error = self.get_error_with_cause() + with mock.patch('publish.github_action.logger') as m: + gha.error('the message', file='the file', line=1, column=2, exception=error) + + self.assertEqual( + [(call[0], re.sub(r'File ".*[/\\]', 'File "', re.sub(r'line \d+', 'line X', call.args[0]))) + for call in m.method_calls], + [ + ('error', 'RuntimeError: failed except caused by ValueError: invalid value'), + ('debug', 'Traceback (most recent call last):'), + ('debug', ' File "test_github_action.py", line X, in get_error_with_cause'), + ('debug', " raise RuntimeError('failed except') from ValueError('invalid value')"), + ('debug', 'RuntimeError: failed except'), + ('error', 'ValueError: invalid value'), + ('debug', 'ValueError: invalid value') + ] + ) + + # log 
exceptions related via context + with gh_action_command_test(self, f'::error::RuntimeError: failed except while handling ValueError: invalid value{os.linesep}' + f'::error::ValueError: invalid value{os.linesep}' + f'::error file=the file,line=1,col=2::the message') as gha: + error = self.get_error_with_context() + with mock.patch('publish.github_action.logger') as m: + gha.error('the message', file='the file', line=1, column=2, exception=error) + + self.assertEqual( + [(call[0], re.sub(r'File ".*[/\\]', 'File "', re.sub(r'line \d+', 'line X', call.args[0]))) + for call in m.method_calls], + [ + ('error', 'RuntimeError: failed except while handling ValueError: invalid value'), + ('debug', 'Traceback (most recent call last):'), + ('debug', ' File "test_github_action.py", line X, in get_error_with_context'), + ('debug', " raise RuntimeError('failed except')"), + ('debug', 'RuntimeError: failed except'), + ('error', 'ValueError: invalid value'), + ('debug', 'Traceback (most recent call last):'), + ('debug', ' File "test_github_action.py", line X, in get_error_with_context'), + ('debug', " raise ValueError('invalid value')"), + ('debug', 'ValueError: invalid value') + ] + ) + + @staticmethod + def get_error_with_cause() -> RuntimeError: + try: + raise RuntimeError('failed except') from ValueError('invalid value') + except RuntimeError as re: + return re + + @staticmethod + def get_error_with_context() -> RuntimeError: + try: + raise ValueError('invalid value') + except ValueError: + try: + raise RuntimeError('failed except') + except RuntimeError as re: + return re + + def test_echo(self): + with gh_action_command_test(self, '::echo::on') as gha: + gha.echo(True) + with gh_action_command_test(self, '::echo::off') as gha: + gha.echo(False) + + def test_add_env(self): + with gh_action_env_file_test(self, GithubAction.ENV_FILE_VAR_NAME, 'var=val\n') as gha: + gha.add_to_env('var', 'val') + with gh_action_env_file_test(self, GithubAction.ENV_FILE_VAR_NAME, 'var1=one\nvar2=two\n') as gha: + gha.add_to_env('var1', 'one') + gha.add_to_env('var2', 'two') + with gh_action_env_file_test(self, GithubAction.ENV_FILE_VAR_NAME, None) as gha: + with self.assertRaisesRegex(ValueError, 'Multiline values not supported for environment variables'): + gha.add_to_env('var', 'multi\nline\nvalue') + + def test_add_path(self): + with gh_action_env_file_test(self, GithubAction.PATH_FILE_VAR_NAME, 'additional-path\n') as gha: + gha.add_to_path('additional-path') + + def test_add_output(self): + with gh_action_env_file_test(self, GithubAction.OUTPUT_FILE_VAR_NAME, 'var=val\n') as gha: + gha.add_to_output('var', 'val') + with gh_action_env_file_test(self, GithubAction.OUTPUT_FILE_VAR_NAME, 'var1=val3\nvar2=val4\n') as gha: + gha.add_to_output('var1', 'val3') + gha.add_to_output('var2', 'val4') + + # if there is no env file, the output is set via command + with gh_action_command_test(self, '::set-output name=varname::varval') as gha: + gha.add_to_output('varname', 'varval') + + def test_add_job_summary(self): + with gh_action_env_file_test(self, GithubAction.JOB_SUMMARY_FILE_VAR_NAME, '# markdown') as gha: + gha.add_to_job_summary('# markdown') + with gh_action_env_file_test(self, GithubAction.JOB_SUMMARY_FILE_VAR_NAME, + '# title\ncontent\n## subtitle\nmore content\n') as gha: + gha.add_to_job_summary('# title\ncontent\n') + gha.add_to_job_summary('## subtitle\nmore content\n') + + def test__command_with_multi_line_value(self): + with io.StringIO() as string: + GithubAction._command(string, 'command', 'multi\nline\nvalue') 
+ self.assertEqual('::command::multi' + os.linesep, string.getvalue()) + + def test__append_to_file_errors(self): + # env variable does not exist + with mock.patch.dict(os.environ, {}, clear=True): + with gh_action_command_test(self, '::warning::Cannot append to environment file ENV_VAR_THAT_DOES_NOT_EXIST as it is not set. ' + 'See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files') as gha: + env_var_name = 'ENV_VAR_THAT_DOES_NOT_EXIST' + self.assertFalse(env_var_name in os.environ, 'that environment variable should not exist') + gha._append_to_file('markdown', env_var_name) + + # path is not writable + with tempfile.TemporaryDirectory() as path: + env_var_name = 'ENV_FILE' + filepath = os.path.join(os.path.join(path, 'sub'), 'file') + with mock.patch.dict(os.environ, {env_var_name: filepath}): + escaped_filepath = filepath.replace('\\', '\\\\') + with gh_action_command_test(self, f"::warning::Failed to write to environment file {filepath}: " + f"[Errno 2] No such file or directory: '{escaped_filepath}'. " + f"See https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#environment-files") as gha: + gha._append_to_file('markdown', env_var_name) diff --git a/python/test/test_junit.py b/python/test/test_junit.py new file mode 100644 index 0000000..3cf6021 --- /dev/null +++ b/python/test/test_junit.py @@ -0,0 +1,520 @@ +import abc +import dataclasses +import os +import pathlib +import re +import sys +import unittest +from glob import glob +from typing import Optional, List +import mock + +import junitparser +import prettyprinter as pp +from junitparser import JUnitXml, Element +from lxml import etree +from packaging.version import Version + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) +sys.path.append(str(pathlib.Path(__file__).resolve().parent)) + +from publish import __version__, available_annotations, none_annotations +from publish.junit import is_junit, parse_junit_xml_files, adjust_prefix, process_junit_xml_elems, get_results, \ + get_result, get_content, get_message, Disabled, JUnitTreeOrParseError, ParseError +from publish.unittestresults import ParsedUnitTestResults, UnitTestCase +from publish_test_results import get_test_results, get_stats, get_conclusion +from publish.publisher import Publisher +from test_action_script import Test +from test_utils import temp_locale + +test_path = pathlib.Path(__file__).resolve().parent +test_files_path = test_path / 'files' / 'junit-xml' +pp.install_extras() + + +class TestElement(Element): + __test__ = False + + def __init__(self, tag: str, message: Optional[str] = None, content: Optional[str] = None): + super().__init__(tag) + self._tag = tag + self.message = message + self._elem.text = content + + @property + def text(self): + return self._elem.text + + +class JUnitXmlParseTest: + @property + def test(self): + raise NotImplementedError() + + @staticmethod + def unsupported_files() -> List[str]: + return [ + str(test_path / 'files' / 'xml' / 'not-existing.xml'), + str(test_path / 'files' / 'xml' / 'empty.xml'), + str(test_path / 'files' / 'xml' / 'non-xml.xml'), + ] + + @abc.abstractmethod + def is_supported(self, path: str) -> bool: + pass + + @staticmethod + @abc.abstractmethod + def _test_files_path() -> pathlib.Path: + pass + + @staticmethod + def get_test_files() -> List[str]: + raise NotImplementedError() + + @staticmethod + @abc.abstractmethod + def parse_file(filename) -> JUnitTreeOrParseError: + pass + + @staticmethod + def 
assert_expectation(test, actual, filename): + if not os.path.exists(filename): + test.fail(f'file does not exist: {filename}, expected content: {actual}') + with open(filename, 'r', encoding='utf-8') as r: + expected = r.read() + test.assertEqual(expected, actual) + + @classmethod + def shorten_filename(cls, filename, prefix=None): + removed_prefix = prefix or cls._test_files_path() + removed_prefix_str = str(removed_prefix.resolve().as_posix()) + + if filename.startswith(removed_prefix_str): + return filename[len(removed_prefix_str) + 1:] + elif prefix is None: + return cls.shorten_filename(filename, test_path) + else: + return filename + + def test_adjust_prefix(self): + self.assertEqual(adjust_prefix("file", "+"), "file") + self.assertEqual(adjust_prefix("file", "+."), ".file") + self.assertEqual(adjust_prefix("file", "+./"), "./file") + self.assertEqual(adjust_prefix("file", "+path/"), "path/file") + + self.assertEqual(adjust_prefix("file", "-"), "file") + self.assertEqual(adjust_prefix(".file", "-."), "file") + self.assertEqual(adjust_prefix("./file", "-./"), "file") + self.assertEqual(adjust_prefix("path/file", "-path/"), "file") + self.assertEqual(adjust_prefix("file", "-"), "file") + self.assertEqual(adjust_prefix("file", "-."), "file") + self.assertEqual(adjust_prefix("file", "-./"), "file") + self.assertEqual(adjust_prefix("file", "-path/"), "file") + + def do_test_parse_and_process_files(self, filename: str): + for locale in [None, 'en_US.UTF-8', 'de_DE.UTF-8']: + with self.test.subTest(file=self.shorten_filename(filename), locale=locale): + with temp_locale(locale): + actual = self.parse_file(filename) + path = pathlib.Path(filename) + if isinstance(actual, ParseError): + # make file relative so the path in the exception file does not depend on where we checkout the sources + actual = dataclasses.replace(actual, file=pathlib.Path(actual.file).relative_to(test_path).as_posix()) + actual = self.prettify_exception(actual) + expectation_path = path.parent / (path.stem + '.exception') + self.assert_expectation(self.test, actual, expectation_path) + else: + xml_expectation_path = path.parent / (path.stem + '.junit-xml') + actual_tree = etree.tostring(actual, encoding='utf-8', xml_declaration=True, pretty_print=True).decode('utf-8') + self.assert_expectation(self.test, actual_tree, xml_expectation_path) + + results_expectation_path = path.parent / (path.stem + '.results') + actual_results = process_junit_xml_elems([(self.shorten_filename(path.resolve().as_posix()), actual)], add_suite_details=True) + self.assert_expectation(self.test, pp.pformat(actual_results, indent=2), results_expectation_path) + + annotations_expectation_path = path.parent / (path.stem + '.annotations') + actual_annotations = self.get_check_runs(actual_results) + self.assert_expectation(self.test, pp.pformat(actual_annotations, indent=2).replace(__version__, 'VERSION'), annotations_expectation_path) + + def test_parse_and_process_files(self): + for file in self.get_test_files() + self.unsupported_files(): + self.do_test_parse_and_process_files(file) + + @classmethod + def update_expectations(cls): + print('updating expectations') + for filename in cls.get_test_files() + cls.unsupported_files(): + print(f'- updating {filename}') + actual = cls.parse_file(filename) + path = pathlib.Path(filename).resolve() + if isinstance(actual, ParseError): + # make file relative so the path in the exception file does not depend on where we checkout the sources + actual = dataclasses.replace(actual, 
file=pathlib.Path(actual.file).relative_to(test_path).as_posix()) + with open(path.parent / (path.stem + '.exception'), 'w', encoding='utf-8') as w: + w.write(cls.prettify_exception(actual)) + else: + with open(path.parent / (path.stem + '.junit-xml'), 'w', encoding='utf-8') as w: + xml = etree.tostring(actual, encoding='utf-8', xml_declaration=True, pretty_print=True) + w.write(xml.decode('utf-8')) + with open(path.parent / (path.stem + '.results'), 'w', encoding='utf-8') as w: + results = process_junit_xml_elems([(cls.shorten_filename(path.resolve().as_posix()), actual)], add_suite_details=True) + w.write(pp.pformat(results, indent=2)) + with open(path.parent / (path.stem + '.annotations'), 'w', encoding='utf-8') as w: + check_runs = cls.get_check_runs(results) + w.write(pp.pformat(check_runs, indent=2).replace(__version__, 'VERSION')) + + @classmethod + def get_check_runs(cls, parsed): + check_runs = [] + + def edit(output: dict): + check_runs.append(dict(output=output)) + + def create_check_run(name: str, + head_sha: str, + status: str, + conclusion: str, + output: dict): + check_runs.append( + dict(name=name, head_sha=head_sha, status=status, conclusion=conclusion, output=output) + ) + return mock.MagicMock(html_url='html', edit=mock.Mock(side_effect=edit)) + + commit = 'commit sha' + parsed = parsed.with_commit(commit) + results = get_test_results(parsed, False) + stats = get_stats(results) + conclusion = get_conclusion(parsed, fail_on_failures=True, fail_on_errors=True) + settings = Test.get_settings(check_name='Test Results', + commit=commit, + compare_earlier=False, + report_individual_runs=False, + report_suite_out_logs=True, + report_suite_err_logs=True, + dedup_classes_by_file_name=False, + check_run_annotation=set(available_annotations).difference(set(none_annotations))) + + repo = mock.MagicMock(create_check_run=create_check_run) + gh = mock.MagicMock(get_repo=mock.Mock(return_value=repo)) + gha = mock.MagicMock() + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + Publisher(settings, gh, gha).publish(stats, results.case_results, conclusion) + + return check_runs + + @staticmethod + def prettify_exception(exception) -> str: + exception = exception.__repr__() + exception = re.sub(r'\r?\n\r?', r'\\n', exception) + exception = re.sub(r'\(', ': ', exception, 1) + exception = re.sub(r'file:.*/', '', exception) + exception = re.sub(r',?\s*\)\)$', ')', exception) + return exception + + def test_is_supported_file(self): + test_files = self.get_test_files() + self.test.assertTrue(len(test_files) > 0) + self.do_test_is_supported_file(test_files, []) + + def do_test_is_supported_file(self, + test_files: List[str], + unsupported_files: List[str]): + all_supported_files = set(test_files).difference(unsupported_files or []) + + all_unsupported_files = self.unsupported_files().copy() + all_unsupported_files.extend(TestJunit.get_test_files()) + + from test_nunit import TestNunit + all_unsupported_files.extend(TestNunit.get_test_files()) + + from test_xunit import TestXunit + all_unsupported_files.extend(TestXunit.get_test_files()) + + from test_trx import TestTrx + all_unsupported_files.extend(TestTrx.get_test_files()) + + self.test.assertTrue(len(all_supported_files) > 0) + for file in all_supported_files: + with self.test.subTest(file=self.shorten_filename(file, test_path)): + self.test.assertTrue(self.is_supported(file)) + + all_unsupported_files = set(all_unsupported_files).difference(all_supported_files) + 
self.test.assertTrue(len(all_unsupported_files) > len(unsupported_files or [])) + for file in all_unsupported_files: + with self.test.subTest(file=self.shorten_filename(file, test_path)): + self.test.assertFalse(self.is_supported(file)) + + +class TestJunit(unittest.TestCase, JUnitXmlParseTest): + maxDiff = None + + @property + def test(self): + return self + + def is_supported(self, path: str) -> bool: + return is_junit(path) + + @staticmethod + def _test_files_path() -> pathlib.Path: + return test_files_path + + @staticmethod + def get_test_files() -> List[str]: + return glob(str(test_files_path / '**' / '*.xml'), recursive=True) + + @staticmethod + def parse_file(filename) -> JUnitTreeOrParseError: + return list(parse_junit_xml_files([filename], False, False))[0][1] + + def test_is_supported_file(self): + test_files = self.get_test_files() + non_junit_files = [ + str(test_files_path / 'non-junit.xml'), + str(test_path / 'xml' / 'non-xml.xml') + ] + self.do_test_is_supported_file(test_files, non_junit_files) + + def test_process_parse_junit_xml_files_with_no_files(self): + self.assertEqual( + process_junit_xml_elems(parse_junit_xml_files([], False, False)), + ParsedUnitTestResults( + files=0, + errors=[], + suites=0, + suite_tests=0, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=0, + suite_details=[], + cases=[] + )) + + # tests https://github.com/weiwei/junitparser/issues/64 + def test_junitparser_locale(self): + junit = JUnitXml.fromfile(str(test_files_path / 'pytest' / 'junit.spark.integration.1.xml')) + self.assertAlmostEqual(162.933, junit.time, 3) + + @unittest.skipIf(Version(junitparser.version) < Version('2.0.0'), + 'multiple results per test case not supported by junitparser') + def test_parse_junit_xml_file_with_multiple_results(self): + junit = process_junit_xml_elems(parse_junit_xml_files([str(test_files_path / 'junit.multiresult.xml')], False, False)) + self.assertEqual(4, len(junit.cases)) + self.assertEqual("error", junit.cases[0].result) + self.assertEqual("failure", junit.cases[1].result) + self.assertEqual("skipped", junit.cases[2].result) + self.assertEqual("success", junit.cases[3].result) + + def test_process_parse_junit_xml_files_with_time_factor(self): + result_file = str(test_files_path / 'scalatest' / 'TEST-uk.co.gresearch.spark.diff.DiffOptionsSuite.xml') + for time_factor in [1.0, 10.0, 60.0, 0.1, 0.001]: + with self.subTest(time_factor=time_factor): + self.assertEqual( + process_junit_xml_elems(parse_junit_xml_files([result_file], False, False), time_factor=time_factor), + ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=5, + suite_skipped=0, + suite_failures=0, + suite_errors=0, + suite_time=int(2.222 * time_factor), + suite_details=[], + cases=[ + UnitTestCase( + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + result_file=result_file, + test_file=None, + line=None, + test_name='diff options with empty diff column name', + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.259 * time_factor + ), + UnitTestCase( + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + result_file=result_file, + test_name='diff options left and right prefixes', + test_file=None, + line=None, + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=1.959 * time_factor + ), + UnitTestCase( + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + result_file=result_file, + test_name='diff options diff value', + test_file=None, + line=None, + 
result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 * time_factor + ), + UnitTestCase( + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + result_file=result_file, + test_name='diff options with change column name same as diff column', + test_file=None, + line=None, + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.002 * time_factor + ), + UnitTestCase( + class_name='uk.co.gresearch.spark.diff.DiffOptionsSuite', + result_file=result_file, + test_name='fluent methods of diff options', + test_file=None, + line=None, + result='success', + message=None, + content=None, + stdout=None, + stderr=None, + time=0.001 * time_factor + ) + ] + )) + + def test_process_parse_junit_xml_files_with_test_file_prefix(self): + result_file = str(test_files_path / 'pytest' / 'junit.fail.xml') + for prefix in ["+python/", "-test/", "-src"]: + with self.subTest(prefix=prefix): + test_file = adjust_prefix('test/test_spark.py', prefix) + self.assertEqual( + process_junit_xml_elems(parse_junit_xml_files([result_file], False, False), test_file_prefix=prefix), + ParsedUnitTestResults( + files=1, + errors=[], + suites=1, + suite_tests=5, + suite_skipped=1, + suite_failures=1, + suite_errors=0, + suite_time=2, + suite_details=[], + cases=[ + UnitTestCase(result_file=result_file, test_file=test_file, line=1412, class_name='test.test_spark.SparkTests', test_name='test_check_shape_compatibility', result='success', message=None, content=None, stdout=None, stderr=None, time=6.435), + UnitTestCase(result_file=result_file, test_file=test_file, line=1641, class_name='test.test_spark.SparkTests', test_name='test_get_available_devices', result='skipped', message='get_available_devices only supported in Spark 3.0 and above', content='/horovod/test/test_spark.py:1642: get_available_devices only\n supported in Spark 3.0 and above\n ', stdout=None, stderr=None, time=0.001), + UnitTestCase(result_file=result_file, test_file=test_file, line=1102, class_name='test.test_spark.SparkTests', test_name='test_get_col_info', result='success', message=None, content=None, stdout=None, stderr=None, time=6.417), + UnitTestCase(result_file=result_file, test_file=test_file, line=819, class_name='test.test_spark.SparkTests', test_name='test_rsh_events', result='failure', message='self = def test_rsh_events(self): > self.do_test_rsh_events(3) test_spark.py:821: _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ test_spark.py:836: in do_test_rsh_events self.do_test_rsh(command, 143, events=events) test_spark.py:852: in do_test_rsh self.assertEqual(expected_result, res) E AssertionError: 143 != 0', content='self = \n\n def test_rsh_events(self):\n > self.do_test_rsh_events(3)\n\n test_spark.py:821:\n _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _\n test_spark.py:836: in do_test_rsh_events\n self.do_test_rsh(command, 143, events=events)\n test_spark.py:852: in do_test_rsh\n self.assertEqual(expected_result, res)\n E AssertionError: 143 != 0\n ', stdout=None, stderr=None, time=7.541), + UnitTestCase(result_file=result_file, test_file=test_file, line=813, class_name='test.test_spark.SparkTests', test_name='test_rsh_with_non_zero_exit_code', result='success', message=None, content=None, stdout=None, stderr=None, time=1.514) + ] + )) + + def test_get_results(self): + success = TestElement('success') + skipped = TestElement('skipped') + failure = TestElement('failure') + error = TestElement('error') + tests = 
[ + ([], []), + ([success], [success]), + ([skipped], [skipped]), + ([failure], [failure]), + ([error], [error]), + ([success, success], [success, success]), + ([success, success, skipped], [success, success]), + ([success, success, failure], [failure]), + ([success, success, failure, failure], [failure, failure]), + ([success, success, failure, failure, error], [error]), + ([success, success, failure, failure, error, error], [error, error]), + ([success, success, skipped, failure, failure, error, error], [error, error]), + ([skipped, skipped], [skipped, skipped]), + ] + for results, expected in tests: + with self.subTest(results=results): + actual = get_results(results) + self.assertEqual(expected, actual) + + def test_get_results_with_disabled_status(self): + disabled = Disabled() + success = TestElement('success') + skipped = TestElement('skipped') + failure = TestElement('failure') + error = TestElement('error') + tests = [ + ([], [disabled]), + ([success], [success]), + ([skipped], [skipped]), + ([failure], [failure]), + ([error], [error]), + ] + for results, expected in tests: + with self.subTest(results=results): + actual = get_results(results, 'disabled') + self.assertEqual(expected, actual) + + def test_get_result(self): + success = TestElement('success') + skipped = TestElement('skipped') + failure = TestElement('failure') + error = TestElement('error') + tests = [ + ([], 'success'), + ([success], 'success'), + ([skipped], 'skipped'), + ([failure], 'failure'), + ([error], 'error'), + ([success, success], 'success'), + ([skipped, skipped], 'skipped'), + ([failure, failure], 'failure'), + ([error, error], 'error'), + (success, 'success'), + (skipped, 'skipped'), + (failure, 'failure'), + (error, 'error'), + (None, 'success') + ] + for results, expected in tests: + with self.subTest(results=results): + actual = get_result(results) + self.assertEqual(expected, actual) + + def test_get_message(self): + tests = [ + ([], None), + ([TestElement('failure', message=None)], None), + ([TestElement('failure', message='failure')], 'failure'), + ([TestElement('failure', message='failure one'), TestElement('failure', message=None)], 'failure one'), + ([TestElement('failure', message='failure one'), TestElement('failure', message='failure two')], 'failure one\nfailure two'), + ] + for results, expected in tests: + with self.subTest(results=results): + actual = get_message(results) + self.assertEqual(expected, actual) + + def test_get_content(self): + tests = [ + ([], None), + ([TestElement('failure', content=None)], None), + ([TestElement('failure', content='failure')], 'failure'), + ([TestElement('failure', content='failure one'), TestElement('failure', content=None)], 'failure one'), + ([TestElement('failure', content='failure one'), TestElement('failure', content='failure two')], 'failure one\nfailure two'), + ] + for results, expected in tests: + with self.subTest(results=results): + actual = get_content(results) + self.assertEqual(expected, actual) + + +if __name__ == "__main__": + TestJunit.update_expectations() diff --git a/python/test/test_mocha.py b/python/test/test_mocha.py new file mode 100644 index 0000000..3848d12 --- /dev/null +++ b/python/test/test_mocha.py @@ -0,0 +1,65 @@ +import json +import os +import pathlib +import sys +import tempfile +import unittest +from glob import glob +from typing import List + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) +sys.path.append(str(pathlib.Path(__file__).resolve().parent)) + +from publish.junit import 
JUnitTreeOrParseError, safe_parse_xml_file +from publish.mocha import parse_mocha_json_file, is_mocha_json +from test_junit import JUnitXmlParseTest + +test_path = pathlib.Path(__file__).resolve().parent +test_files_path = test_path / 'files' / 'mocha' + + +class TestMochaJson(unittest.TestCase, JUnitXmlParseTest): + maxDiff = None + + @property + def test(self): + return self + + @staticmethod + def unsupported_files() -> List[str]: + return [ + str(test_path / 'files' / 'json' / 'not-existing.json'), + str(test_path / 'files' / 'json' / 'empty.json'), + str(test_path / 'files' / 'json' / 'malformed-json.json'), + ] + + def is_supported(self, path: str) -> bool: + return is_mocha_json(path) + + @staticmethod + def _test_files_path() -> pathlib.Path: + return test_files_path + + @staticmethod + def get_test_files() -> List[str]: + return glob(str(test_files_path / '**' / '*.json'), recursive=True) + + @staticmethod + def parse_file(filename) -> JUnitTreeOrParseError: + return safe_parse_xml_file(filename, parse_mocha_json_file) + + def test_is_mocha_json(self): + with tempfile.TemporaryDirectory() as path: + self.assertFalse(is_mocha_json(os.path.join(path, 'file'))) + + filepath = os.path.join(path, 'file.json') + with open(filepath, mode='wt') as w: + json.dump({"stats": {"suites": 1}, "tests": [{"fullTitle": "test name"}]}, w) + self.assertTrue(is_mocha_json(filepath)) + + os.rename(filepath, os.path.join(path, 'file.xml')) + self.assertFalse(is_mocha_json(os.path.join(path, 'file.xml'))) + + +if __name__ == "__main__": + TestMochaJson.update_expectations() diff --git a/python/test/test_nunit.py b/python/test/test_nunit.py new file mode 100644 index 0000000..4a718f1 --- /dev/null +++ b/python/test/test_nunit.py @@ -0,0 +1,41 @@ +import pathlib +import sys +import unittest +from glob import glob +from typing import List + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) +sys.path.append(str(pathlib.Path(__file__).resolve().parent)) + +from publish.junit import JUnitTreeOrParseError +from publish.nunit import parse_nunit_files, is_nunit +from test_junit import JUnitXmlParseTest + +test_files_path = pathlib.Path(__file__).resolve().parent / 'files' / 'nunit' + + +class TestNunit(unittest.TestCase, JUnitXmlParseTest): + maxDiff = None + + @property + def test(self): + return self + + def is_supported(self, path: str) -> bool: + return is_nunit(path) + + @staticmethod + def _test_files_path() -> pathlib.Path: + return test_files_path + + @staticmethod + def get_test_files() -> List[str]: + return glob(str(test_files_path / '**' / '*.xml'), recursive=True) + + @staticmethod + def parse_file(filename) -> JUnitTreeOrParseError: + return list(parse_nunit_files([filename], False))[0][1] + + +if __name__ == "__main__": + TestNunit.update_expectations() diff --git a/python/test/test_progress.py b/python/test/test_progress.py new file mode 100644 index 0000000..bd93faa --- /dev/null +++ b/python/test/test_progress.py @@ -0,0 +1,72 @@ +import unittest +from datetime import datetime, timezone + +import mock + +from publish.progress import Progress, ProgressLogger + + +class TestProgress(unittest.TestCase): + def test_get_progress(self): + progress = Progress(10) + self.assertEqual('0 of 10', progress.get_progress()) + self.assertEqual('0 of 10', progress.get_progress()) + self.assertEqual('item', progress.observe('item')) + self.assertEqual('1 of 10', progress.get_progress()) + self.assertEqual('1 of 10', progress.get_progress()) + self.assertEqual(1, progress.observe(1)) + 
self.assertEqual('2 of 10', progress.get_progress()) + self.assertEqual('2 of 10', progress.get_progress()) + self.assertEqual(1.2, progress.observe(1.2)) + self.assertEqual('3 of 10', progress.get_progress()) + self.assertEqual('3 of 10', progress.get_progress()) + obj = object() + self.assertEqual(obj, progress.observe(obj)) + self.assertEqual('4 of 10', progress.get_progress()) + self.assertEqual('4 of 10', progress.get_progress()) + + def test_get_progress_thousands(self): + progress = Progress(12345) + self.assertEqual('0 of 12 345', progress.get_progress()) + for _ in range(12340): + self.assertEqual('item', progress.observe('item')) + self.assertEqual('12 340 of 12 345', progress.get_progress()) + + +class TestProgressLogger(unittest.TestCase): + def test(self): + progress = Progress(10) + logger = mock.MagicMock(info=mock.Mock()) + plogger = ProgressLogger(progress, 60, 'progress: {progress} in {time}', logger) + try: + ts = datetime(2022, 6, 1, 12, 34, 56, tzinfo=timezone.utc) + with mock.patch('publish.progress.datetime', utcnow=mock.Mock(return_value=ts)): + plogger.start() + logger.info.assert_not_called() + + progress.observe('item') + logger.info.assert_not_called() + + ts = datetime(2022, 6, 1, 12, 35, 00, tzinfo=timezone.utc) + with mock.patch('publish.progress.datetime', utcnow=mock.Mock(return_value=ts)): + plogger._log_progress() + self.assertEqual([mock.call('progress: 1 of 10 in 4 seconds')], logger.info.call_args_list) + logger.info.reset_mock() + + progress.observe('item') + progress.observe('item') + logger.info.assert_not_called() + + ts = datetime(2022, 6, 1, 12, 40, 00, tzinfo=timezone.utc) + with mock.patch('publish.progress.datetime', utcnow=mock.Mock(return_value=ts)): + plogger._log_progress() + self.assertEqual([mock.call('progress: 3 of 10 in 5 minutes and 4 seconds')], logger.info.call_args_list) + logger.info.reset_mock() + finally: + ts = datetime(2022, 6, 1, 12, 41, 23, tzinfo=timezone.utc) + with mock.patch('publish.progress.datetime', utcnow=mock.Mock(return_value=ts)): + plogger.finish('finished: {observations} of {items} in {duration}') + self.assertEqual([mock.call('finished: 3 of 10 in 6 minutes and 27 seconds')], logger.info.call_args_list) + logger.info.reset_mock() + + self.assertEqual('6 minutes and 27 seconds', plogger.duration) diff --git a/python/test/test_publish.py b/python/test/test_publish.py new file mode 100644 index 0000000..6fd99ab --- /dev/null +++ b/python/test/test_publish.py @@ -0,0 +1,2178 @@ +import pathlib +import unittest +from collections import defaultdict + +import mock + +from publish import __version__, Annotation, UnitTestSuite, UnitTestRunResults, UnitTestRunDeltaResults, CaseMessages, \ + get_json_path, get_error_annotation, get_digest_from_stats, \ + all_tests_label_md, skipped_tests_label_md, failed_tests_label_md, passed_tests_label_md, test_errors_label_md, \ + duration_label_md, SomeTestChanges, abbreviate, abbreviate_bytes, get_test_name, get_formatted_digits, \ + get_magnitude, get_delta, as_short_commit, as_delta, as_stat_number, as_stat_duration, get_stats_from_digest, \ + digest_string, ungest_string, get_details_line_md, get_commit_line_md, restrict_unicode, \ + get_short_summary, get_short_summary_md, get_long_summary_md, get_long_summary_with_runs_md, \ + get_long_summary_without_runs_md, get_long_summary_with_digest_md, get_test_changes_md, get_test_changes_list_md, \ + get_test_changes_summary_md, get_case_annotations, get_case_annotation, get_suite_annotations, \ + get_suite_annotations_for_suite, 
get_all_tests_list_annotation, get_skipped_tests_list_annotation, get_case_messages, \ + chunk_test_list, message_is_contained_in_content +from publish.junit import parse_junit_xml_files, process_junit_xml_elems +from publish.unittestresults import get_stats, UnitTestCase, ParseError, get_test_results, create_unit_test_case_results +from test_utils import temp_locale, d, n + +test_files_path = pathlib.Path(__file__).resolve().parent / 'files' / 'junit-xml' + + +errors = [ParseError('file', 'error', 1, 2, exception=ValueError("Invalid value"))] + + +class PublishTest(unittest.TestCase): + old_locale = None + details = [UnitTestSuite('suite', 7, 3, 2, 1, 'std-out', 'std-err')] + + def test_get_json_path(self): + detail = {'a': 'A', 'b': 'B', 'c': ['d'], 'e': {}, 'f': None} + json = {'id': 1, 'name': 'Name', 'detail': detail} + + self.assertEqual(None, get_json_path(json, 'not there')) + self.assertEqual(1, get_json_path(json, 'id')) + self.assertEqual('Name', get_json_path(json, 'name')) + self.assertEqual(detail, get_json_path(json, 'detail')) + self.assertEqual('A', get_json_path(json, 'detail.a')) + self.assertEqual(None, get_json_path(json, 'detail.a.g')) + self.assertEqual(['d'], get_json_path(json, 'detail.c')) + self.assertEqual({}, get_json_path(json, 'detail.e')) + self.assertEqual(None, get_json_path(json, 'detail.e.g')) + self.assertEqual(None, get_json_path(json, 'detail.f')) + self.assertEqual(None, get_json_path(json, 'detail.f.g')) + + def test_test_changes(self): + changes = SomeTestChanges(['removed-test', 'removed-skip', 'remain-test', 'remain-skip', 'skip', 'unskip'], + ['remain-test', 'remain-skip', 'skip', 'unskip', 'add-test', 'add-skip'], + ['removed-skip', 'remain-skip', 'unskip'], ['remain-skip', 'skip', 'add-skip']) + self.assertEqual({'add-test', 'add-skip'}, changes.adds()) + self.assertEqual({'removed-test', 'removed-skip'}, changes.removes()) + self.assertEqual({'remain-test', 'remain-skip', 'skip', 'unskip'}, changes.remains()) + self.assertEqual({'skip', 'add-skip'}, changes.skips()) + self.assertEqual({'unskip', 'removed-skip'}, changes.un_skips()) + self.assertEqual({'add-skip'}, changes.added_and_skipped()) + self.assertEqual({'skip'}, changes.remaining_and_skipped()) + self.assertEqual({'unskip'}, changes.remaining_and_un_skipped()) + self.assertEqual({'removed-skip'}, changes.removed_skips()) + + def test_test_changes_empty(self): + changes = SomeTestChanges([], [], [], []) + self.assertEqual(set(), changes.adds()) + self.assertEqual(set(), changes.removes()) + self.assertEqual(set(), changes.remains()) + self.assertEqual(set(), changes.skips()) + self.assertEqual(set(), changes.un_skips()) + self.assertEqual(set(), changes.added_and_skipped()) + self.assertEqual(set(), changes.remaining_and_skipped()) + self.assertEqual(set(), changes.remaining_and_un_skipped()) + self.assertEqual(set(), changes.removed_skips()) + + def test_test_changes_with_nones(self): + self.do_test_test_changes_with_nones(SomeTestChanges(None, None, None, None)) + self.do_test_test_changes_with_nones(SomeTestChanges(['test'], None, None, None)) + self.do_test_test_changes_with_nones(SomeTestChanges(None, ['test'], None, None)) + self.do_test_test_changes_with_nones(SomeTestChanges(None, None, ['test'], None)) + self.do_test_test_changes_with_nones(SomeTestChanges(None, None, None, ['test'])) + self.do_test_test_changes_with_nones(SomeTestChanges(['test'], None, ['test'], None)) + self.do_test_test_changes_with_nones(SomeTestChanges(None, ['test'], None, ['test'])) + 
self.do_test_test_changes_with_nones(SomeTestChanges(None, ['test'], ['test'], None)) + self.do_test_test_changes_with_nones(SomeTestChanges(['test'], None, None, ['test'])) + + def do_test_test_changes_with_nones(self, changes: SomeTestChanges): + self.assertIsNone(changes.adds()) + self.assertIsNone(changes.removes()) + self.assertIsNone(changes.remains()) + self.assertIsNone(changes.skips()) + self.assertIsNone(changes.un_skips()) + self.assertIsNone(changes.added_and_skipped()) + self.assertIsNone(changes.remaining_and_skipped()) + self.assertIsNone(changes.remaining_and_un_skipped()) + self.assertIsNone(changes.removed_skips()) + + def test_test_changes_has_no_tests(self): + for default in [None, 'one']: + self.assertEqual(SomeTestChanges(default, None, default, None).has_no_tests(), False) + self.assertEqual(SomeTestChanges(default, [], default, None).has_no_tests(), True) + self.assertEqual(SomeTestChanges(default, None, default, []).has_no_tests(), False) + self.assertEqual(SomeTestChanges(default, [], default, []).has_no_tests(), True) + self.assertEqual(SomeTestChanges(default, ['one'], default, []).has_no_tests(), False) + self.assertEqual(SomeTestChanges(default, [], default, ['two']).has_no_tests(), True) + self.assertEqual(SomeTestChanges(default, ['one'], default, ['two']).has_no_tests(), False) + + def test_test_changes_has_changes(self): + for changes, expected in [(SomeTestChanges(None, None, None, None), False), + (SomeTestChanges([], [], [], []), False), + (SomeTestChanges(['one'], ['one'], ['two'], ['two']), False), + (SomeTestChanges(['one'], ['three'], ['two'], ['two']), True), + (SomeTestChanges(['one'], ['one'], ['two'], ['three']), True), + (SomeTestChanges(['one'], ['two'], ['two'], ['three']), True), + (SomeTestChanges(['one'], None, ['two'], None), False), + (SomeTestChanges(None, ['one'], None, ['two']), False)]: + self.assertEqual(changes.has_changes, expected, str(changes)) + + def test_restrict_unicode(self): + self.assertEqual(None, restrict_unicode(None)) + self.assertEqual('', restrict_unicode('')) + + # utf8 characters ≤ 0xffff + self.assertEqual('…', restrict_unicode('…')) + self.assertEqual('abc', restrict_unicode('abc')) + self.assertEqual('»»»»»', restrict_unicode('»»»»»')) + self.assertEqual('▊▋▌▍▎', restrict_unicode('▊▋▌▍▎')) + + # utf8 characters > 0xffff + self.assertEqual(r'\U0001d482\U0001d483\U0001d484', restrict_unicode('𝒂𝒃𝒄')) + self.assertEqual(r'헴䜝헱홐㣇㿷䔭\U0001237a\U000214ff\U00020109㦓', restrict_unicode('헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓')) + + # restricting a second time should not alter the result + self.assertEqual(None, restrict_unicode(restrict_unicode(None))) + self.assertEqual('', restrict_unicode(restrict_unicode(''))) + self.assertEqual('…', restrict_unicode(restrict_unicode('…'))) + self.assertEqual('abc', restrict_unicode(restrict_unicode('abc'))) + self.assertEqual('»»»»»', restrict_unicode(restrict_unicode('»»»»»'))) + self.assertEqual('▊▋▌▍▎', restrict_unicode(restrict_unicode('▊▋▌▍▎'))) + self.assertEqual(r'\U0001d482\U0001d483\U0001d484', restrict_unicode(restrict_unicode('𝒂𝒃𝒄'))) + self.assertEqual(r'헴䜝헱홐㣇㿷䔭\U0001237a\U000214ff\U00020109㦓', restrict_unicode(restrict_unicode('헴䜝헱홐㣇㿷䔭𒍺𡓿𠄉㦓'))) + + def test_abbreviate_characters(self): + # None string + self.assertIsNone(abbreviate(None, 1)) + + # 1 byte utf8 characters + self.assertEqual('', abbreviate('', 1)) + self.assertEqual('…', abbreviate('…', 1)) + self.assertEqual('ab', abbreviate('ab', 3)) + self.assertEqual('ab', abbreviate('ab', 2)) + self.assertEqual('…', abbreviate('ab', 1)) + 
self.assertEqual('abc', abbreviate('abc', 4)) + self.assertEqual('abc', abbreviate('abc', 3)) + self.assertEqual('a…', abbreviate('abc', 2)) + self.assertEqual('…', abbreviate('abc', 1)) + self.assertEqual('abcd', abbreviate('abcd', 4)) + self.assertEqual('a…d', abbreviate('abcd', 3)) + self.assertEqual('a…', abbreviate('abcd', 2)) + self.assertEqual('…', abbreviate('abcd', 1)) + self.assertEqual('abcde', abbreviate('abcde', 5)) + self.assertEqual('ab…e', abbreviate('abcde', 4)) + self.assertEqual('a…e', abbreviate('abcde', 3)) + self.assertEqual('a…', abbreviate('abcde', 2)) + self.assertEqual('…', abbreviate('abcde', 1)) + self.assertEqual('abcdef', abbreviate('abcdef', 6)) + self.assertEqual('ab…ef', abbreviate('abcdef', 5)) + self.assertEqual('ab…f', abbreviate('abcdef', 4)) + self.assertEqual('a…f', abbreviate('abcdef', 3)) + self.assertEqual('a…', abbreviate('abcdef', 2)) + self.assertEqual('…', abbreviate('abcdef', 1)) + self.assertEqual('abcdefg', abbreviate('abcdefg', 7)) + self.assertEqual('abc…fg', abbreviate('abcdefg', 6)) + self.assertEqual('ab…fg', abbreviate('abcdefg', 5)) + self.assertEqual('ab…g', abbreviate('abcdefg', 4)) + self.assertEqual('a…g', abbreviate('abcdefg', 3)) + self.assertEqual('a…', abbreviate('abcdefg', 2)) + self.assertEqual('…', abbreviate('abcdefg', 1)) + self.assertEqual('abcdefgh', abbreviate('abcdefgh', 8)) + self.assertEqual('abc…fgh', abbreviate('abcdefgh', 7)) + self.assertEqual('abc…gh', abbreviate('abcdefgh', 6)) + self.assertEqual('ab…gh', abbreviate('abcdefgh', 5)) + self.assertEqual('ab…h', abbreviate('abcdefgh', 4)) + self.assertEqual('a…h', abbreviate('abcdefgh', 3)) + self.assertEqual('a…', abbreviate('abcdefgh', 2)) + self.assertEqual('…', abbreviate('abcdefgh', 1)) + self.assertEqual('abcdefghijklmnopqrstuvwxyz', abbreviate('abcdefghijklmnopqrstuvwxyz', 27)) + self.assertEqual('abcdefghijklmnopqrstuvwxyz', abbreviate('abcdefghijklmnopqrstuvwxyz', 26)) + self.assertEqual('abcdefghijkl…opqrstuvwxyz', abbreviate('abcdefghijklmnopqrstuvwxyz', 25)) + + # 2 bytes utf8 characters + self.assertEqual('»»»»»', abbreviate('»»»»»', 5)) + self.assertEqual('»»…»', abbreviate('»»»»»', 4)) + self.assertEqual('»…»', abbreviate('»»»»»', 3)) + self.assertEqual('»…', abbreviate('»»»»»', 2)) + self.assertEqual('…', abbreviate('»»»»»', 1)) + self.assertEqual('»»»»»»', abbreviate('»»»»»»', 6)) + self.assertEqual('»»…»»', abbreviate('»»»»»»', 5)) + self.assertEqual('»»…»', abbreviate('»»»»»»', 4)) + self.assertEqual('»…»', abbreviate('»»»»»»', 3)) + self.assertEqual('»…', abbreviate('»»»»»»', 2)) + self.assertEqual('…', abbreviate('»»»»»»', 1)) + + # 3 bytes utf8 characters + self.assertEqual('▊▋▌▍▎', abbreviate('▊▋▌▍▎', 5)) + self.assertEqual('▊▋…▎', abbreviate('▊▋▌▍▎', 4)) + self.assertEqual('▊…▎', abbreviate('▊▋▌▍▎', 3)) + self.assertEqual('▊…', abbreviate('▊▋▌▍▎', 2)) + self.assertEqual('…', abbreviate('▊▋▌▍▎', 1)) + self.assertEqual('▊▋▌▍▎▏', abbreviate('▊▋▌▍▎▏', 6)) + self.assertEqual('▊▋…▎▏', abbreviate('▊▋▌▍▎▏', 5)) + self.assertEqual('▊▋…▏', abbreviate('▊▋▌▍▎▏', 4)) + self.assertEqual('▊…▏', abbreviate('▊▋▌▍▎▏', 3)) + self.assertEqual('▊…', abbreviate('▊▋▌▍▎▏', 2)) + self.assertEqual('…', abbreviate('▊▋▌▍▎▏', 1)) + + # 4 bytes utf8 characters + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 27)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 26)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 25)) + + # mixed utf bytes: 
lengths=[1, 2, 3, 4, 1, 2, 3, 4] + self.assertEqual('a»▉𝒂a»▉𝒂', abbreviate('a»▉𝒂a»▉𝒂', 9)) + self.assertEqual('a»▉𝒂a»▉𝒂', abbreviate('a»▉𝒂a»▉𝒂', 8)) + self.assertEqual('a»▉…»▉𝒂', abbreviate('a»▉𝒂a»▉𝒂', 7)) + self.assertEqual('a»▉…▉𝒂', abbreviate('a»▉𝒂a»▉𝒂', 6)) + self.assertEqual('a»…▉𝒂', abbreviate('a»▉𝒂a»▉𝒂', 5)) + self.assertEqual('a»…𝒂', abbreviate('a»▉𝒂a»▉𝒂', 4)) + self.assertEqual('a…𝒂', abbreviate('a»▉𝒂a»▉𝒂', 3)) + self.assertEqual('a…', abbreviate('a»▉𝒂a»▉𝒂', 2)) + self.assertEqual('…', abbreviate('a»▉𝒂a»▉𝒂', 1)) + self.assertEqual('a»▉𝒂a»▉', abbreviate('a»▉𝒂a»▉', 8)) + self.assertEqual('a»▉𝒂a»▉', abbreviate('a»▉𝒂a»▉', 7)) + self.assertEqual('a»▉…»▉', abbreviate('a»▉𝒂a»▉', 6)) + self.assertEqual('a»…»▉', abbreviate('a»▉𝒂a»▉', 5)) + self.assertEqual('a»…▉', abbreviate('a»▉𝒂a»▉', 4)) + self.assertEqual('a…▉', abbreviate('a»▉𝒂a»▉', 3)) + self.assertEqual('a…', abbreviate('a»▉𝒂a»▉', 2)) + self.assertEqual('…', abbreviate('a»▉𝒂a»▉', 1)) + + # invalid abbreviation lengths + self.assertRaises(ValueError, lambda: abbreviate('abc', 0)) + self.assertRaises(ValueError, lambda: abbreviate('abc', -1)) + + def test_abbreviate_bytes(self): + # None string + self.assertIsNone(abbreviate_bytes(None, 3)) + + # even number of characters + # 4 bytes utf characters + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 105)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 104)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 103)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 102)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 101)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 100)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 99)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 98)) + self.assertEqual('𝒂𝒃𝒄…𝒙𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 27)) + self.assertEqual('𝒂𝒃𝒄…𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 26)) + self.assertEqual('𝒂𝒃𝒄…𝒚𝒛', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 25)) + self.assertEqual('𝒂…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 7)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 6)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 5)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 4)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚𝒛', 3)) + # 1 byte utf characters + self.assertEqual('ab…yz', abbreviate_bytes('abcdefghijklmnopqrstuvwxyz', 7)) + self.assertEqual('ab…z', abbreviate_bytes('abcdefghijklmnopqrstuvwxyz', 6)) + self.assertEqual('a…z', abbreviate_bytes('abcdefghijklmnopqrstuvwxyz', 5)) + self.assertEqual('a…', abbreviate_bytes('abcdefghijklmnopqrstuvwxyz', 4)) + self.assertEqual('…', abbreviate_bytes('abcdefghijklmnopqrstuvwxyz', 3)) + # mixed utf bytes: lengths=[1, 2, 3, 4, 4, 3, 2, 1] + self.assertEqual('a»▉𝒂𝒂▉»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 21)) + self.assertEqual('a»▉𝒂𝒂▉»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 20)) + self.assertEqual('a»▉𝒂…▉»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 19)) + self.assertEqual('a»▉…▉»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 18)) + self.assertEqual('a»▉…▉»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 17)) + self.assertEqual('a»▉…▉»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 16)) + 
self.assertEqual('a»▉…▉»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 15)) + self.assertEqual('a»▉…»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 14)) + self.assertEqual('a»▉…»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 13)) + self.assertEqual('a»▉…»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 12)) + self.assertEqual('a»…»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 11)) + self.assertEqual('a»…»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 10)) + self.assertEqual('a»…»a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 9)) + self.assertEqual('a»…a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 8)) + self.assertEqual('a»…a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 7)) + self.assertEqual('a…a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 6)) + self.assertEqual('a…a', abbreviate_bytes('a»▉𝒂𝒂▉»a', 5)) + self.assertEqual('a…', abbreviate_bytes('a»▉𝒂𝒂▉»a', 4)) + self.assertEqual('…', abbreviate_bytes('a»▉𝒂𝒂▉»a', 3)) + + # odd number of characters + # 4 bytes utf characters + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 101)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 100)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 99)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 98)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 97)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 96)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 95)) + self.assertEqual('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌…𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 94)) + self.assertEqual('𝒂𝒃𝒄…𝒘𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 27)) + self.assertEqual('𝒂𝒃𝒄…𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 26)) + self.assertEqual('𝒂𝒃𝒄…𝒙𝒚', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 25)) + self.assertEqual('𝒂…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 7)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 6)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 5)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 4)) + self.assertEqual('…', abbreviate_bytes('𝒂𝒃𝒄𝒅𝒆𝒇𝒈𝒉𝒊𝒋𝒌𝒍𝒎𝒏𝒐𝒑𝒒𝒓𝒔𝒕𝒖𝒗𝒘𝒙𝒚', 3)) + # 1 byte utf characters + self.assertEqual('ab…xy', abbreviate_bytes('abcdefghijklmnopqrstuvwxy', 7)) + self.assertEqual('ab…y', abbreviate_bytes('abcdefghijklmnopqrstuvwxy', 6)) + self.assertEqual('a…y', abbreviate_bytes('abcdefghijklmnopqrstuvwxy', 5)) + self.assertEqual('a…', abbreviate_bytes('abcdefghijklmnopqrstuvwxy', 4)) + self.assertEqual('…', abbreviate_bytes('abcdefghijklmnopqrstuvwxy', 3)) + # mixed utf bytes: lengths=[1, 2, 3, 4, 1, 2, 3] + self.assertEqual('a»▉𝒂a»▉', abbreviate_bytes('a»▉𝒂a»▉', 17)) + self.assertEqual('a»▉𝒂a»▉', abbreviate_bytes('a»▉𝒂a»▉', 16)) + self.assertEqual('a»▉…a»▉', abbreviate_bytes('a»▉𝒂a»▉', 15)) + self.assertEqual('a»▉…»▉', abbreviate_bytes('a»▉𝒂a»▉', 14)) + self.assertEqual('a»…»▉', abbreviate_bytes('a»▉𝒂a»▉', 13)) + self.assertEqual('a»…»▉', abbreviate_bytes('a»▉𝒂a»▉', 12)) + self.assertEqual('a»…»▉', abbreviate_bytes('a»▉𝒂a»▉', 11)) + self.assertEqual('a»…▉', abbreviate_bytes('a»▉𝒂a»▉', 10)) + self.assertEqual('a»…▉', abbreviate_bytes('a»▉𝒂a»▉', 9)) + self.assertEqual('a…▉', abbreviate_bytes('a»▉𝒂a»▉', 8)) + self.assertEqual('a…▉', abbreviate_bytes('a»▉𝒂a»▉', 7)) + self.assertEqual('a…', abbreviate_bytes('a»▉𝒂a»▉', 6)) + self.assertEqual('a…', abbreviate_bytes('a»▉𝒂a»▉', 5)) + self.assertEqual('a…', abbreviate_bytes('a»▉𝒂a»▉', 4)) + 
self.assertEqual('…', abbreviate_bytes('a»▉𝒂a»▉', 3)) + + self.assertRaises(ValueError, lambda: abbreviate_bytes('abc', 2)) + self.assertRaises(ValueError, lambda: abbreviate_bytes('abc', 1)) + self.assertRaises(ValueError, lambda: abbreviate_bytes('abc', 0)) + self.assertRaises(ValueError, lambda: abbreviate_bytes('abc', -1)) + + def test_get_test_name(self): + self.assertEqual('Unknown test', get_test_name(None, None, None)) + self.assertEqual('test name', get_test_name(None, None, 'test name')) + self.assertEqual('class name ‑ Unknown test', get_test_name(None, 'class name', None)) + self.assertEqual('class name ‑ test name', get_test_name(None, 'class name', 'test name')) + self.assertEqual('file name ‑ Unknown test', get_test_name('file name', None, None)) + self.assertEqual('file name ‑ test name', get_test_name('file name', None, 'test name')) + self.assertEqual('file name ‑ class name ‑ Unknown test', get_test_name('file name', 'class name', None)) + self.assertEqual('file name ‑ class name ‑ test name', get_test_name('file name', 'class name', 'test name')) + + def test_get_formatted_digits(self): + self.assertEqual(get_formatted_digits(None), (3, 0)) + self.assertEqual(get_formatted_digits(None, 1), (3, 0)) + self.assertEqual(get_formatted_digits(None, 123), (3, 0)) + self.assertEqual(get_formatted_digits(None, 1234), (5, 0)) + self.assertEqual(get_formatted_digits(0), (1, 0)) + self.assertEqual(get_formatted_digits(1, 2, 3), (1, 0)) + self.assertEqual(get_formatted_digits(10), (2, 0)) + self.assertEqual(get_formatted_digits(100), (3, 0)) + self.assertEqual(get_formatted_digits(1234, 123, 0), (5, 0)) + with temp_locale('en_US'): + self.assertEqual(get_formatted_digits(1234, 123, 0), (5, 0)) + with temp_locale('de_DE'): + self.assertEqual(get_formatted_digits(1234, 123, 0), (5, 0)) + + self.assertEqual(get_formatted_digits(dict()), (3, 3)) + self.assertEqual(get_formatted_digits(dict(number=1)), (1, 3)) + self.assertEqual(get_formatted_digits(dict(number=12)), (2, 3)) + self.assertEqual(get_formatted_digits(dict(number=123)), (3, 3)) + self.assertEqual(get_formatted_digits(dict(number=1234)), (5, 3)) + with temp_locale('en_US'): + self.assertEqual(get_formatted_digits(dict(number=1234)), (5, 3)) + with temp_locale('de_DE'): + self.assertEqual(get_formatted_digits(dict(number=1234)), (5, 3)) + + self.assertEqual(get_formatted_digits(dict(delta=1)), (3, 1)) + self.assertEqual(get_formatted_digits(dict(number=1, delta=1)), (1, 1)) + self.assertEqual(get_formatted_digits(dict(number=1, delta=12)), (1, 2)) + self.assertEqual(get_formatted_digits(dict(number=1, delta=123)), (1, 3)) + self.assertEqual(get_formatted_digits(dict(number=1, delta=1234)), (1, 5)) + with temp_locale('en_US'): + self.assertEqual(get_formatted_digits(dict(number=1, delta=1234)), (1, 5)) + with temp_locale('de_DE'): + self.assertEqual(get_formatted_digits(dict(number=1, delta=1234)), (1, 5)) + + def test_get_magnitude(self): + self.assertEqual(None, get_magnitude(None)) + self.assertEqual(+0, get_magnitude(+0)) + self.assertEqual(-1, get_magnitude(-1)) + self.assertEqual(+2, get_magnitude(+2)) + self.assertEqual(None, get_magnitude(dict())) + self.assertEqual(+0, get_magnitude(dict(number=+0))) + self.assertEqual(+1, get_magnitude(dict(number=+1))) + self.assertEqual(-2, get_magnitude(dict(number=-2))) + self.assertEqual(3, get_magnitude(dict(number=3, delta=5))) + self.assertEqual(3, get_magnitude(dict(duration=3))) + self.assertEqual(3, get_magnitude(dict(duration=3, delta=5))) + self.assertEqual(None, 
get_magnitude(dict(delta=5))) + + def test_get_delta(self): + self.assertEqual(None, get_delta(None)) + self.assertEqual(None, get_delta(+0)) + self.assertEqual(None, get_delta(-1)) + self.assertEqual(None, get_delta(+2)) + self.assertEqual(None, get_delta(dict())) + self.assertEqual(None, get_delta(dict(number=+0))) + self.assertEqual(None, get_delta(dict(number=+1))) + self.assertEqual(None, get_delta(dict(number=-2))) + self.assertEqual(5, get_delta(dict(number=3, delta=5))) + self.assertEqual(None, get_delta(dict(duration=3))) + self.assertEqual(5, get_delta(dict(duration=3, delta=5))) + self.assertEqual(5, get_delta(dict(delta=5))) + + def test_as_short_commit(self): + self.assertEqual(as_short_commit(None), None) + self.assertEqual(as_short_commit(''), None) + self.assertEqual(as_short_commit('commit'), 'commit') + self.assertEqual(as_short_commit('0123456789abcdef'), '01234567') + self.assertEqual(as_short_commit('b469da3d223225fa3f014a3c9e9466b42a1471c5'), 'b469da3d') + + def test_as_delta(self): + self.assertEqual(as_delta(0, 1), '±0') + self.assertEqual(as_delta(+1, 1), '+1') + self.assertEqual(as_delta(-2, 1), ' - 2') + + self.assertEqual(as_delta(0, 2), '±  0') + self.assertEqual(as_delta(+1, 2), '+  1') + self.assertEqual(as_delta(-2, 2), ' -   2') + + self.assertEqual(as_delta(1, 5), '+       1') + self.assertEqual(as_delta(12, 5), '+     12') + self.assertEqual(as_delta(123, 5), '+   123') + self.assertEqual(as_delta(1234, 5), '+1 234') + self.assertEqual(as_delta(1234, 6), '+  1 234') + self.assertEqual(as_delta(123, 6), '+     123') + + with temp_locale('en_US'): + self.assertEqual(as_delta(1234, 5), '+1 234') + self.assertEqual(as_delta(1234, 6), '+  1 234') + self.assertEqual(as_delta(123, 6), '+     123') + with temp_locale('de_DE'): + self.assertEqual(as_delta(1234, 5), '+1 234') + self.assertEqual(as_delta(1234, 6), '+  1 234') + self.assertEqual(as_delta(123, 6), '+     123') + + def test_as_stat_number(self): + label = 'unit' + self.assertEqual(as_stat_number(None, 1, 0, label), 'N/A unit') + + self.assertEqual(as_stat_number(1, 1, 0, label), '1 unit') + self.assertEqual(as_stat_number(123, 6, 0, label), '     123 unit') + self.assertEqual(as_stat_number(1234, 6, 0, label), '  1 234 unit') + self.assertEqual(as_stat_number(12345, 6, 0, label), '12 345 unit') + + with temp_locale('en_US'): + self.assertEqual(as_stat_number(123, 6, 0, label), '     123 unit') + self.assertEqual(as_stat_number(1234, 6, 0, label), '  1 234 unit') + self.assertEqual(as_stat_number(12345, 6, 0, label), '12 345 unit') + with temp_locale('de_DE'): + self.assertEqual(as_stat_number(123, 6, 0, label), '     123 unit') + self.assertEqual(as_stat_number(1234, 6, 0, label), '  1 234 unit') + self.assertEqual(as_stat_number(12345, 6, 0, label), '12 345 unit') + + self.assertEqual(as_stat_number(dict(number=1), 1, 0, label), '1 unit') + + self.assertEqual(as_stat_number(dict(number=1, delta=-1), 1, 1, label), '1 unit  - 1 ') + self.assertEqual(as_stat_number(dict(number=2, delta=+0), 1, 1, label), '2 unit ±0 ') + self.assertEqual(as_stat_number(dict(number=3, delta=+1), 1, 1, label), '3 unit +1 ') + self.assertEqual(as_stat_number(dict(number=3, delta=+1), 1, 2, label), '3 unit +  1 ') + self.assertEqual(as_stat_number(dict(number=3, delta=+1), 2, 2, label), '  3 unit +  1 ') + self.assertEqual(as_stat_number(dict(number=3, delta=+1234), 1, 6, label), '3 unit +  1 234 ') + self.assertEqual(as_stat_number(dict(number=3, delta=+12345), 1, 6, label), '3 unit +12 345 ') + with temp_locale('en_US'): + 
self.assertEqual(as_stat_number(dict(number=3, delta=+1234), 1, 6, label), '3 unit +  1 234 ') + self.assertEqual(as_stat_number(dict(number=3, delta=+12345), 1, 6, label), '3 unit +12 345 ') + with temp_locale('de_DE'): + self.assertEqual(as_stat_number(dict(number=3, delta=+1234), 1, 6, label), '3 unit +  1 234 ') + self.assertEqual(as_stat_number(dict(number=3, delta=+12345), 1, 6, label), '3 unit +12 345 ') + + self.assertEqual(as_stat_number(dict(delta=-1), 3, 1, label), 'N/A unit  - 1 ') + + self.assertEqual(as_stat_number(dict(number=1, delta=-2, new=3), 1, 1, label), '1 unit  - 2, 3 new ') + self.assertEqual(as_stat_number(dict(number=2, delta=+0, new=3, gone=4), 1, 1, label), '2 unit ±0, 3 new, 4 gone ') + self.assertEqual(as_stat_number(dict(number=3, delta=+1, gone=4), 1, 1, label), '3 unit +1, 4 gone ') + + def test_as_stat_duration(self): + label = 'time' + self.assertEqual(as_stat_duration(None, label), 'N/A time') + self.assertEqual(as_stat_duration(0, None), '0s') + self.assertEqual(as_stat_duration(0, label), '0s time') + self.assertEqual(as_stat_duration(12, label), '12s time') + self.assertEqual(as_stat_duration(72, label), '1m 12s time') + self.assertEqual(as_stat_duration(3754, label), '1h 2m 34s time') + self.assertEqual(as_stat_duration(-3754, label), '1h 2m 34s time') + self.assertEqual(as_stat_duration(94354, label), '1d 2h 12m 34s time') + self.assertEqual(as_stat_duration(223954, label), '2d 14h 12m 34s time') + + self.assertEqual(as_stat_duration(d(3754), label), '1h 2m 34s time') + self.assertEqual(as_stat_duration(d(3754, 0), label), '1h 2m 34s time ±0s') + self.assertEqual(as_stat_duration(d(3754, 1234), label), '1h 2m 34s time + 20m 34s') + self.assertEqual(as_stat_duration(d(3754, -123), label), '1h 2m 34s time - 2m 3s') + self.assertEqual(as_stat_duration(d(3754, -94354), label), '1h 2m 34s time - 1d 2h 12m 34s') + self.assertEqual(as_stat_duration(d(3754, -223954), label), '1h 2m 34s time - 2d 14h 12m 34s') + self.assertEqual(as_stat_duration(dict(delta=123), label), 'N/A time + 2m 3s') + + def test_get_stats_digest_undigest(self): + digest = get_digest_from_stats(UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + )) + self.assertTrue(isinstance(digest, str)) + self.assertTrue(len(digest) > 100) + stats = get_stats_from_digest(digest) + self.assertEqual(stats, UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=None, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + )) + + def test_digest_ungest_string(self): + digest = digest_string('abc') + self.assertTrue(isinstance(digest, str)) + self.assertTrue(len(digest) > 10) + string = ungest_string(digest) + self.assertEqual(string, 'abc') + + def test_get_stats_from_digest(self): + self.assertEqual( + get_stats_from_digest('H4sIAAAAAAAC/0XOwQ6CMBAE0F8hPXtgEVT8GdMUSDYCJdv2ZP' + 'x3psLW28zbZLIfM/E8BvOs6FKZkDj+SoMyJLGR/Yp6RcUh5lOr' + '+RWSc4DuD2/eALcCk+UZcC8winiBPCCS1rzXn1HnqC5wzBEpnH' + 'PUKOgc5QedXxaOaJq+O+lMT3jdAAAA'), + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=None, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ) + ) + + def 
test_get_short_summary(self): + self.assertEqual('No tests found', get_short_summary(UnitTestRunResults(files=0, errors=[], suites=0, duration=123, suite_details=self.details, tests=0, tests_succ=0, tests_skip=0, tests_fail=0, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('10 tests found in 2m 3s', get_short_summary(UnitTestRunResults(files=1, errors=[], suites=2, duration=123, suite_details=self.details, tests=10, tests_succ=0, tests_skip=0, tests_fail=0, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('All 10 tests pass in 2m 3s', get_short_summary(UnitTestRunResults(files=1, errors=[], suites=2, duration=123, suite_details=self.details, tests=10, tests_succ=10, tests_skip=0, tests_fail=0, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('All 9 tests pass, 1 skipped in 2m 3s', get_short_summary(UnitTestRunResults(files=1, errors=[], suites=2, duration=123, suite_details=self.details, tests=10, tests_succ=9, tests_skip=1, tests_fail=0, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('2 fail, 1 skipped, 7 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=1, errors=[], suites=2, duration=123, suite_details=self.details, tests=10, tests_succ=7, tests_skip=1, tests_fail=2, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('3 errors, 2 fail, 1 skipped, 4 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=1, errors=[], suites=2, duration=123, suite_details=self.details, tests=10, tests_succ=4, tests_skip=1, tests_fail=2, tests_error=3, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('2 fail, 8 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=1, errors=[], suites=2, duration=123, suite_details=self.details, tests=10, tests_succ=8, tests_skip=0, tests_fail=2, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('3 errors, 7 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=1, errors=[], suites=2, duration=123, suite_details=self.details, tests=10, tests_succ=7, tests_skip=0, tests_fail=0, tests_error=3, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('1 parse errors', get_short_summary(UnitTestRunResults(files=1, errors=errors, suites=0, duration=0, suite_details=self.details, tests=0, tests_succ=0, tests_skip=0, tests_fail=0, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('1 parse errors, 4 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=2, errors=errors, suites=1, duration=123, suite_details=self.details, tests=4, tests_succ=4, tests_skip=0, tests_fail=0, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('1 parse errors, 1 skipped, 4 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=2, errors=errors, suites=1, duration=123, suite_details=self.details, tests=5, tests_succ=4, tests_skip=1, tests_fail=0, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('1 parse errors, 2 fail, 1 skipped, 4 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=2, errors=errors, 
suites=1, duration=123, suite_details=self.details, tests=7, tests_succ=4, tests_skip=1, tests_fail=2, tests_error=0, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + self.assertEqual('1 parse errors, 3 errors, 2 fail, 1 skipped, 4 pass in 2m 3s', get_short_summary(UnitTestRunResults(files=2, errors=errors, suites=1, duration=123, suite_details=self.details, tests=10, tests_succ=4, tests_skip=1, tests_fail=2, tests_error=3, runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, commit='commit'))) + + def test_label_md(self): + self.assertEqual(all_tests_label_md, 'tests') + self.assertEqual(passed_tests_label_md, f'[:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests")') + self.assertEqual(skipped_tests_label_md, f'[:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests")') + self.assertEqual(failed_tests_label_md, f'[:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests")') + self.assertEqual(test_errors_label_md, f'[:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors")') + self.assertEqual(duration_label_md, f'[:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")') + + def test_get_short_summary_md(self): + self.assertEqual(get_short_summary_md(UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + )), (f'4 {all_tests_label_md} 5 {passed_tests_label_md} 6 {skipped_tests_label_md} 7 {failed_tests_label_md} 8 {test_errors_label_md}')) + + def test_get_short_summary_md_with_delta(self): + self.assertEqual(get_short_summary_md(UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(9, 10), runs_succ=n(10, -11), runs_skip=n(11, 12), runs_fail=n(12, -13), runs_error=n(13, 14), + commit='commit', + reference_type='type', reference_commit='0123456789abcdef' + )), (f'4 {all_tests_label_md}  - 5  5 {passed_tests_label_md} +6  6 {skipped_tests_label_md}  - 7  7 {failed_tests_label_md} +8  8 {test_errors_label_md}  - 9 ')) + + def test_get_details_line_md(self): + for fails, errors, parse_errors, expected in [ + (0, 0, 0, ''), + (1, 0, 0, 'failures'), + (0, 1, 0, 'errors'), + (0, 0, 1, 'parsing errors'), + (1, 1, 0, 'failures and errors'), + (0, 1, 1, 'parsing errors and errors'), + (1, 0, 1, 'parsing errors and failures'), + (1, 1, 1, 'parsing errors, failures and errors'), + ]: + with self.subTest(fails=fails, errors=errors, parse_errors=parse_errors): + stats = UnitTestRunResults( + files=1, errors=[None] * parse_errors, suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=4 - 1 - fails - errors, tests_skip=1, tests_fail=fails, tests_error=errors, + runs=4, runs_succ=4 - 1 - fails - errors, runs_skip=1, runs_fail=fails, runs_error=errors, + commit='commit' + ) + actual = get_details_line_md(stats, 'https://details.url/') + if expected: + expected = f'For more details on these {expected}, see [this 
check](https://details.url/).' + + self.assertEqual(expected, actual) + + def test_get_commit_line_md(self): + stats = UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ) + self.assertEqual(get_commit_line_md(stats), 'Results for commit commit.') + + stats_with_delta = UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(9, 10), runs_succ=n(10, -11), runs_skip=n(11, 12), runs_fail=n(12, -13), runs_error=n(13, 14), + commit='commit', reference_type='type', reference_commit='ref' + ) + self.assertEqual(get_commit_line_md(stats_with_delta), 'Results for commit commit. ± Comparison against type commit ref.') + + for ref_type, ref in [(None, None), ('type', None), (None, 'ref')]: + with self.subTest(ref_type=ref_type, ref=ref): + stats_with_delta = UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(9, 10), runs_succ=n(10, -11), runs_skip=n(11, 12), runs_fail=n(12, -13), runs_error=n(13, 14), + commit='commit', reference_type=ref_type, reference_commit=ref + ) + self.assertEqual(get_commit_line_md(stats_with_delta), 'Results for commit commit.') + + #### + # test that get_long_summary_md calls into get_long_summary_with_runs_md and get_long_summary_without_runs_md + #### + + @classmethod + def test_get_long_summary_md_with_single_runs(cls): + with mock.patch('publish.get_long_summary_with_runs_md') as w: + with mock.patch('publish.get_long_summary_without_runs_md') as wo: + stats = UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=cls.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=4, runs_succ=5, runs_skip=6, runs_fail=7, runs_error=8, + commit='commit' + ) + test_changes = mock.Mock() + get_long_summary_md(stats, 'url', test_changes, 10) + w.assert_not_called() + wo.assert_called_once_with(stats, 'url', test_changes, 10) + + @classmethod + def test_get_long_summary_md_with_multiple_runs(cls): + with mock.patch('publish.get_long_summary_with_runs_md') as w: + with mock.patch('publish.get_long_summary_without_runs_md') as wo: + stats = UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=cls.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=0, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=0, + commit='commit' + ) + test_changes = mock.Mock() + get_long_summary_md(stats, 'url', test_changes, 10) + w.assert_called_once_with(stats, 'url', test_changes, 10) + wo.assert_not_called() + + #### + # test get_long_summary_with_runs_md + #### + + def test_get_long_summary_with_runs_md(self): + self.assertEqual(get_long_summary_with_runs_md(UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=0, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=0, + commit='commit' + )), (f'1 files    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md}   7 {failed_tests_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 
{skipped_tests_label_md} 12 {failed_tests_label_md}\n' + f'\n' + f'Results for commit commit.\n')) + + def test_get_long_summary_with_runs_md_with_errors(self): + self.assertEqual(get_long_summary_with_runs_md(UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + )), (f'1 files    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md}   7 {failed_tests_label_md}   8 {test_errors_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 {skipped_tests_label_md} 12 {failed_tests_label_md} 13 {test_errors_label_md}\n' + f'\n' + f'Results for commit commit.\n')) + + def test_get_long_summary_with_runs_md_with_deltas(self): + self.assertEqual(get_long_summary_with_runs_md(UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(9, 10), runs_succ=n(10, -11), runs_skip=n(11, 12), runs_fail=n(12, -13), runs_error=n(13, 14), + commit='123456789abcdef0', reference_type='type', reference_commit='0123456789abcdef' + )), (f'1 files  +  2    2 suites   - 3   3s {duration_label_md} +4s\n' + f'4 {all_tests_label_md}  -   5    5 {passed_tests_label_md} +  6    6 {skipped_tests_label_md}  -   7    7 {failed_tests_label_md} +  8    8 {test_errors_label_md}  -   9 \n' + f'9 runs  +10  10 {passed_tests_label_md}  - 11  11 {skipped_tests_label_md} +12  12 {failed_tests_label_md}  - 13  13 {test_errors_label_md} +14 \n' + f'\n' + f'Results for commit 12345678. ± Comparison against type commit 01234567.\n')) + + def test_get_long_summary_with_runs_md_with_details_url_with_fails(self): + self.assertEqual(get_long_summary_with_runs_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=0, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=0, + commit='commit' + ), + 'https://details.url/' + ), (f'1 files    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md}   7 {failed_tests_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 {skipped_tests_label_md} 12 {failed_tests_label_md}\n' + f'\n' + f'For more details on these failures, see [this check](https://details.url/).\n' + f'\n' + f'Results for commit commit.\n') + ) + + def test_get_long_summary_with_runs_md_with_details_url_without_fails(self): + self.assertEqual(get_long_summary_with_runs_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=0, tests_error=0, + runs=9, runs_succ=10, runs_skip=11, runs_fail=0, runs_error=0, + commit='commit' + ), + 'https://details.url/' + ), (f'1 files    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md} 0 {failed_tests_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 {skipped_tests_label_md} 0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit commit.\n') + ) + + def test_get_long_summary_with_runs_md_with_test_lists(self): + self.assertEqual(get_long_summary_with_runs_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, 
tests_succ=5, tests_skip=6, tests_fail=0, tests_error=0, + runs=9, runs_succ=10, runs_skip=11, runs_fail=0, runs_error=0, + commit='commit' + ), + 'https://details.url/', + SomeTestChanges( + ['test1', 'test2', 'test3', 'test4', 'test5'], ['test5', 'test6'], + ['test2'], ['test5', 'test6'] + ), + ), (f'1 files    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md} 0 {failed_tests_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 {skipped_tests_label_md} 0 {failed_tests_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + '
<details>\n' + '  <summary>This pull request <b>removes</b> 4 and <b>adds</b> 1 tests. ' + '<i>Note that renamed tests count towards both.</i></summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + 'test3\n' + 'test4\n' + '```\n' + '\n' + '```\n' + 'test6\n' + '```\n' + '</details>\n' + '\n' + '<details>\n' + '  <summary>This pull request <b>removes</b> 1 skipped test and <b>adds</b> 1 skipped test. ' + '<i>Note that renamed tests count towards both.</i></summary>\n' + '\n' + '```\n' + 'test2\n' + '```\n' + '\n' + '```\n' + 'test6\n' + '```\n' + '</details>\n' + '\n' + '<details>\n' + '  <summary>This pull request <b>skips</b> 1 test.</summary>\n' + '\n' + '```\n' + 'test5\n' + '```\n' + '</details>
\n') + ) + + #### + # test get_long_summary_without_runs_md + #### + + def test_get_long_summary_without_runs_md(self): + self.assertEqual(get_long_summary_without_runs_md(UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=0, + runs=4, runs_succ=5, runs_skip=6, runs_fail=7, runs_error=0, + commit='commit' + )), (f'4 {all_tests_label_md}   5 {passed_tests_label_md}  3s {duration_label_md}\n' + f'2 suites  6 {skipped_tests_label_md}\n' + f'1 files    7 {failed_tests_label_md}\n' + f'\n' + f'Results for commit commit.\n')) + + def test_get_long_summary_without_runs_md_with_errors(self): + self.assertEqual(get_long_summary_without_runs_md(UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=4, runs_succ=5, runs_skip=6, runs_fail=7, runs_error=8, + commit='commit' + )), (f'4 {all_tests_label_md}   5 {passed_tests_label_md}  3s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'2 suites  6 {skipped_tests_label_md}\n' + f'1 files    7 {failed_tests_label_md}  8 {test_errors_label_md}\n' + f'\n' + f'Results for commit commit.\n')) + + def test_get_long_summary_without_runs_md_with_delta(self): + self.assertEqual(get_long_summary_without_runs_md(UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(0, 0), + runs=n(4, -5), runs_succ=n(5, 6), runs_skip=n(6, -7), runs_fail=n(7, 8), runs_error=n(0, 0), + commit='123456789abcdef0', reference_type='type', reference_commit='0123456789abcdef' + )), (f'4 {all_tests_label_md}   - 5   5 {passed_tests_label_md} +6   3s {duration_label_md} +4s\n' + f'2 suites  - 3   6 {skipped_tests_label_md}  - 7 \n' + f'1 files   +2   7 {failed_tests_label_md} +8 \n' + f'\n' + f'Results for commit 12345678. ± Comparison against type commit 01234567.\n')) + + def test_get_long_summary_without_runs_md_with_errors_and_deltas(self): + self.assertEqual(get_long_summary_without_runs_md(UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(4, -5), runs_succ=n(5, 6), runs_skip=n(6, -7), runs_fail=n(7, 8), runs_error=n(8, -9), + commit='123456789abcdef0', reference_type='type', reference_commit='0123456789abcdef' + )), (f'4 {all_tests_label_md}   - 5   5 {passed_tests_label_md} +6   3s {duration_label_md} +4s\n' + f'2 suites  - 3   6 {skipped_tests_label_md}  - 7 \n' + f'1 files   +2   7 {failed_tests_label_md} +8   8 {test_errors_label_md}  - 9 \n' + f'\n' + f'Results for commit 12345678. 
± Comparison against type commit 01234567.\n')) + + def test_get_long_summary_without_runs_md_with_details_url_with_fails(self): + self.assertEqual(get_long_summary_without_runs_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=0, + runs=4, runs_succ=5, runs_skip=6, runs_fail=7, runs_error=0, + commit='commit' + ), + 'https://details.url/' + ), (f'4 {all_tests_label_md}   5 {passed_tests_label_md}  3s {duration_label_md}\n' + f'2 suites  6 {skipped_tests_label_md}\n' + f'1 files    7 {failed_tests_label_md}\n' + f'\n' + f'For more details on these failures, see [this check](https://details.url/).\n' + f'\n' + f'Results for commit commit.\n') + ) + + def test_get_long_summary_without_runs_md_with_details_url_without_fails(self): + self.assertEqual(get_long_summary_without_runs_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=0, tests_error=0, + runs=4, runs_succ=5, runs_skip=6, runs_fail=0, runs_error=0, + commit='commit' + ), + 'https://details.url/' + ), (f'4 {all_tests_label_md}   5 {passed_tests_label_md}  3s {duration_label_md}\n' + f'2 suites  6 {skipped_tests_label_md}\n' + f'1 files    0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit commit.\n') + ) + + def test_get_long_summary_without_runs_md_with_test_lists(self): + self.assertEqual(get_long_summary_without_runs_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=0, tests_error=0, + runs=4, runs_succ=5, runs_skip=6, runs_fail=0, runs_error=0, + commit='commit' + ), + 'https://details.url/', + SomeTestChanges( + ['test1', 'test2', 'test3', 'test4', 'test5'], ['test5', 'test6'], + ['test2'], ['test5', 'test6'] + ), + ), (f'4 {all_tests_label_md}   5 {passed_tests_label_md}  3s {duration_label_md}\n' + f'2 suites  6 {skipped_tests_label_md}\n' + f'1 files    0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit commit.\n' + f'\n' + '
<details>\n' + '  <summary>This pull request <b>removes</b> 4 and <b>adds</b> 1 tests. ' + '<i>Note that renamed tests count towards both.</i></summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + 'test3\n' + 'test4\n' + '```\n' + '\n' + '```\n' + 'test6\n' + '```\n' + '</details>\n' + '\n' + '<details>\n' + '  <summary>This pull request <b>removes</b> 1 skipped test and <b>adds</b> 1 skipped test. ' + '<i>Note that renamed tests count towards both.</i></summary>\n' + '\n' + '```\n' + 'test2\n' + '```\n' + '\n' + '```\n' + 'test6\n' + '```\n' + '</details>\n' + '\n' + '<details>\n' + '  <summary>This pull request <b>skips</b> 1 test.</summary>\n' + '\n' + '```\n' + 'test5\n' + '```\n' + '</details>
\n') + ) + + def test_get_long_summary_without_runs_md_with_all_tests_removed(self): + self.assertEqual(get_long_summary_without_runs_md( + UnitTestRunResults( + files=0, errors=[], suites=0, duration=0, suite_details=self.details, + tests=0, tests_succ=0, tests_skip=0, tests_fail=0, tests_error=0, + runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, + commit='commit' + ), + 'https://details.url/', + SomeTestChanges( + ['test1', 'test2', 'test3', 'test4', 'test5'], [], + ['test2'], [] + ), + ), (f'0 {all_tests_label_md}   0 {passed_tests_label_md}  0s {duration_label_md}\n' + f'0 suites  0 {skipped_tests_label_md}\n' + f'0 files    0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit commit.\n') + ) + + def test_get_long_summary_without_runs_md_with_some_files_but_all_tests_removed(self): + self.assertEqual(get_long_summary_without_runs_md( + UnitTestRunResults( + files=2, errors=[], suites=0, duration=0, suite_details=self.details, + tests=0, tests_succ=0, tests_skip=0, tests_fail=0, tests_error=0, + runs=0, runs_succ=0, runs_skip=0, runs_fail=0, runs_error=0, + commit='commit' + ), + 'https://details.url/', + SomeTestChanges( + ['test1', 'test2', 'test3', 'test4', 'test5'], [], + ['test2'], [] + ), + ), (f'0 {all_tests_label_md}   0 {passed_tests_label_md}  0s {duration_label_md}\n' + f'0 suites  0 {skipped_tests_label_md}\n' + f'2 files    0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit commit.\n') + ) + + def test_get_long_summary_with_digest_md_with_single_run(self): + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + actual = get_long_summary_with_digest_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=4, runs_succ=5, runs_skip=6, runs_fail=7, runs_error=8, + commit='commit' + ) + ) + + self.assertEqual(actual, f'4 {all_tests_label_md}   5 {passed_tests_label_md}  3s {duration_label_md}\n' + f'2 suites  6 {skipped_tests_label_md}\n' + f'1 files    7 {failed_tests_label_md}  8 {test_errors_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/02MywqAIBQFfyVct+kd/UyEJVzKjKuuon/vZF' + 'juzsyBOYWibbFiyIo8E9aTC1ACZs+TI7MDKyAO91x13KP1UkI0' + 'v1jpgGg/oSbaILpPLMyGYXoY9nvsPTPNvfzXAiexwGlLGq3JAe' + 'K6buousrLZAAAA\n') + + def test_get_long_summary_with_digest_md_with_multiple_runs(self): + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + actual = get_long_summary_with_digest_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=0, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=0, + commit='commit' + ) + ) + + self.assertEqual(actual, f'1 files    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md}   7 {failed_tests_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 {skipped_tests_label_md} 12 {failed_tests_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/03MwQqDMBAE0F+RnD24aiv6M0VShaVqZJOciv' + '/e0brR28wbmK8ZeRq86TLKM+Mjh6OUKO8ofWC3oFaoGMI+1Zpf' + 'PloLeFzw4RXwTDD2PAGaBIOIE0gBkbjsf+0Z9Y6KBP87IoXzjk' + 'qF+51188wBRdP2A3NU1srcAAAA\n') + + def test_get_long_summary_with_digest_md_with_test_errors(self): + # makes gzipped 
digest deterministic + with mock.patch('gzip.time.time', return_value=0): + actual = get_long_summary_with_digest_md( + UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ) + ) + + self.assertEqual(actual, f'1 files    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md}   7 {failed_tests_label_md}   8 {test_errors_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 {skipped_tests_label_md} 12 {failed_tests_label_md} 13 {test_errors_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/0XOwQ6CMBAE0F8hPXtgEVT8GdMUSDYCJdv2ZP' + 'x3psLW28zbZLIfM/E8BvOs6FKZkDj+SoMyJLGR/Yp6RcUh5lOr' + '+RWSc4DuD2/eALcCk+UZcC8winiBPCCS1rzXn1HnqC5wzBEpnH' + 'PUKOgc5QedXxaOaJq+O+lMT3jdAAAA\n') + + def test_get_long_summary_with_digest_md_with_parse_errors(self): + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + actual = get_long_summary_with_digest_md( + UnitTestRunResults( + files=1, errors=errors, suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ) + ) + + self.assertEqual(actual, f'1 files    1 errors    2 suites   3s {duration_label_md}\n' + f'4 {all_tests_label_md}   5 {passed_tests_label_md}   6 {skipped_tests_label_md}   7 {failed_tests_label_md}   8 {test_errors_label_md}\n' + f'9 runs  10 {passed_tests_label_md} 11 {skipped_tests_label_md} 12 {failed_tests_label_md} 13 {test_errors_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/0XOwQ6CMBAE0F8hPXtgEVT8GdMUSDYCJdv2ZP' + 'x3psLW28zbZLIfM/E8BvOs6FKZkDj+SoMyJLGR/Yp6RcUh5lOr' + '+RWSc4DuD2/eALcCk+UZcC8winiBPCCS1rzXn1HnqC5wzBEpnH' + 'PUKOgc5QedXxaOaJq+O+lMT3jdAAAA\n') + + def test_get_long_summary_with_digest_md_with_delta(self): + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + actual = get_long_summary_with_digest_md( + UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(9, 10), runs_succ=n(10, -11), runs_skip=n(11, 12), runs_fail=n(12, -13), runs_error=n(13, 14), + commit='123456789abcdef0', reference_type='type', reference_commit='0123456789abcdef' + ), UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=4, runs_succ=5, runs_skip=6, runs_fail=7, runs_error=8, + commit='commit' + ) + ) + + self.assertEqual(actual, f'1 files  +  2    2 suites   - 3   3s {duration_label_md} +4s\n' + f'4 {all_tests_label_md}  -   5    5 {passed_tests_label_md} +  6    6 {skipped_tests_label_md}  -   7    7 {failed_tests_label_md} +  8    8 {test_errors_label_md}  -   9 \n' + f'9 runs  +10  10 {passed_tests_label_md}  - 11  11 {skipped_tests_label_md} +12  12 {failed_tests_label_md}  - 13  13 {test_errors_label_md} +14 \n' + '\n' + 'Results for commit 12345678. 
± Comparison against type commit 01234567.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/02MywqAIBQFfyVct+kd/UyEJVzKjKuuon/vZF' + 'juzsyBOYWibbFiyIo8E9aTC1ACZs+TI7MDKyAO91x13KP1UkI0' + 'v1jpgGg/oSbaILpPLMyGYXoY9nvsPTPNvfzXAiexwGlLGq3JAe' + 'K6buousrLZAAAA\n') + + def test_get_long_summary_with_digest_md_with_delta_and_parse_errors(self): + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + actual = get_long_summary_with_digest_md( + UnitTestRunDeltaResults( + files=n(1, 2), errors=errors, suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(9, 10), runs_succ=n(10, -11), runs_skip=n(11, 12), runs_fail=n(12, -13), runs_error=n(13, 14), + commit='123456789abcdef0', reference_type='type', reference_commit='0123456789abcdef' + ), UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=4, runs_succ=5, runs_skip=6, runs_fail=7, runs_error=8, + commit='commit' + ) + ) + + self.assertEqual(actual, f'1 files  +  2    1 errors    2 suites   - 3   3s {duration_label_md} +4s\n' + f'4 {all_tests_label_md}  -   5    5 {passed_tests_label_md} +  6    6 {skipped_tests_label_md}  -   7    7 {failed_tests_label_md} +  8    8 {test_errors_label_md}  -   9 \n' + f'9 runs  +10  10 {passed_tests_label_md}  - 11  11 {skipped_tests_label_md} +12  12 {failed_tests_label_md}  - 13  13 {test_errors_label_md} +14 \n' + '\n' + 'Results for commit 12345678. ± Comparison against type commit 01234567.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/02MywqAIBQFfyVct+kd/UyEJVzKjKuuon/vZF' + 'juzsyBOYWibbFiyIo8E9aTC1ACZs+TI7MDKyAO91x13KP1UkI0' + 'v1jpgGg/oSbaILpPLMyGYXoY9nvsPTPNvfzXAiexwGlLGq3JAe' + 'K6buousrLZAAAA\n') + + def test_get_long_summary_with_digest_md_with_delta_results_only(self): + with self.assertRaises(ValueError) as context: + get_long_summary_with_digest_md(UnitTestRunDeltaResults( + files=n(1, 2), errors=[], suites=n(2, -3), duration=d(3, 4), + tests=n(4, -5), tests_succ=n(5, 6), tests_skip=n(6, -7), tests_fail=n(7, 8), tests_error=n(8, -9), + runs=n(9, 10), runs_succ=n(10, -11), runs_skip=n(11, 12), runs_fail=n(12, -13), runs_error=n(13, 14), + commit='123456789abcdef0', reference_type='type', reference_commit='0123456789abcdef' + )) + self.assertIn('stats must be UnitTestRunResults when no digest_stats is given', context.exception.args) + + def test_get_test_changes_md(self): + self.assertEqual( + '
<details>\n' + ' <summary>the summary</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + '```\n' + '</details>
\n', + get_test_changes_md('the summary', 3, ['test1', 'test2']) + ) + self.assertEqual( + '
<details>\n' + ' <summary>the summary</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + '```\n' + '\n' + '```\n' + 'test3\n' + 'test4\n' + 'test5\n' + '…\n' + '```\n' + '</details>
\n', + get_test_changes_md('the summary', 3, ['test1', 'test2'], ['test3', 'test4', 'test5', 'test6']) + ) + + def test_get_test_changes_list_md(self): + self.assertEqual('```\n\n```\n', get_test_changes_list_md([], 3)) + self.assertEqual('```\ntest1\n```\n', get_test_changes_list_md(['test1'], 3)) + self.assertEqual('```\ntest1\ntest2\n```\n', get_test_changes_list_md(['test1', 'test2'], 3)) + self.assertEqual('```\ntest1\ntest2\ntest3\n```\n', get_test_changes_list_md(['test1', 'test2', 'test3'], 3)) + self.assertEqual('```\ntest1\ntest2\ntest3\n…\n```\n', get_test_changes_list_md(['test1', 'test2', 'test3', 'test4'], 3)) + + def test_get_test_changes_summary_md(self): + changes = SomeTestChanges( + ['test', 'test1', 'test2', 'test3'], ['test', 'test 1', 'test 2', 'test 3'], + ['test1', 'test2'], ['test 1', 'test 2', 'test 3'] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request removes 3 and adds 3 tests. ' + 'Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + 'test3\n' + '```\n' + '\n' + '```\n' + 'test 1\n' + 'test 2\n' + 'test 3\n' + '```\n' + '</details>
\n' + '\n' + '
<details>\n' + ' <summary>This pull request removes 2 skipped tests and adds 3 skipped tests. ' + 'Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + '```\n' + '\n' + '```\n' + 'test 1\n' + 'test 2\n' + 'test 3\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_with_nones(self): + expected = '' + changes = mock.Mock(SomeTestChanges) + changes.has_no_tests = mock.Mock(return_value=False) + changes.removes = mock.Mock(return_value=None) + changes.adds = mock.Mock(return_value=None) + changes.remaining_and_skipped = mock.Mock(return_value=None) + changes.remaining_and_un_skipped = mock.Mock(return_value=None) + changes.removed_skips = mock.Mock(return_value=None) + changes.added_and_skipped = mock.Mock(return_value=None) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.removes = mock.Mock(return_value=[]) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.removes = mock.Mock(return_value=['test1']) + expected = ( + '
<details>\n' + ' <summary>This pull request removes 1 test.</summary>\n' + '\n' + '```\n' + 'test1\n' + '```\n' + '</details>
\n' + ) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.adds = mock.Mock(return_value=[]) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.adds = mock.Mock(return_value=['test2']) + expected = expected.replace('1 test.', '1 and adds 1 tests. ' + 'Note that renamed tests count towards both.') + expected = expected.replace('test1', 'test1\n```\n\n```\ntest2') + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.removed_skips = mock.Mock(return_value=[]) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.added_and_skipped = mock.Mock(return_value=[]) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.removed_skips = mock.Mock(return_value=['test5']) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.added_and_skipped = mock.Mock(return_value=['test6']) + expected = expected + ( + '\n' + '
<details>\n' + ' <summary>This pull request removes 1 skipped test and adds 1 skipped test. ' + 'Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'test5\n' + '```\n' + '\n' + '```\n' + 'test6\n' + '```\n' + '</details>
\n' + ) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.remaining_and_skipped = mock.Mock(return_value=[]) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.remaining_and_skipped = mock.Mock(return_value=['test3']) + expected = expected + ( + '\n' + '
<details>\n' + ' <summary>This pull request skips 1 test.</summary>\n' + '\n' + '```\n' + 'test3\n' + '```\n' + '</details>
\n' + ) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.remaining_and_un_skipped = mock.Mock(return_value=[]) + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + changes.remaining_and_un_skipped = mock.Mock(return_value=['test4']) + expected = expected.replace('This pull request skips 1 test.', 'This pull request skips 1 and un-skips 1 tests.') + expected = expected.replace('test3', 'test3\n```\n\n```\ntest4') + self.assertEqual(expected, get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_add_tests(self): + changes = SomeTestChanges( + ['test1'], ['test1', 'test2', 'test3'], + [], [] + ) + self.assertEqual('', get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_remove_test(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test1', 'test3'], + [], [] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request removes 1 test.</summary>\n' + '\n' + '```\n' + 'test2\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_remove_tests(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test1'], + [], [] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request removes 2 tests.</summary>\n' + '\n' + '```\n' + 'test2\n' + 'test3\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_rename_tests(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test 1', 'test 2', 'test 3'], + [], [] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request removes 3 and adds 3 tests. ' + 'Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + 'test3\n' + '```\n' + '\n' + '```\n' + 'test 1\n' + 'test 2\n' + 'test 3\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_skip_test(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test1', 'test2', 'test3'], + [], ['test1'] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request skips 1 test.</summary>\n' + '\n' + '```\n' + 'test1\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_skip_tests(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test1', 'test2', 'test3'], + [], ['test1', 'test3'] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request skips 2 tests.</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test3\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_un_skip_tests(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test1', 'test2', 'test3'], + ['test1', 'test3'], [] + ) + self.assertEqual('', get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_skip_and_un_skip_tests(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test1', 'test2', 'test3'], + ['test1', 'test2'], ['test1', 'test3'] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request skips 1 and un-skips 1 tests.</summary>\n' + '\n' + '```\n' + 'test3\n' + '```\n' + '\n' + '```\n' + 'test2\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_test_changes_summary_md_rename_skip_tests(self): + changes = SomeTestChanges( + ['test1', 'test2', 'test3'], ['test 1', 'test 2', 'test 3'], + ['test1', 'test2'], ['test 1', 'test 2'] + ) + self.assertEqual('
<details>\n' + ' <summary>This pull request removes 3 and adds 3 tests. ' + 'Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + 'test3\n' + '```\n' + '\n' + '```\n' + 'test 1\n' + 'test 2\n' + 'test 3\n' + '```\n' + '</details>
\n' + '\n' + '
<details>\n' + ' <summary>This pull request removes 2 skipped tests and adds 2 skipped tests. ' + 'Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'test1\n' + 'test2\n' + '```\n' + '\n' + '```\n' + 'test 1\n' + 'test 2\n' + '```\n' + '</details>
\n', + get_test_changes_summary_md(changes, 3)) + + def test_get_case_messages(self): + results = create_unit_test_case_results({ + (None, 'class1', 'test1'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1.0), + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1.1), + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='success', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=1.2), + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=None), + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='skipped', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=None), + ], + 'failure': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='failure', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=1.23), + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='failure', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=1.234), + ], + 'error': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='error', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=1.2345), + ], + }, + (None, 'class2', 'test2'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None, stdout=None, stderr=None, time=None) + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None, stdout=None, stderr=None, time=None) + ], + 'failure': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None, stdout=None, stderr=None, time=None) + ], + 'error': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None, stdout=None, stderr=None, time=None) + ], + } + }) + + expected = CaseMessages([ + ((None, 'class1', 'test1'), dict([ + ('success', defaultdict(list, [ + ('content1', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1.0), + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1.1), + ])), + ('content2', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', 
result='success', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=1.2), + ])) + ])), + ('skipped', defaultdict(list, [ + ('message2', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=None), + ])), + ('message3', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='skipped', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=None), + ])) + ])), + ('failure', defaultdict(list, [ + ('content4', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='failure', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=1.23), + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='failure', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=1.234), + ])), + ])), + ('error', defaultdict(list, [ + ('content5', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=1, class_name='class1', test_name='test1', result='error', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=1.2345), + ])), + ])), + ])), + ((None, 'class2', 'test2'), dict([ + ('success', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ('skipped', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ('failure', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ('error', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ])) + ]) + + actual = get_case_messages(results) + + self.assertEqual(expected, actual) + + def test_annotation_to_dict(self): + annotation = Annotation(path='file1', start_line=123, end_line=123, start_column=4, end_column=5, annotation_level='notice', message='result-file1', title='1 out of 6 runs skipped: test1', raw_details='message2') + self.assertEqual(dict(path='file1', start_line=123, end_line=123, start_column=4, end_column=5, annotation_level='notice', message='result-file1', title='1 out of 6 runs skipped: test1', raw_details='message2'), annotation.to_dict()) + annotation = Annotation(path='class2', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='failure', message='result-file1', title='1 out of 4 runs with error: test2 (class2)', raw_details=None) + self.assertEqual(dict(path='class2', start_line=0, end_line=0, annotation_level='failure', message='result-file1', title='1 out of 4 runs with error: test2 (class2)'), annotation.to_dict()) + annotation = Annotation(path='file', start_line=0, end_line=0, start_column=None, end_column=None, 
annotation_level='notice', message='message', title=None, raw_details=None) + self.assertEqual(dict(path='file', start_line=0, end_line=0, annotation_level='notice', message='message'), annotation.to_dict()) + + def test_annotation_to_dict_abbreviation(self): + annotation = Annotation(path='file', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='notice', message='message ' * 8000, title='title - ' * 31, raw_details='raw ' * 16000) + self.assertEqual('message ' * 8000, annotation.to_dict().get('message')) + self.assertEqual('title - ' * 31, annotation.to_dict().get('title')) + self.assertEqual('raw ' * 16000, annotation.to_dict().get('raw_details')) + + annotation = Annotation(path='file', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='notice', message='message ' * 8001, title='title - ' * 32, raw_details='raw ' * 16001) + self.assertEqual('message ' * 3999 + 'message…ssage ' + 'message ' * 3999, annotation.to_dict().get('message')) + self.assertEqual('title - ' * 15 + 'title -…itle - ' + 'title - ' * 15, annotation.to_dict().get('title')) + self.assertEqual('raw ' * 8000 + '…aw ' + 'raw ' * 7999, annotation.to_dict().get('raw_details')) + + def test_annotation_to_dict_restricted_unicode(self): + for text, expected in [ + ('abc', 'abc'), + ('»»»', '»»»'), + ('▊▋▌▍▎', '▊▋▌▍▎'), + ('𝒂𝒃𝒄', '\\U0001d482\\U0001d483\\U0001d484') + ]: + with self.subTest(text=text): + annotation = Annotation(path=f'file1 {text}', start_line=123, end_line=123, start_column=4, end_column=5, annotation_level='notice', message=f'result-file1 {text}', title=f'1 out of 6 runs skipped: test1 {text}', raw_details=f'message {text}') + self.assertEqual(dict(path=f'file1 {expected}', start_line=123, end_line=123, start_column=4, end_column=5, annotation_level='notice', message=f'result-file1 {expected}', title=f'1 out of 6 runs skipped: test1 {expected}', raw_details=f'message {expected}'), annotation.to_dict()) + + def test_get_case_annotation(self): + messages = CaseMessages([ + ((None, 'class1', 'test1'), dict([ + ('success', dict([ + ('message1', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='message1', content=None, stdout=None, stderr=None, time=1.0) + ])) + ])), + ('skipped', dict([ + ('message2', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name=None, test_name='test1', result='skipped', message='message2', content=None, stdout=None, stderr=None, time=1.0) + ])) + ])), + ('failure', dict([ + ('message3', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='', test_name='test1', result='failure', message='message3', content='content3', stdout=None, stderr=None, time=1.0) + ])), + ('message4', list([ + UnitTestCase(result_file='result-file2', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='message4', content='content4.1', stdout=None, stderr=None, time=1.0), + UnitTestCase(result_file='result-file3', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='message4', content='content4.2', stdout=None, stderr=None, time=1.0) + ])), + ])), + # the actual case message is taken, rather than the message given to get_case_annotation + ('error', dict([ + ('message5', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', 
result='error', message='actual message', content='content5', stdout=None, stderr=None, time=1.0) + ])) + ])), + ])), + ((None, 'class2', 'test2'), dict([ + ('success', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ('skipped', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ('failure', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ('error', dict([ + (None, list([ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None, stdout=None, stderr=None, time=None) + ])), + ])), + ])) + ]) + + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='notice', message='result-file1 [took 1s]', title='1 out of 6 runs skipped: test1', raw_details='message2'), get_case_annotation(messages, (None, 'class1', 'test1'), 'skipped', 'message2', report_individual_runs=False)) + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='warning', message='result-file1 [took 1s]\nresult-file2 [took 1s]\nresult-file3 [took 1s]', title='3 out of 6 runs failed: test1', raw_details='message3\ncontent3'), get_case_annotation(messages, (None, 'class1', 'test1'), 'failure', 'message3', report_individual_runs=False)) + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='warning', message='result-file1 [took 1s]\nresult-file2 [took 1s]\nresult-file3 [took 1s]', title='3 out of 6 runs failed: test1 (class1)', raw_details='message4\ncontent4.1'), get_case_annotation(messages, (None, 'class1', 'test1'), 'failure', 'message4', report_individual_runs=False)) + # the actual case message is taken, rather than the message given to get_case_annotation + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='failure', message='result-file1 [took 1s]', title='1 out of 6 runs with error: test1 (class1)', raw_details='actual message\ncontent5'), get_case_annotation(messages, (None, 'class1', 'test1'), 'error', 'message5', report_individual_runs=False)) + + self.assertEqual(Annotation(path='class2', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='result-file1', title='1 out of 4 runs skipped: test2 (class2)', raw_details=None), get_case_annotation(messages, (None, 'class2', 'test2'), 'skipped', None, report_individual_runs=False)) + self.assertEqual(Annotation(path='class2', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='warning', message='result-file1', title='1 out of 4 runs failed: test2 (class2)', raw_details=None), get_case_annotation(messages, (None, 'class2', 'test2'), 'failure', None, report_individual_runs=False)) + self.assertEqual(Annotation(path='class2', start_line=0, end_line=0, start_column=None, end_column=None, 
annotation_level='failure', message='result-file1', title='1 out of 4 runs with error: test2 (class2)', raw_details=None), get_case_annotation(messages, (None, 'class2', 'test2'), 'error', None, report_individual_runs=False)) + + def test_get_case_annotation_report_individual_runs(self): + messages = CaseMessages([ + ((None, 'class1', 'test1'), dict([ + ('success', dict([ + ('message1', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='message1', content=None, stdout=None, stderr=None, time=1.0) + ])) + ])), + ('skipped', dict([ + ('message2', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name=None, test_name='test1', result='skipped', message='message2', content=None, stdout=None, stderr=None, time=None) + ])) + ])), + ('failure', dict([ + ('message3', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='', test_name='test1', result='failure', message='message3', content=None, stdout=None, stderr=None, time=1.23) + ])), + ('message4', list([ + UnitTestCase(result_file='result-file2', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='message4', content=None, stdout=None, stderr=None, time=1.234), + UnitTestCase(result_file='result-file3', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='message4', content=None, stdout=None, stderr=None, time=1.234) + ])), + ])), + # the actual case message is taken, rather than the message given to get_case_annotation + ('error', dict([ + ('message5', list([ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='error', message='actual message', content=None, stdout=None, stderr=None, time=1.2345) + ])) + ])), + ])) + ]) + + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='notice', message='result-file1', title='1 out of 6 runs skipped: test1', raw_details='message2'), get_case_annotation(messages, (None, 'class1', 'test1'), 'skipped', 'message2', report_individual_runs=True)) + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='warning', message='result-file1 [took 1s]', title='1 out of 6 runs failed: test1', raw_details='message3'), get_case_annotation(messages, (None, 'class1', 'test1'), 'failure', 'message3', report_individual_runs=True)) + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='warning', message='result-file2 [took 1s]\nresult-file3 [took 1s]', title='2 out of 6 runs failed: test1 (class1)', raw_details='message4'), get_case_annotation(messages, (None, 'class1', 'test1'), 'failure', 'message4', report_individual_runs=True)) + # the actual case message is taken, rather than the message given to get_case_annotation + self.assertEqual(Annotation(path='file1', start_line=123, end_line=123, start_column=None, end_column=None, annotation_level='failure', message='result-file1 [took 1s]', title='1 out of 6 runs with error: test1 (class1)', raw_details='actual message'), get_case_annotation(messages, (None, 'class1', 'test1'), 'error', 'message5', report_individual_runs=True)) + + def test_get_case_annotations(self): + results = create_unit_test_case_results({ + (None, 'class1', 'test1'): { + 
'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name=None, test_name='test1', result='skipped', message='skip message', content='skip content', stdout='skip stdout', stderr='skip stderr', time=None) + ], + 'failure': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='fail message 1', content='fail content 1', stdout='fail stdout 1', stderr='fail stderr 1', time=1.2), + UnitTestCase(result_file='result-file2', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='fail message 2', content='fail content 2', stdout='fail stdout 2', stderr='fail stderr 2', time=1.23), + UnitTestCase(result_file='result-file3', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='fail message 3', content='fail content 3', stdout='fail stdout 3', stderr='fail stderr 3', time=1.234) + ], + 'error': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='error', message='error message', content='error content', stdout='error stdout', stderr='error stderr', time=1.2345) + ], + }, + (None, 'class2', 'test2'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='success', message=None, content=None, stdout=None, stderr=None, time=None) + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='skipped', message=None, content=None, stdout=None, stderr=None, time=None) + ], + 'failure': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='failure', message=None, content=None, stdout=None, stderr=None, time=None) + ], + 'error': [ + UnitTestCase(result_file='result-file1', test_file=None, line=None, class_name='class2', test_name='test2', result='error', message=None, content=None, stdout=None, stderr=None, time=None) + ], + } + }) + + expected = [ + Annotation( + annotation_level='warning', + end_column=None, + end_line=123, + message='result-file1 [took 1s]\nresult-file2 [took 1s]\nresult-file3 [took 1s]', + path='file1', + start_column=None, + start_line=123, + title='3 out of 6 runs failed: test1 (class1)', + raw_details='fail message 1\nfail content 1\nfail stdout 1\nfail stderr 1' + ), Annotation( + annotation_level='failure', + end_column=None, + end_line=123, + message='result-file1 [took 1s]', + path='file1', + start_column=None, + start_line=123, + title='1 out of 6 runs with error: test1 (class1)', + raw_details='error message\nerror content\nerror stdout\nerror stderr' + ), Annotation( + annotation_level='warning', + end_column=None, + end_line=0, + message='result-file1', + path='class2', + start_column=None, + start_line=0, + title='1 out of 4 runs failed: test2 (class2)', + raw_details=None + ), Annotation( + annotation_level='failure', + end_column=None, + end_line=0, + message='result-file1', + path='class2', + start_column=None, + start_line=0, + title='1 out of 4 runs with error: test2 (class2)', + raw_details=None + ), + ] + + annotations = 
get_case_annotations(results, report_individual_runs=False) + + self.assertEqual(expected, annotations) + + def test_get_case_annotations_report_individual_runs(self): + results = create_unit_test_case_results({ + (None, 'class1', 'test1'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name=None, test_name='test1', result='skipped', message='skip message', content='skip content', stdout='skip stdout', stderr='skip stderr', time=None) + ], + 'failure': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='fail message 1', content='fail content 1', stdout='fail stdout 1', stderr='fail stderr 1', time=1.2), + UnitTestCase(result_file='result-file2', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='fail message 2', content='fail content 2', stdout='fail stdout 2', stderr='fail stderr 2', time=1.23), + UnitTestCase(result_file='result-file3', test_file='file1', line=123, class_name='class1', test_name='test1', result='failure', message='fail message 2', content='fail content 2', stdout='fail stdout 2', stderr='fail stderr 2', time=1.234) + ], + 'error': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='error', message='error message', content='error content', stdout='error stdout', stderr='error stderr', time=0.1) + ], + } + }) + + expected = [ + Annotation( + annotation_level='warning', + end_column=None, + end_line=123, + message='result-file1 [took 1s]', + path='file1', + start_column=None, + start_line=123, + title='1 out of 6 runs failed: test1 (class1)', + raw_details='fail message 1\nfail content 1\nfail stdout 1\nfail stderr 1' + ), Annotation( + annotation_level='warning', + end_column=None, + end_line=123, + message='result-file2 [took 1s]\nresult-file3 [took 1s]', + path='file1', + start_column=None, + start_line=123, + title='2 out of 6 runs failed: test1 (class1)', + raw_details='fail message 2\nfail content 2\nfail stdout 2\nfail stderr 2' + ), Annotation( + annotation_level='failure', + end_column=None, + end_line=123, + message='result-file1 [took 0s]', + path='file1', + start_column=None, + start_line=123, + title='1 out of 6 runs with error: test1 (class1)', + raw_details='error message\nerror content\nerror stdout\nerror stderr' + ) + ] + + annotations = get_case_annotations(results, report_individual_runs=True) + + self.assertEqual(expected, annotations) + + def test_get_error_annotation(self): + self.assertEqual(Annotation(path='file', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='failure', message='message', title='Error processing result file', raw_details='file'), get_error_annotation(ParseError('file', 'message', None, None, None))) + self.assertEqual(Annotation(path='file', start_line=12, end_line=12, start_column=None, end_column=None, annotation_level='failure', message='message', title='Error processing result file', raw_details='file'), get_error_annotation(ParseError('file', 'message', 12, None, None))) + self.assertEqual(Annotation(path='file', start_line=12, end_line=12, start_column=34, end_column=34, 
annotation_level='failure', message='message', title='Error processing result file', raw_details='file'), get_error_annotation(ParseError('file', 'message', 12, 34, None))) + self.assertEqual(Annotation(path='file', start_line=12, end_line=12, start_column=34, end_column=34, annotation_level='failure', message='message', title='Error processing result file', raw_details='file'), get_error_annotation(ParseError('file', 'message', 12, 34, ValueError('invalid value')))) + + def test_get_suite_annotations_and_for_suite(self): + out_log = 'stdout log' + err_log = 'stderr log' + multiline_out_log = 'stdout\nlog' + multiline_err_log = 'stderr\nlog' + empty_string = '' + whitespaces = ' \t\n' + whitespaces2 = '\n\t ' + + suites = [ + UnitTestSuite('no logs', 0, 0, 0, 0, None, None), + UnitTestSuite('out logs', 0, 0, 0, 0, out_log, None), + UnitTestSuite('err logs', 0, 0, 0, 0, None, err_log), + UnitTestSuite('both logs', 0, 0, 0, 0, multiline_out_log, multiline_err_log), + UnitTestSuite('empty string logs', 0, 0, 0, 0, empty_string, empty_string), + UnitTestSuite('whitespace logs', 0, 0, 0, 0, whitespaces, whitespaces2), + ] + + def create_annotation(name: str, source: str, log: str) -> Annotation: + return Annotation( + path=name, + start_line=0, + end_line=0, + start_column=None, + end_column=None, + annotation_level='warning' if source == 'stderr' else 'notice', + message=f'Test suite {name} has the following {source} output (see Raw output).', + title=f'Logging on {source} of test suite {name}', + raw_details=log + ) + + for suite in suites: + for with_out_logs, with_err_logs in [(False, False), (True, False), (False, True), (True, True)]: + with self.subTest(suite=suite, with_suite_out_logs=with_out_logs, with_suite_err_logs=with_err_logs): + actual = get_suite_annotations_for_suite(suite, with_suite_out_logs=with_out_logs, with_suite_err_logs=with_err_logs) + + expected_size = 0 + if with_out_logs and suite.stdout and suite.stdout.strip(): + expected = create_annotation(suite.name, 'stdout', suite.stdout) + self.assertIn(expected, actual) + expected_size = expected_size + 1 + if with_err_logs and suite.stderr and suite.stderr.strip(): + expected = create_annotation(suite.name, 'stderr', suite.stderr) + self.assertIn(expected, actual) + expected_size = expected_size + 1 + + self.assertEqual(expected_size, len(actual)) + + out_log_annotation = create_annotation('out logs', 'stdout', out_log) + err_log_annotation = create_annotation('err logs', 'stderr', err_log) + multiline_out_log_annotation = create_annotation('both logs', 'stdout', multiline_out_log) + multiline_err_log_annotation = create_annotation('both logs', 'stderr', multiline_err_log) + + tests = [ + (False, False, []), + (True, False, [out_log_annotation, multiline_out_log_annotation]), + (False, True, [err_log_annotation, multiline_err_log_annotation]), + (True, True, [out_log_annotation, err_log_annotation, multiline_out_log_annotation, multiline_err_log_annotation]), + ] + + for with_out_logs, with_err_logs, expected in tests: + with self.subTest(with_suite_out_logs=with_out_logs, with_suite_err_logs=with_err_logs): + self.maxDiff = None + actual = get_suite_annotations(suites, with_suite_out_logs=with_out_logs, with_suite_err_logs=with_err_logs) + self.assertEqual(expected, actual) + + def test_get_all_tests_list_annotation(self): + results = create_unit_test_case_results({ + (None, 'class1', 'test2'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', 
result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + }, + (None, 'class1', 'test1'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name=None, test_name='test1', result='skipped', message='skip message', content='skip content', stdout='skip stdout', stderr='skip stderr', time=None) + ], + }, + ('file', 'class1', 'test2'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + } + }) + + self.assertEqual([], get_all_tests_list_annotation(create_unit_test_case_results())) + self.assertEqual([Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the full list of tests.', title='3 tests found', raw_details='class1 ‑ test1\nclass1 ‑ test2\nfile ‑ class1 ‑ test2')], get_all_tests_list_annotation(results)) + del results[(None, 'class1', 'test1')] + del results[('file', 'class1', 'test2')] + self.assertEqual([Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There is 1 test, see "Raw output" for the name of the test.', title='1 test found', raw_details='class1 ‑ test2')], get_all_tests_list_annotation(results)) + + def test_get_all_tests_list_annotation_chunked(self): + results = create_unit_test_case_results({ + (None, 'class1', 'test2'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + }, + (None, 'class1', 'test1'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name=None, test_name='test1', result='skipped', message='skip message', content='skip content', stdout='skip stdout', stderr='skip stderr', time=None) + ], + }, + ('file', 'class1', 'test2'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + } + }) + + self.assertEqual([], get_all_tests_list_annotation(create_unit_test_case_results())) + self.assertEqual( + [ + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the list of tests 1 to 2.', title='3 tests found (test 1 to 2)', raw_details='class1 ‑ test1\nclass1 ‑ test2'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, 
end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the list of tests 3 to 3.', title='3 tests found (test 3 to 3)', raw_details='file ‑ class1 ‑ test2') + ], + get_all_tests_list_annotation(results, max_chunk_size=40) + ) + + def test_get_skipped_tests_list_annotation(self): + results = create_unit_test_case_results({ + (None, 'class1', 'test2'): { + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + }, + (None, 'class1', 'test1'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name=None, test_name='test1', result='skipped', message='skip message', content='skip content', stdout='skip stdout', stderr='skip stderr', time=None) + ], + }, + ('file', 'class1', 'test2'): { + 'success': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + } + }) + + self.assertEqual([], get_skipped_tests_list_annotation(create_unit_test_case_results())) + self.assertEqual([Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There is 1 skipped test, see "Raw output" for the name of the skipped test.', title='1 skipped test found', raw_details='class1 ‑ test2')], get_skipped_tests_list_annotation(results)) + del results[(None, 'class1', 'test1')]['success'] + self.assertEqual([Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 2 skipped tests, see "Raw output" for the full list of skipped tests.', title='2 skipped tests found', raw_details='class1 ‑ test1\nclass1 ‑ test2')], get_skipped_tests_list_annotation(results)) + + def test_get_skipped_tests_list_annotation_chunked(self): + results = create_unit_test_case_results({ + (None, 'class1', 'test2'): { + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + }, + (None, 'class1', 'test1'): { + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test1', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + }, + ('file', 'class1', 'test2'): { + 'skipped': [ + UnitTestCase(result_file='result-file1', test_file='file1', line=123, class_name='class1', test_name='test2', result='success', message='success message', content='success content', stdout='success stdout', stderr='success stderr', time=1.0) + ], + } + }) + + self.assertEqual([], get_skipped_tests_list_annotation(create_unit_test_case_results())) + self.assertEqual( + [ + Annotation(path='.github', start_line=0, end_line=0, start_column=None, 
end_column=None, annotation_level='notice', message='There are 3 skipped tests, see "Raw output" for the list of skipped tests 1 to 2.', title='3 skipped tests found (test 1 to 2)', raw_details='class1 ‑ test1\nclass1 ‑ test2'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 skipped tests, see "Raw output" for the list of skipped tests 3 to 3.', title='3 skipped tests found (test 3 to 3)', raw_details='file ‑ class1 ‑ test2') + ], + get_skipped_tests_list_annotation(results, max_chunk_size=40) + ) + + def test_chunk(self): + self.assertEqual([], chunk_test_list([], '\n', 100)) + + tests = [f'abcdefghijklmnopqrstu-{i}' for i in range(10)] + chunks = chunk_test_list(tests, '\n', 10) + self.assertEqual([], chunks) + + four_per_chunk = [['abcdefghijklmnopqrstu-0', + 'abcdefghijklmnopqrstu-1', + 'abcdefghijklmnopqrstu-2', + 'abcdefghijklmnopqrstu-3'], + ['abcdefghijklmnopqrstu-4', + 'abcdefghijklmnopqrstu-5', + 'abcdefghijklmnopqrstu-6', + 'abcdefghijklmnopqrstu-7'], + ['abcdefghijklmnopqrstu-8', + 'abcdefghijklmnopqrstu-9']] + + three_per_chunk = [['abcdefghijklmnopqrstuv-0', + 'abcdefghijklmnopqrstuv-1', + 'abcdefghijklmnopqrstuv-2'], + ['abcdefghijklmnopqrstuv-3', + 'abcdefghijklmnopqrstuv-4', + 'abcdefghijklmnopqrstuv-5'], + ['abcdefghijklmnopqrstuv-6', + 'abcdefghijklmnopqrstuv-7', + 'abcdefghijklmnopqrstuv-8'], + ['abcdefghijklmnopqrstuv-9']] + + tests = [f'abcdefghijklmnopqrstu-{i}' for i in range(10)] + chunks = chunk_test_list(tests, '\n', 100) + self.assertEqual(four_per_chunk, chunks) + + tests = [f'abcdefghijklmnopqrstuv-{i}' for i in range(10)] + chunks = chunk_test_list(tests, '\r\n', 100) + self.assertEqual(three_per_chunk, chunks) + + tests = [f'abcdefghijklmnopqrstuv-{i}' for i in range(10)] + chunks = chunk_test_list(tests, '\n', 100) + self.assertEqual(three_per_chunk, chunks) + + tests = [f'abcdefghijklmnopqrstuvw-{i}' for i in range(10)] + chunks = chunk_test_list(tests, '\n', 100) + self.assertEqual([['abcdefghijklmnopqrstuvw-0', + 'abcdefghijklmnopqrstuvw-1', + 'abcdefghijklmnopqrstuvw-2'], + ['abcdefghijklmnopqrstuvw-3', + 'abcdefghijklmnopqrstuvw-4', + 'abcdefghijklmnopqrstuvw-5'], + ['abcdefghijklmnopqrstuvw-6', + 'abcdefghijklmnopqrstuvw-7', + 'abcdefghijklmnopqrstuvw-8'], + ['abcdefghijklmnopqrstuvw-9']], + chunks) + + def test_files(self): + parsed = process_junit_xml_elems( + parse_junit_xml_files([str(test_files_path / 'pytest' / 'junit.gloo.elastic.spark.tf.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.elastic.spark.torch.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.elastic.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.standalone.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.static.xml'), + str(test_files_path / 'pytest' / 'junit.mpi.integration.xml'), + str(test_files_path / 'pytest' / 'junit.mpi.standalone.xml'), + str(test_files_path / 'pytest' / 'junit.mpi.static.xml'), + str(test_files_path / 'pytest' / 'junit.spark.integration.1.xml'), + str(test_files_path / 'pytest' / 'junit.spark.integration.2.xml')], + False, False) + ).with_commit('example') + results = get_test_results(parsed, False) + stats = get_stats(results) + md = get_long_summary_md(stats) + self.assertEqual(md, (f'  10 files    10 suites   39m 1s {duration_label_md}\n' + f'217 {all_tests_label_md} 208 {passed_tests_label_md}   9 {skipped_tests_label_md} 0 {failed_tests_label_md}\n' + f'373 runs  333 {passed_tests_label_md} 40 {skipped_tests_label_md} 0 
{failed_tests_label_md}\n' + f'\n' + f'Results for commit example.\n')) + + def test_file_without_cases(self): + parsed = process_junit_xml_elems(parse_junit_xml_files([str(test_files_path / 'no-cases.xml')], False, False)).with_commit('a commit sha') + results = get_test_results(parsed, False) + stats = get_stats(results) + md = get_long_summary_md(stats) + self.assertEqual(md, (f'0 {all_tests_label_md}   0 {passed_tests_label_md}  0s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'1 suites  0 {skipped_tests_label_md}\n' + f'1 files    0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit a commit.\n')) + + def test_file_without_cases_but_with_tests(self): + parsed = process_junit_xml_elems(parse_junit_xml_files([str(test_files_path / 'no-cases-but-tests.xml')], False, False)).with_commit('a commit sha') + results = get_test_results(parsed, False) + stats = get_stats(results) + md = get_long_summary_md(stats) + self.assertEqual(md, (f'6 {all_tests_label_md}   3 {passed_tests_label_md}  0s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'1 suites  2 {skipped_tests_label_md}\n' + f'1 files    1 {failed_tests_label_md}\n' + f'\n' + f'Results for commit a commit.\n')) + + def test_non_parsable_file(self): + parsed = process_junit_xml_elems(parse_junit_xml_files(['files/empty.xml'], False, False)).with_commit('a commit sha') + results = get_test_results(parsed, False) + stats = get_stats(results) + md = get_long_summary_md(stats) + self.assertEqual(md, (f'0 {all_tests_label_md}   0 {passed_tests_label_md}  0s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'0 suites  0 {skipped_tests_label_md}\n' + f'1 files    0 {failed_tests_label_md}\n' + f'1 errors\n' + f'\n' + f'Results for commit a commit.\n')) + + def test_files_with_testsuite_in_testsuite(self): + parsed = process_junit_xml_elems(parse_junit_xml_files([str(test_files_path / 'testsuite-in-testsuite.xml')], False, False)).with_commit('example') + results = get_test_results(parsed, False) + stats = get_stats(results) + md = get_long_summary_md(stats) + self.assertEqual(md, (f'5 {all_tests_label_md}   5 {passed_tests_label_md}  4s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'4 suites  0 {skipped_tests_label_md}\n' + f'1 files    0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit example.\n')) + + def test_files_without_annotations(self): + parsed = process_junit_xml_elems( + parse_junit_xml_files( + [str(test_files_path / 'pytest' / 'junit.gloo.elastic.spark.tf.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.elastic.spark.torch.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.elastic.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.standalone.xml'), + str(test_files_path / 'pytest' / 'junit.gloo.static.xml'), + str(test_files_path / 'pytest' / 'junit.mpi.integration.xml'), + str(test_files_path / 'pytest' / 'junit.mpi.standalone.xml'), + str(test_files_path / 'pytest' / 'junit.mpi.static.xml'), + str(test_files_path / 'pytest' / 'junit.spark.integration.1.xml'), + str(test_files_path / 'pytest' / 'junit.spark.integration.2.xml')], + False, drop_testcases=True + ) + ).with_commit('example') + results 
= get_test_results(parsed, False) + stats = get_stats(results) + md = get_long_summary_md(stats) + self.assertEqual(md, (f'373 {all_tests_label_md}   333 {passed_tests_label_md}  39m 1s {duration_label_md}\n' + f'  10 suites    40 {skipped_tests_label_md}\n' + f'  10 files        0 {failed_tests_label_md}\n' + f'\n' + f'Results for commit example.\n')) + + def test_message_is_contained_in_content(self): + # non-contained test cases + for message, content in [(None, None), + ('message', None), + (None, 'content'), + ('message', 'content'), + ('message', 'the message in the content')]: + with self.subTest(message=message, content=content): + self.assertFalse(message_is_contained_in_content(message, content)) + + # contained test cases + for message, content in [('message', 'message'), + ('message', 'message in content'), + ('the message', ' the message in content'), + ('the message', '\tthe message in the content')]: + with self.subTest(message=message, content=content): + self.assertTrue(message_is_contained_in_content(message, content)) + + +if __name__ == '__main__': + unittest.main() diff --git a/python/test/test_publisher.py b/python/test/test_publisher.py new file mode 100644 index 0000000..e1feb7e --- /dev/null +++ b/python/test/test_publisher.py @@ -0,0 +1,2685 @@ +import dataclasses +import json +import os +import pathlib +import sys +import tempfile +import unittest +from collections.abc import Collection +from datetime import datetime, timezone +from typing import Optional, List, Mapping, Union, Any, Callable + +import github.CheckRun +import mock +from github import Github, GithubException + +from publish import __version__, get_json_path, comment_mode_off, comment_mode_always, \ + comment_mode_changes, comment_mode_changes_failures, comment_mode_changes_errors, \ + comment_mode_failures, comment_mode_errors, Annotation, default_annotations, \ + get_error_annotation, digest_header, get_digest_from_stats, \ + all_tests_list, skipped_tests_list, none_annotations, \ + all_tests_label_md, skipped_tests_label_md, failed_tests_label_md, passed_tests_label_md, test_errors_label_md, \ + duration_label_md, pull_request_build_mode_merge, punctuation_space, \ + get_long_summary_with_digest_md +from publish.github_action import GithubAction +from publish.publisher import Publisher, Settings, PublishData +from publish.unittestresults import UnitTestSuite, UnitTestCase, ParseError, UnitTestRunResults, UnitTestRunDeltaResults, \ + UnitTestCaseResults, create_unit_test_case_results, get_test_results, get_stats, ParsedUnitTestResultsWithCommit + +sys.path.append(str(pathlib.Path(__file__).resolve().parent)) + +from test_unittestresults import create_unit_test_run_results + + +errors = [ParseError('file', 'error', 1, 2, exception=ValueError("Invalid value"))] + + +@dataclasses.dataclass(frozen=True) +class CommentConditionTest: + earlier_is_none: bool + earlier_is_different: bool + earlier_is_different_in_failures: bool + earlier_is_different_in_errors: bool + earlier_has_failures: bool + earlier_has_errors: bool + # current_has_changes being None indicates it is not a UnitTestRunDeltaResults but UnitTestRunResults + current_has_changes: Optional[bool] + current_has_failure_changes: bool + current_has_error_changes: bool + current_has_failures: bool + current_has_errors: bool + + +class TestPublisher(unittest.TestCase): + + @staticmethod + def create_github_collection(collection: Collection) -> mock.Mock: + mocked = mock.MagicMock() + mocked.totalCount = len(collection) + mocked.__iter__ = 
mock.Mock(side_effect=collection.__iter__) + return mocked + + @staticmethod + def create_github_pr(repo: str, + base_commit_sha: Optional[str] = 'base', + head_commit_sha: Optional[str] = 'head', + merge_commit_sha: Optional[str] = 'merge', + number: Optional[int] = None, + state: str = 'open'): + pr = mock.MagicMock() + pr.as_pull_request = mock.Mock(return_value=pr) + pr.base.repo.full_name = repo + pr.base.sha = base_commit_sha + pr.number = number + pr.state = state + pr.head.sha = head_commit_sha + pr.merge_commit_sha = merge_commit_sha + return pr + + @staticmethod + def create_settings(actor='actor', + comment_mode=comment_mode_always, + job_summary=True, + compare_earlier=True, + report_individual_runs=False, + dedup_classes_by_file_name=False, + check_run_annotation=default_annotations, + event: Optional[dict] = {'before': 'before'}, + event_name: str = 'event name', + is_fork: bool = False, + json_file: Optional[str] = None, + json_thousands_separator: str = punctuation_space, + json_suite_details: bool = False, + json_test_case_results: bool = False, + pull_request_build: str = pull_request_build_mode_merge, + test_changes_limit: Optional[int] = 5, + search_pull_requests: bool = False): + return Settings( + token=None, + actor=actor, + api_url='https://the-github-api-url', + graphql_url='https://the-github-graphql-url', + api_retries=1, + event=event, + event_file=None, + event_name=event_name, + is_fork=is_fork, + repo='owner/repo', + commit='commit', + json_file=json_file, + json_thousands_separator=json_thousands_separator, + json_suite_details=json_suite_details, + json_test_case_results=json_test_case_results, + fail_on_errors=True, + fail_on_failures=True, + action_fail=False, + action_fail_on_inconclusive=False, + files_glob='*.xml', + junit_files_glob=None, + nunit_files_glob=None, + xunit_files_glob=None, + trx_files_glob=None, + time_factor=1.0, + test_file_prefix=None, + check_name='Check Name', + comment_title='Comment Title', + comment_mode=comment_mode, + job_summary=job_summary, + compare_earlier=compare_earlier, + pull_request_build=pull_request_build, + test_changes_limit=test_changes_limit, + report_individual_runs=report_individual_runs, + report_suite_out_logs=False, + report_suite_err_logs=False, + dedup_classes_by_file_name=dedup_classes_by_file_name, + large_files=False, + ignore_runs=False, + check_run_annotation=check_run_annotation, + seconds_between_github_reads=1.5, + seconds_between_github_writes=2.5, + secondary_rate_limit_wait_seconds=6.0, + search_pull_requests=search_pull_requests, + ) + + stats = UnitTestRunResults( + files=1, + errors=[], + suites=2, + duration=3, + + suite_details=[], + + tests=22, + tests_succ=4, + tests_skip=5, + tests_fail=6, + tests_error=7, + + runs=38, + runs_succ=8, + runs_skip=9, + runs_fail=10, + runs_error=11, + + commit='commit' + ) + + def create_mocks(self, + repo_name: Optional[str] = None, + repo_login: Optional[str] = None, + commit: Optional[mock.Mock] = mock.MagicMock(), + digest: Optional[str] = None, + check_names: List[str] = None): + gh = mock.MagicMock(Github) + gh._Github__requester = mock.MagicMock() + gha = mock.MagicMock(GithubAction) + repo = mock.MagicMock() + + # have repo.create_check_run return the arguments given to it + def create_check_run_hook(**kwargs) -> Mapping[str, Any]: + return mock.MagicMock(html_url='mock url', create_check_run_kwargs=kwargs) + + repo.create_check_run = mock.Mock(side_effect=create_check_run_hook) + + if commit: + runs = [] + if digest and check_names: + for 
check_name in check_names: + run = mock.MagicMock() + run.name = check_name + check_run_output = mock.MagicMock(summary='summary\n{}{}'.format(digest_header, digest)) + run.output = check_run_output + runs.append(run) + + check_runs = self.create_github_collection(runs) + commit.get_check_runs = mock.Mock(return_value=check_runs) + repo.get_commit = mock.Mock(return_value=commit) + repo.owner.login = repo_login + repo.name = repo_name + gh.get_repo = mock.Mock(return_value=repo) + + return gh, gha, gh._Github__requester, repo, commit + + cases = create_unit_test_case_results({ + (None, 'class', 'test'): dict( + success=[ + UnitTestCase( + result_file='result file', test_file='test file', line=0, + class_name='class', test_name='test', + result='success', message=None, content=None, + stdout=None, stderr=None, + time=1.2 + ) + ], + failure=[ + UnitTestCase( + result_file='result file', test_file='test file', line=0, + class_name='class', test_name='test', + result='failure', message='message', content='content', + stdout='stdout', stderr='stderr', + time=1.234 + ) + ] + ), + (None, 'class', 'test2'): dict( + skipped=[ + UnitTestCase( + result_file='result file', test_file='test file', line=0, + class_name='class', test_name='test2', + result='skipped', message='skipped', content=None, + stdout=None, stderr=None, + time=None + ) + ], + error=[ + UnitTestCase( + result_file='result file', test_file='test file', line=0, + class_name='class', test_name='test2', + result='error', message='error message', content='error content', + stdout='error stdout', stderr='error stderr', + time=1.2345 + ) + ] + ), + (None, 'class', 'test3'): dict( + skipped=[ + UnitTestCase( + result_file='result file', test_file='test file', line=0, + class_name='class', test_name='test3', + result='skipped', message='skipped', content=None, + stdout=None, stderr=None, + time=None + ) + ] + ) + }) + + @staticmethod + def get_stats(base: str) -> UnitTestRunResults: + return UnitTestRunResults( + files=1, + errors=[], + suites=2, + duration=3, + + suite_details=[], + + tests=21, + tests_succ=12, + tests_skip=4, + tests_fail=2, + tests_error=3, + + runs=37, + runs_succ=25, + runs_skip=7, + runs_fail=4, + runs_error=1, + + commit=base + ) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + base_digest = get_digest_from_stats(get_stats.__func__('base')) + past_digest = get_digest_from_stats(get_stats.__func__('past')) + + @staticmethod + def call_mocked_publish(settings: Settings, + stats: UnitTestRunResults = stats, + cases: UnitTestCaseResults = cases, + prs: List[object] = [], + cr: object = None, + bcr: object = None): + # UnitTestCaseResults is mutable, always copy it + cases = create_unit_test_case_results(cases) + + # mock Publisher and call publish + publisher = mock.MagicMock(Publisher) + publisher._settings = settings + publisher.get_pulls = mock.Mock(return_value=prs) + publisher.publish_check = mock.Mock(return_value=(cr, None)) + publisher.get_check_run = mock.Mock(return_value=bcr) + Publisher.publish(publisher, stats, cases, 'success') + + # return calls to mocked instance, except call to _logger + mock_calls = [(call[0], call.args, call.kwargs) + for call in publisher.mock_calls + if not call[0].startswith('_logger.')] + return mock_calls + + def test_get_test_list_annotations(self): + cases = create_unit_test_case_results({ + (None, 'class', 'test abcd'): {'success': [None]}, + (None, 'class', 'test efgh'): {'skipped': [None]}, + (None, 'class', 'test ijkl'): 
{'skipped': [None]}, + }) + + settings = self.create_settings(check_run_annotation=[all_tests_list, skipped_tests_list]) + gh = mock.MagicMock() + publisher = Publisher(settings, gh, None) + annotations = publisher.get_test_list_annotations(cases, max_chunk_size=42) + + self.assertEqual([ + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 2 skipped tests, see "Raw output" for the full list of skipped tests.', title='2 skipped tests found', raw_details='class ‑ test efgh\nclass ‑ test ijkl'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the list of tests 1 to 2.', title='3 tests found (test 1 to 2)', raw_details='class ‑ test abcd\nclass ‑ test efgh'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the list of tests 3 to 3.', title='3 tests found (test 3 to 3)', raw_details='class ‑ test ijkl') + ], annotations) + + def test_get_test_list_annotations_chunked_and_restricted_unicode(self): + cases = create_unit_test_case_results({ + (None, 'class', 'test 𝒂'): {'success': [None]}, + (None, 'class', 'test 𝒃'): {'skipped': [None]}, + (None, 'class', 'test 𝒄'): {'skipped': [None]}, + }) + + settings = self.create_settings(check_run_annotation=[all_tests_list, skipped_tests_list]) + gh = mock.MagicMock() + publisher = Publisher(settings, gh, None) + annotations = publisher.get_test_list_annotations(cases, max_chunk_size=42) + + self.assertEqual([ + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 2 skipped tests, see "Raw output" for the list of skipped tests 1 to 1.', title='2 skipped tests found (test 1 to 1)', raw_details='class ‑ test \\U0001d483'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 2 skipped tests, see "Raw output" for the list of skipped tests 2 to 2.', title='2 skipped tests found (test 2 to 2)', raw_details='class ‑ test \\U0001d484'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the list of tests 1 to 1.', title='3 tests found (test 1 to 1)', raw_details='class ‑ test \\U0001d482'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the list of tests 2 to 2.', title='3 tests found (test 2 to 2)', raw_details='class ‑ test \\U0001d483'), + Annotation(path='.github', start_line=0, end_line=0, start_column=None, end_column=None, annotation_level='notice', message='There are 3 tests, see "Raw output" for the list of tests 3 to 3.', title='3 tests found (test 3 to 3)', raw_details='class ‑ test \\U0001d484') + ], annotations) + + def do_test_require_comment(self, comment_mode, test_expectation: Callable[["CommentConditionTest"], bool]): + tests = [(test, test_expectation(test)) for test in self.comment_condition_tests] + + publisher = mock.MagicMock(Publisher) + publisher._settings = self.create_settings(comment_mode=comment_mode) + + for test, expected in tests: + with self.subTest(test): + earlier = mock.MagicMock( + 
is_different=mock.Mock(return_value=test.earlier_is_different), + is_different_in_failures=mock.Mock(return_value=test.earlier_is_different_in_failures), + is_different_in_errors=mock.Mock(return_value=test.earlier_is_different_in_errors), + has_failures=test.earlier_has_failures, + has_errors=test.earlier_has_errors + ) if not test.earlier_is_none else None + current = mock.MagicMock( + is_delta=test.current_has_changes is not None, + has_failures=test.current_has_failures, + has_errors=test.current_has_errors) + if current.is_delta: + current.has_changes = test.current_has_changes + current.has_failure_changes = test.current_has_failure_changes + current.has_error_changes = test.current_has_error_changes + current.without_delta = mock.Mock(return_value=current) + required = Publisher.require_comment(publisher, current, earlier) + self.assertEqual(required, expected) + # do not access these properties when current is not a delta stats + self.assertTrue(current.is_delta or 'has_changes' not in current._mock_children, 'has_changes') + self.assertTrue(current.is_delta or 'has_failure_changes' not in current._mock_children, 'has_failure_changes') + self.assertTrue(current.is_delta or 'has_error_changes' not in current._mock_children, 'has_error_changes') + + comment_condition_tests = [CommentConditionTest(earlier_is_none, + earlier_is_different, earlier_is_different_in_failures, earlier_is_different_in_errors, + earlier_has_failures, earlier_has_errors, + current_has_changes, current_has_failure_changes, current_has_error_changes, + current_has_failures, current_has_errors) + for earlier_is_none in [False, True] + for earlier_is_different in [False, True] + for earlier_is_different_in_failures in ([False, True] if earlier_is_different else [False]) + for earlier_is_different_in_errors in ([False, True] if earlier_is_different else [False]) + for earlier_has_failures in [False, True] + for earlier_has_errors in [False, True] + + for current_has_changes in [None, False, True] + for current_has_failure_changes in ([False, True] if current_has_changes else [False]) + for current_has_error_changes in ([False, True] if current_has_changes else [False]) + for current_has_failures in [False, True] + for current_has_errors in [False, True]] + + def test_require_comment_off(self): + self.do_test_require_comment( + comment_mode_off, + lambda _: False + ) + + def test_require_comment_always(self): + self.do_test_require_comment( + comment_mode_always, + lambda _: True + ) + + def test_require_comment_changes(self): + self.do_test_require_comment( + comment_mode_changes, + lambda test: not test.earlier_is_none and test.earlier_is_different or + test.current_has_changes is None or test.current_has_changes + ) + + def test_require_comment_changes_failures(self): + self.do_test_require_comment( + comment_mode_changes_failures, + lambda test: not test.earlier_is_none and (test.earlier_is_different_in_failures or test.earlier_is_different_in_errors) or + test.current_has_changes is None or test.current_has_failure_changes or test.current_has_error_changes + ) + + def test_require_comment_changes_errors(self): + self.do_test_require_comment( + comment_mode_changes_errors, + lambda test: not test.earlier_is_none and test.earlier_is_different_in_errors or + test.current_has_changes is None or test.current_has_error_changes + ) + + def test_require_comment_failures(self): + self.do_test_require_comment( + comment_mode_failures, + lambda test: not test.earlier_is_none and (test.earlier_has_failures or
test.earlier_has_errors) or + (test.current_has_failures or test.current_has_errors) + ) + + def test_require_comment_errors(self): + self.do_test_require_comment( + comment_mode_errors, + lambda test: not test.earlier_is_none and test.earlier_has_errors or test.current_has_errors + ) + + def test_publish_with_fork(self): + settings = self.create_settings(is_fork=True, job_summary=True, comment_mode=comment_mode_always) + bcr = mock.MagicMock() + with mock.patch('publish.publisher.logger') as l: + mock_calls = self.call_mocked_publish(settings, prs=[object()], bcr=bcr) + self.assertEqual([ + mock.call('Publishing success results for commit commit'), + mock.call('This action is running on a pull_request event for a fork repository. ' + 'Pull request comments and check runs cannot be created, so disabling these features. ' + 'To fully run the action on fork repository pull requests, ' + f'see https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}' + '/README.md#support-fork-repositories-and-dependabot-branches') + ], l.info.call_args_list) + + self.assertEqual(2, len(mock_calls)) + (method, args, kwargs) = mock_calls[0] + self.assertEqual('get_check_run', method) + self.assertEqual(('before', ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('publish_job_summary', method) + self.assertEqual((settings.comment_title, self.stats, None, bcr), args) + self.assertEqual({}, kwargs) + + def test_publish_without_comment(self): + settings = self.create_settings(comment_mode=comment_mode_off) + mock_calls = self.call_mocked_publish(settings, prs=[object()]) + + self.assertEqual(2, len(mock_calls)) + (method, args, kwargs) = mock_calls[0] + self.assertEqual('publish_check', method) + self.assertEqual((self.stats, self.cases, 'success'), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('publish_job_summary', method) + self.assertEqual((settings.comment_title, self.stats, None, None), args) + self.assertEqual({}, kwargs) + + def test_publish_without_job_summary_and_comment(self): + settings = self.create_settings(comment_mode=comment_mode_off, job_summary=False) + mock_calls = self.call_mocked_publish(settings, prs=[object()]) + + self.assertEqual(1, len(mock_calls)) + (method, args, kwargs) = mock_calls[0] + self.assertEqual('publish_check', method) + self.assertEqual((self.stats, self.cases, 'success'), args) + self.assertEqual({}, kwargs) + + def test_publish_with_comment_without_pr(self): + settings = self.create_settings() + mock_calls = self.call_mocked_publish(settings, prs=[]) + + self.assertEqual(3, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('publish_check', method) + self.assertEqual((self.stats, self.cases, 'success'), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('publish_job_summary', method) + self.assertEqual((settings.comment_title, self.stats, None, None), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[2] + self.assertEqual('get_pulls', method) + self.assertEqual((settings.commit, ), args) + self.assertEqual({}, kwargs) + + def test_publish_without_compare(self): + pr = object() + cr = object() + settings = self.create_settings(compare_earlier=False) + mock_calls = self.call_mocked_publish(settings, prs=[pr], cr=cr) + + self.assertEqual(4, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('publish_check', method) + 
self.assertEqual((self.stats, self.cases, 'success'), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('publish_job_summary', method) + self.assertEqual((settings.comment_title, self.stats, cr, None), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[2] + self.assertEqual('get_pulls', method) + self.assertEqual((settings.commit, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[3] + self.assertEqual('publish_comment', method) + self.assertEqual((settings.comment_title, self.stats, pr, cr, self.cases), args) + self.assertEqual({}, kwargs) + + def test_publish_comment_compare_earlier(self): + pr = mock.MagicMock(number="1234", create_issue_comment=mock.Mock(return_value=mock.MagicMock())) + cr = mock.MagicMock() + bcr = mock.MagicMock() + bs = UnitTestRunResults(1, [], 1, 1, [], 3, 1, 2, 0, 0, 3, 1, 2, 0, 0, 'commit') + stats = self.stats + cases = create_unit_test_case_results(self.cases) + settings = self.create_settings(compare_earlier=True) + publisher = mock.MagicMock(Publisher) + publisher._settings = settings + publisher.get_check_run = mock.Mock(return_value=bcr) + publisher.get_stats_from_check_run = mock.Mock(return_value=bs) + publisher.get_stats_delta = mock.Mock(return_value=bs) + publisher.get_base_commit_sha = mock.Mock(return_value="base commit") + publisher.get_test_lists_from_check_run = mock.Mock(return_value=(None, None)) + publisher.require_comment = mock.Mock(return_value=True) + publisher.get_latest_comment = mock.Mock(return_value=None) + with mock.patch('publish.publisher.get_long_summary_with_digest_md', return_value='body'): + Publisher.publish_comment(publisher, 'title', stats, pr, cr, cases) + mock_calls = publisher.mock_calls + + self.assertEqual(6, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('get_base_commit_sha', method) + self.assertEqual((pr, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('get_check_run', method) + self.assertEqual(('base commit', ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[2] + self.assertEqual('get_stats_from_check_run', method) + self.assertEqual((bcr, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[3] + self.assertEqual('get_test_lists_from_check_run', method) + self.assertEqual((bcr, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[4] + self.assertEqual('get_latest_comment', method) + + (method, args, kwargs) = mock_calls[5] + self.assertEqual('require_comment', method) + + mock_calls = pr.mock_calls + self.assertEqual(1, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('create_issue_comment', method) + self.assertEqual(('## title\nbody', ), args) + self.assertEqual({}, kwargs) + + def test_publish_comment_compare_earlier_with_restricted_unicode(self): + pr = mock.MagicMock(number="1234", create_issue_comment=mock.Mock(return_value=mock.MagicMock())) + cr = mock.MagicMock(html_url='html://url') + bcr = mock.MagicMock() + bs = UnitTestRunResults(1, [], 1, 1, [], 3, 1, 2, 0, 0, 3, 1, 2, 0, 0, 'commit') + stats = self.stats + # the new test cases with un-restricted unicode, as they come from test result files + cases = create_unit_test_case_results({ + # removed test 𝒂 + (None, 'class', 'test 𝒃'): {'success': [None]}, # unchanged test 𝒃 + # removed skipped 𝒄 + (None, 'class', 'skipped 𝒅'): {'skipped': [None]}, # 
unchanged skipped 𝒅 + (None, 'class', 'skipped 𝒆'): {'skipped': [None]}, # added skipped 𝒆 + (None, 'class', 'test 𝒇'): {'success': [None]}, # added test 𝒇 + }) + + settings = self.create_settings(compare_earlier=True) + publisher = mock.MagicMock(Publisher) + publisher._settings = settings + publisher.get_check_run = mock.Mock(return_value=bcr) + publisher.get_stats_from_check_run = mock.Mock(return_value=bs) + publisher.get_stats_delta = mock.Mock(return_value=bs) + publisher.get_base_commit_sha = mock.Mock(return_value="base commit") + publisher.get_latest_comment = mock.Mock(return_value=None) + publisher.require_comment = mock.Mock(return_value=True) + # the earlier test cases with restricted unicode as they come from the check runs API + publisher.get_test_lists_from_check_run = mock.Mock(return_value=( + # before, these existed: test 𝒂, test 𝒃, skipped 𝒄, skipped 𝒅 + ['class ‑ test \\U0001d482', 'class ‑ test \\U0001d483', 'class ‑ skipped \\U0001d484', 'class ‑ skipped \\U0001d485'], + ['class ‑ skipped \\U0001d484', 'class ‑ skipped \\U0001d485'] + )) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + Publisher.publish_comment(publisher, 'title', stats, pr, cr, cases) + expected_digest = f'{digest_header}{get_digest_from_stats(stats)}' + + mock_calls = publisher.mock_calls + + self.assertEqual(6, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('get_base_commit_sha', method) + self.assertEqual((pr, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('get_check_run', method) + self.assertEqual(('base commit', ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[2] + self.assertEqual('get_stats_from_check_run', method) + self.assertEqual((bcr, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[3] + self.assertEqual('get_test_lists_from_check_run', method) + self.assertEqual((bcr, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[4] + self.assertEqual('get_latest_comment', method) + + (method, args, kwargs) = mock_calls[5] + self.assertEqual('require_comment', method) + + mock_calls = pr.mock_calls + self.assertEqual(1, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('create_issue_comment', method) + self.assertEqual(('## title\n' + '\u205f\u20041 files\u2004 ±\u205f\u20040\u2002\u2003' + '2 suites\u2004 +1\u2002\u2003\u2002' + f'3s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests") +2s\n' + '22 tests +19\u2002\u2003' + f'4 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests") +3\u2002\u2003' + f'5 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests") +3\u2002\u2003\u205f\u2004' + f'6 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests") +\u205f\u20046\u2002\u2003\u205f\u2004' + f'7 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors") +\u205f\u20047\u2002\n' + f'38 runs\u2006 +35\u2002\u20038 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests") 
+7\u2002\u2003' + f'9 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests") +7\u2002\u2003' + f'10 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests") +10\u2002\u2003' + f'11 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors") +11\u2002\n' + '\n' + 'For more details on these failures and errors, see [this check](html://url).\n' + '\n' + 'Results for commit commit.\u2003± Comparison against base commit commit.\n' + '\n' + '
<details>\n' + '  <summary>This pull request removes 2 and adds 2 tests. Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'class ‑ skipped \\U0001d484\n' + 'class ‑ test \\U0001d482\n' + '```\n' + '\n' + '```\n' + 'class ‑ skipped \\U0001d486\n' + 'class ‑ test \\U0001d487\n' + '```\n' + '</details>
\n' + '\n' + '
<details>\n' + '  <summary>This pull request removes 1 skipped test and adds 1 skipped test. Note that renamed tests count towards both.</summary>\n' + '\n' + '```\n' + 'class ‑ skipped \\U0001d484\n' + '```\n' + '\n' + '```\n' + 'class ‑ skipped \\U0001d486\n' + '```\n' + '</details>
\n' + '\n' + f'{expected_digest}\n', ), args) + self.assertEqual({}, kwargs) + + def test_publish_comment_compare_with_itself(self): + pr = mock.MagicMock() + cr = mock.MagicMock() + stats = self.stats + cases = create_unit_test_case_results(self.cases) + settings = self.create_settings(compare_earlier=True) + publisher = mock.MagicMock(Publisher) + publisher._settings = settings + publisher.get_check_run = mock.Mock(return_value=None) + publisher.get_base_commit_sha = mock.Mock(return_value=stats.commit) + publisher.get_test_lists_from_check_run = mock.Mock(return_value=(None, None)) + publisher.get_latest_comment = mock.Mock(return_value=None) + with mock.patch('publish.publisher.get_long_summary_md', return_value='body'): + Publisher.publish_comment(publisher, 'title', stats, pr, cr, cases) + mock_calls = publisher.mock_calls + + self.assertEqual(1, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('get_base_commit_sha', method) + self.assertEqual((pr, ), args) + self.assertEqual({}, kwargs) + + mock_calls = pr.mock_calls + self.assertEqual(0, len(mock_calls)) + + def test_publish_comment_compare_with_None(self): + pr = mock.MagicMock(number="1234", create_issue_comment=mock.Mock(return_value=mock.MagicMock())) + cr = mock.MagicMock() + stats = self.stats + cases = create_unit_test_case_results(self.cases) + settings = self.create_settings(compare_earlier=True) + publisher = mock.MagicMock(Publisher) + publisher._settings = settings + publisher.get_check_run = mock.Mock(return_value=None) + publisher.get_base_commit_sha = mock.Mock(return_value=None) + publisher.get_test_lists_from_check_run = mock.Mock(return_value=(None, None)) + publisher.get_latest_comment = mock.Mock(return_value=None) + publisher.require_comment = mock.Mock(return_value=True) + with mock.patch('publish.publisher.get_long_summary_with_digest_md', return_value='body'): + Publisher.publish_comment(publisher, 'title', stats, pr, cr, cases) + mock_calls = publisher.mock_calls + + self.assertEqual(5, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('get_base_commit_sha', method) + self.assertEqual((pr, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('get_check_run', method) + self.assertEqual((None, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[2] + self.assertEqual('get_test_lists_from_check_run', method) + self.assertEqual((None, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[3] + self.assertEqual('get_latest_comment', method) + + (method, args, kwargs) = mock_calls[4] + self.assertEqual('require_comment', method) + + mock_calls = pr.mock_calls + self.assertEqual(1, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('create_issue_comment', method) + self.assertEqual(('## title\nbody', ), args) + self.assertEqual({}, kwargs) + + def do_test_publish_comment_with_reuse_comment(self, one_exists: bool): + pr = mock.MagicMock(number="1234", create_issue_comment=mock.Mock(return_value=mock.MagicMock())) + cr = mock.MagicMock() + lc = mock.MagicMock(body='latest comment') if one_exists else None + stats = self.stats + cases = create_unit_test_case_results(self.cases) + settings = self.create_settings(comment_mode=comment_mode_always, compare_earlier=False) + publisher = mock.MagicMock(Publisher) + publisher._settings = settings + publisher.get_test_lists_from_check_run = mock.Mock(return_value=(None, None)) + 
publisher.get_latest_comment = mock.Mock(return_value=lc) + publisher.reuse_comment = mock.Mock(return_value=one_exists) + publisher.require_comment = mock.Mock(return_value=True) + with mock.patch('publish.publisher.get_long_summary_with_digest_md', return_value='body'): + Publisher.publish_comment(publisher, 'title', stats, pr, cr, cases) + mock_calls = publisher.mock_calls + + self.assertEqual(5 if one_exists else 3, len(mock_calls)) + + (method, args, kwargs) = mock_calls[0] + self.assertEqual('get_test_lists_from_check_run', method) + self.assertEqual((None, ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[1] + self.assertEqual('get_latest_comment', method) + self.assertEqual((pr, ), args) + self.assertEqual({}, kwargs) + + if one_exists: + (method, args, kwargs) = mock_calls[2] + self.assertEqual('get_stats_from_summary_md', method) + self.assertEqual(('latest comment', ), args) + self.assertEqual({}, kwargs) + + (method, args, kwargs) = mock_calls[3] + self.assertEqual('require_comment', method) + + (method, args, kwargs) = mock_calls[4] + self.assertEqual('reuse_comment', method) + self.assertEqual((lc, '## title\nbody'), args) + self.assertEqual({}, kwargs) + else: + (method, args, kwargs) = mock_calls[2] + self.assertEqual('require_comment', method) + + mock_calls = pr.mock_calls + self.assertEqual(0 if one_exists else 1, len(mock_calls)) + + if not one_exists: + (method, args, kwargs) = mock_calls[0] + self.assertEqual('create_issue_comment', method) + self.assertEqual(('## title\nbody', ), args) + self.assertEqual({}, kwargs) + + def test_publish_comment_with_reuse_comment_none_existing(self): + self.do_test_publish_comment_with_reuse_comment(one_exists=False) + + def test_publish_comment_with_reuse_comment_one_existing(self): + self.do_test_publish_comment_with_reuse_comment(one_exists=True) + + def do_test_reuse_comment(self, earlier_body: str, expected_body: str): + comment = mock.MagicMock() + publisher = mock.MagicMock(Publisher) + Publisher.reuse_comment(publisher, comment, earlier_body) + + comment.edit.assert_called_once_with(expected_body) + self.assertEqual(0, len(publisher.mock_calls)) + + def test_reuse_comment_existing_not_updated(self): + # we expect the body to be extended by the recycle message + self.do_test_reuse_comment(earlier_body='a new comment', + expected_body='a new comment\n:recycle: This comment has been updated with latest results.') + + def test_reuse_comment_existing_updated(self): + # we do not expect the body to be extended by the recycle message + self.do_test_reuse_comment(earlier_body='comment already updated\n:recycle: Has been updated', + expected_body='comment already updated\n:recycle: Has been updated') + + def test_get_pull_from_event(self): + settings = self.create_settings() + gh, gha, req, repo, commit = self.create_mocks() + pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit) + repo.get_pull = mock.Mock(return_value=pr) + + publisher = Publisher(settings, gh, gha) + + actual = publisher.get_pull_from_event() + self.assertIsNone(actual) + repo.get_pull.assert_not_called() + + # test with pull request in event file + settings = self.create_settings(event={'pull_request': {'number': 1234, 'base': {'repo': {'full_name': 'owner/repo'}}}}) + publisher = Publisher(settings, gh, gha) + + actual = publisher.get_pull_from_event() + self.assertIs(actual, pr) + repo.get_pull.assert_called_once_with(1234) + repo.get_pull.reset_mock() + + # test with none in pull request + for event in [
+ {}, + {'pull_request': None}, + {'pull_request': {'number': 1234, 'base': None}}, + {'pull_request': {'number': 1234, 'base': {'repo': None}}}, + {'pull_request': {'number': 1234, 'base': {'repo': {}}}}, + ]: + settings = self.create_settings(event=event) + publisher = Publisher(settings, gh, gha) + + actual = publisher.get_pull_from_event() + self.assertIsNone(actual) + repo.get_pull.assert_not_called() + + def do_test_get_pulls(self, + settings: Settings, + pull_requests: mock.Mock, + event_pull_request: Optional[mock.Mock], + expected: List[mock.Mock]) -> mock.Mock: + gh, gha, req, repo, commit = self.create_mocks() + + gh.search_issues = mock.Mock(return_value=pull_requests) + commit.get_pulls = mock.Mock(return_value=pull_requests) + if event_pull_request is not None: + repo.get_pull = mock.Mock(return_value=event_pull_request) + + publisher = Publisher(settings, gh, gha) + + actual = publisher.get_pulls(settings.commit) + self.assertEqual(expected, actual) + if settings.search_pull_requests: + gh.search_issues.assert_called_once_with('type:pr repo:"{}" {}'.format(settings.repo, settings.commit)) + commit.get_pulls.assert_not_called() + else: + gh.search_issues.assert_not_called() + if event_pull_request is not None and \ + settings.repo == get_json_path(settings.event, 'pull_request.base.repo.full_name'): + repo.get_pull.assert_called_once_with(event_pull_request.number) + commit.get_pulls.assert_not_called() + else: + repo.get_pull.assert_not_called() + commit.get_pulls.assert_called_once_with() + return gha + + def test_get_pulls_without_event(self): + settings = self.create_settings() + pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit) + pull_requests = self.create_github_collection([pr]) + gha = self.do_test_get_pulls(settings, pull_requests, None, [pr]) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_with_other_event_pr(self): + settings = self.create_settings(event={'pull_request': {'number': 1234, 'base': {'repo': {'full_name': 'owner/repo'}}}}) + event_pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit, number=1234) + pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit, number=5678) + pull_requests = self.create_github_collection([pr]) + gha = self.do_test_get_pulls(settings, pull_requests, event_pr, [event_pr]) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_with_other_repo_event_pr(self): + settings = self.create_settings(event={'pull_request': {'number': 1234, 'base': {'repo': {'full_name': 'fork/repo'}}}}) + event_pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit, number=1234) + pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit, number=5678) + pull_requests = self.create_github_collection([pr]) + gha = self.do_test_get_pulls(settings, pull_requests, event_pr, [pr]) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_only_with_event_pr(self): + settings = self.create_settings(event={'pull_request': {'number': 1234, 'base': {'repo': {'full_name': 'owner/repo'}}}}) + pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit, number=1234) + pull_requests = self.create_github_collection([]) + gha = self.do_test_get_pulls(settings, pull_requests, pr, [pr]) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_no_pulls(self): + settings = self.create_settings() + pull_requests = 
self.create_github_collection([]) + gha = self.do_test_get_pulls(settings, pull_requests, None, []) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_closed_pull(self): + settings = self.create_settings(event={'pull_request': {'number': 1234, 'base': {'repo': {'full_name': 'owner/repo'}}}}) + pr = self.create_github_pr(settings.repo, state='closed', head_commit_sha=settings.commit, number=1234) + pull_requests = self.create_github_collection([]) + gha = self.do_test_get_pulls(settings, pull_requests, pr, []) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_head_commit(self): + settings = self.create_settings(event={'pull_request': {'number': 1234, 'base': {'repo': {'full_name': 'owner/repo'}}}}) + pr = self.create_github_pr(settings.repo, state='open', head_commit_sha=settings.commit, merge_commit_sha='merge', number=1234) + pull_requests = self.create_github_collection([]) + gha = self.do_test_get_pulls(settings, pull_requests, pr, [pr]) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_merge_commit(self): + settings = self.create_settings(event={'pull_request': {'number': 1234, 'base': {'repo': {'full_name': 'owner/repo'}}}}) + pr1 = self.create_github_pr(settings.repo, state='open', head_commit_sha='one head commit', merge_commit_sha=settings.commit, number=1234) + pr2 = self.create_github_pr(settings.repo, state='open', head_commit_sha='two head commit', merge_commit_sha='other merge commit', number=1234) + pull_requests = self.create_github_collection([]) + + gha = self.do_test_get_pulls(settings, pull_requests, pr1, [pr1]) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + gha = self.do_test_get_pulls(settings, pull_requests, pr2, []) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def test_get_pulls_forked_repo(self): + settings = self.create_settings() + fork = self.create_github_pr('other/fork', head_commit_sha=settings.commit) + pull_requests = self.create_github_collection([]) + self.do_test_get_pulls(settings, pull_requests, fork, []) + + def test_get_pulls_via_search(self): + settings = self.create_settings(search_pull_requests=True) + pr = self.create_github_pr(settings.repo, head_commit_sha=settings.commit) + search_issues = self.create_github_collection([pr]) + gha = self.do_test_get_pulls(settings, search_issues, None, [pr]) + gha.warning.assert_not_called() + gha.error.assert_not_called() + + def do_test_get_check_run_from_list(self, runs: List[github.CheckRun.CheckRun], expected: Optional[github.CheckRun.CheckRun]): + settings = self.create_settings() + gh, gha, req, repo, commit = self.create_mocks() + publisher = Publisher(settings, gh, gha) + + actual = publisher.get_check_run_from_list(runs) + self.assertEqual(expected, actual) + + def test_get_check_run_from_list_empty(self): + self.do_test_get_check_run_from_list([], None) + + def test_get_check_run_from_list_many(self): + runs = [ + self.mock_check_run(name='Other title', status='completed', started_at=datetime(2021, 3, 19, 12, 2, 4, tzinfo=timezone.utc), summary='summary\n[test-results]:data:application/gzip;base64,digest'), + self.mock_check_run(name='Check Name', status='other status', started_at=datetime(2021, 3, 19, 12, 2, 4, tzinfo=timezone.utc), summary='summary\n[test-results]:data:application/gzip;base64,digest'), + self.mock_check_run(name='Check Name', status='completed', started_at=datetime(2021, 3, 19, 12, 0, 0, tzinfo=timezone.utc), 
summary='summary\n[test-results]:data:application/gzip;base64,digest'), + self.mock_check_run(name='Check Name', status='completed', started_at=datetime(2021, 3, 19, 12, 2, 4, tzinfo=timezone.utc), summary='summary\n[test-results]:data:application/gzip;base64,digest'), + self.mock_check_run(name='Check Name', status='completed', started_at=datetime(2021, 3, 19, 12, 2, 4, tzinfo=timezone.utc), summary='no digest'), + self.mock_check_run(name='Check Name', status='completed', started_at=datetime(2021, 3, 19, 12, 2, 4, tzinfo=timezone.utc), summary=None) + ] + expected = runs[3] + name = runs[0].name + self.do_test_get_check_run_from_list(runs, expected) + + def test_get_stats_from_summary_md(self): + results = create_unit_test_run_results() + summary = get_long_summary_with_digest_md(results, results, 'http://url') + actual = Publisher.get_stats_from_summary_md(summary) + self.assertEqual(results, actual) + + def test_get_stats_from_summary_md_recycled(self): + summary = f'body\n\n{digest_header}H4sIAGpapmIC/1WMyw7CIBQFf6Vh7QK4FMGfMeQWEmJbDI9V479LI6DuzsxJ5iDOrzaR2wSXiaTi84ClRJN92CvSivXI5yX7vqeCWIX4iod/VsGGcMavf8LGGGILxrKfPaba7j3Ghvj0ROeWg86/NQzb5nMFIhCBgnbUzQAIVik+c6W1YU5KVPoqNF04teT1BvQuAoL9AAAA\n:recycle: This comment has been updated with latest results.' + actual = Publisher.get_stats_from_summary_md(summary) + self.assertIsNotNone(actual) + self.assertEqual(6, actual.tests) + + @staticmethod + def mock_check_run(name: str, status: str, started_at: datetime, summary: str) -> mock.Mock: + run = mock.MagicMock(status=status, started_at=started_at, output=mock.MagicMock(summary=summary)) + run.name = name + return run + + def do_test_get_stats_from_commit(self, + settings: Settings, + commit_sha: Optional[str], + commit: Optional[mock.Mock], + digest: Optional[str], + check_names: Optional[List[str]], + expected: Optional[Union[UnitTestRunResults, mock.Mock]]): + gh, gha, req, repo, commit = self.create_mocks(commit=commit, digest=digest, check_names=check_names) + publisher = Publisher(settings, gh, gha) + + actual = publisher.get_stats_from_commit(commit_sha) + actual_dict = None + if actual is not None: + actual_dict = actual.to_dict() + del actual_dict['errors'] + + expected_dict = None + if expected is not None: + expected_dict = expected.to_dict() + del expected_dict['errors'] + + self.assertEqual(expected_dict, actual_dict) + if commit_sha is not None and commit_sha != '0000000000000000000000000000000000000000': + repo.get_commit.assert_called_once_with(commit_sha) + if commit is not None: + commit.get_check_runs.assert_called_once_with() + + if expected is None and \ + commit_sha is not None and \ + commit_sha != '0000000000000000000000000000000000000000': + gha.error.assert_called_once_with('Could not find commit {}'.format(commit_sha)) + + def test_get_stats_from_commit(self): + settings = self.create_settings() + self.do_test_get_stats_from_commit( + settings, 'base commit', mock.Mock(), self.base_digest, [settings.check_name], self.get_stats('base') + ) + + def test_get_stats_from_commit_with_no_commit(self): + settings = self.create_settings() + self.do_test_get_stats_from_commit(settings, 'base commit', None, None, None, None) + + def test_get_stats_from_commit_with_none_commit_sha(self): + settings = self.create_settings() + self.do_test_get_stats_from_commit(settings, None, mock.Mock(), self.base_digest, [settings.check_name], None) + + def test_get_stats_from_commit_with_zeros_commit_sha(self): + settings = self.create_settings() + self.do_test_get_stats_from_commit( + 
settings, '0000000000000000000000000000000000000000', mock.Mock(), self.base_digest, [settings.check_name], None + ) + + def test_get_stats_from_commit_with_multiple_check_runs(self): + settings = self.create_settings() + self.do_test_get_stats_from_commit( + settings, 'base commit', mock.Mock(), self.base_digest, + [settings.check_name, 'other check', 'more checks'], + self.get_stats('base') + ) + + def test_get_stats_from_commit_not_exists(self): + def exception(commit: str): + raise GithubException(422, {'message': f"No commit found for SHA: {commit}"}, headers=None) + + settings = self.create_settings() + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + repo.get_commit = mock.Mock(side_effect=exception) + publisher = Publisher(settings, gh, gha) + + actual = publisher.get_stats_from_commit('commitsha') + self.assertEqual(None, actual) + gha.warning.assert_called_once_with("{'message': 'No commit found for SHA: commitsha'}") + gha.error.assert_called_once_with("Could not find commit commitsha") + + all_tests_annotation = mock.Mock() + all_tests_annotation.title = '1 test found' + all_tests_annotation.message = 'There is 1 test, see "Raw output" for the name of the test' + all_tests_annotation.raw_details = 'class ‑ test1' + + skipped_tests_annotation = mock.Mock() + skipped_tests_annotation.title = '1 skipped test found' + skipped_tests_annotation.message = 'There is 1 skipped test, see "Raw output" for the name of the skipped test' + skipped_tests_annotation.raw_details = 'class ‑ test4' + + other_annotation = mock.Mock() + other_annotation.title = None + other_annotation.message = 'message one' + other_annotation.raw_details = None + + all_annotations = [all_tests_annotation, skipped_tests_annotation, other_annotation] + + def test_get_test_lists_from_none_check_run(self): + self.assertEqual((None, None), Publisher.get_test_lists_from_check_run(None)) + + def test_get_test_lists_from_check_run_single_test(self): + check_run = mock.Mock() + check_run.get_annotations = mock.Mock(return_value=self.all_annotations) + self.assertEqual((['class ‑ test1'], ['class ‑ test4']), Publisher.get_test_lists_from_check_run(check_run)) + + def test_get_test_lists_from_check_run_more_tests(self): + annotation1 = mock.Mock() + annotation1.title = None + annotation1.message = 'message one' + annotation1.raw_details = None + + annotation2 = mock.Mock() + annotation2.title = '3 tests found' + annotation2.message = 'There are 3 tests, see "Raw output" for the full list of tests.' + annotation2.raw_details = 'test one\ntest two\ntest three' + + annotation3 = mock.Mock() + annotation3.title = '3 skipped tests found' + annotation3.message = 'There are 3 skipped tests, see "Raw output" for the full list of skipped tests.' + annotation3.raw_details = 'skip one\nskip two\nskip three' + + annotations = [annotation1, annotation2, annotation3] + check_run = mock.Mock() + check_run.get_annotations = mock.Mock(return_value=annotations) + self.assertEqual( + (['test one', 'test two', 'test three'], ['skip one', 'skip two', 'skip three']), + Publisher.get_test_lists_from_check_run(check_run) + ) + + def test_get_test_lists_from_check_run_chunked_tests(self): + annotation1 = mock.Mock() + annotation1.title = None + annotation1.message = 'message one' + annotation1.raw_details = None + + annotation2 = mock.Mock() + annotation2.title = '4 tests found (test 1 to 2)' + annotation2.message = 'There are 4 tests, see "Raw output" for the list of tests 1 to 2.' 
+ annotation2.raw_details = 'test one\ntest two' + + annotation3 = mock.Mock() + annotation3.title = '4 tests found (test 3 to 4)' + annotation3.message = 'There are 4 tests, see "Raw output" for the list of tests 3 to 4.' + annotation3.raw_details = 'test three\ntest four' + + annotation4 = mock.Mock() + annotation4.title = '4 skipped tests found (test 1 to 2)' + annotation4.message = 'There are 4 skipped tests, see "Raw output" for the list of skipped tests 1 to 2.' + annotation4.raw_details = 'skip one\nskip two' + + annotation5 = mock.Mock() + annotation5.title = '4 skipped tests found (test 3 to 4)' + annotation5.message = 'There are 4 skipped tests, see "Raw output" for the list of skipped tests 3 to 4.' + annotation5.raw_details = 'skip three\nskip four' + + annotations = [annotation1, annotation2, annotation3, annotation4, annotation5] + check_run = mock.Mock() + check_run.get_annotations = mock.Mock(return_value=annotations) + self.assertEqual( + (['test one', 'test two', 'test three', 'test four'], ['skip one', 'skip two', 'skip three', 'skip four']), + Publisher.get_test_lists_from_check_run(check_run) + ) + + def test_get_test_lists_from_check_run_none_raw_details(self): + annotation1 = mock.Mock() + annotation1.title = '1 test found' + annotation1.message = 'There is 1 test, see "Raw output" for the name of the test' + annotation1.raw_details = None + + annotation2 = mock.Mock() + annotation2.title = '1 skipped test found' + annotation2.message = 'There is 1 skipped test, see "Raw output" for the name of the skipped test' + annotation2.raw_details = None + + annotations = [annotation1, annotation2] + check_run = mock.Mock() + check_run.get_annotations = mock.Mock(return_value=annotations) + self.assertEqual((None, None), Publisher.get_test_lists_from_check_run(check_run)) + + def test_get_test_lists_from_generated_annotations(self): + cases = create_unit_test_case_results({ + (None, 'class', 'test abcd'): {'success': [None]}, + (None, 'class', 'test efgh'): {'skipped': [None]}, + (None, 'class', 'test ijkl'): {'skipped': [None]}, + }) + + settings = self.create_settings(check_run_annotation=[all_tests_list, skipped_tests_list]) + gh = mock.MagicMock() + publisher = Publisher(settings, gh, None) + annotations = publisher.get_test_list_annotations(cases, max_chunk_size=42) + + check_run = mock.Mock() + check_run.get_annotations = mock.Mock(return_value=annotations) + self.assertEqual( + (['class ‑ test abcd', 'class ‑ test efgh', 'class ‑ test ijkl'], ['class ‑ test efgh', 'class ‑ test ijkl']), + Publisher.get_test_lists_from_check_run(check_run) + ) + + def test_publish_check_without_annotations(self): + self.do_test_publish_check_without_base_stats([], [none_annotations]) + + def test_publish_check_with_default_annotations(self): + self.do_test_publish_check_without_base_stats([], default_annotations) + + def test_publish_check_with_all_tests_annotations(self): + self.do_test_publish_check_without_base_stats([], [all_tests_list]) + + def test_publish_check_with_skipped_tests_annotations(self): + self.do_test_publish_check_without_base_stats([], [skipped_tests_list]) + + def test_publish_check_without_base_stats(self): + self.do_test_publish_check_without_base_stats([]) + + def test_publish_check_without_base_stats_with_errors(self): + self.do_test_publish_check_without_base_stats(errors) + + def do_test_publish_check_without_base_stats(self, errors: List[ParseError], annotations: List[str] = default_annotations): + settings = self.create_settings(event={}, 
check_run_annotation=annotations) + gh, gha, req, repo, commit = self.create_mocks(commit=mock.Mock(), digest=None, check_names=[]) + publisher = Publisher(settings, gh, gha) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + check_run, before_check_run = publisher.publish_check(self.stats.with_errors(errors), self.cases, 'conclusion') + + repo.get_commit.assert_not_called() + error_annotations = [get_error_annotation(error).to_dict() for error in errors] + annotations = error_annotations + [ + {'path': 'test file', 'start_line': 0, 'end_line': 0, 'annotation_level': 'warning', 'message': 'result file [took 1s]', 'title': '1 out of 2 runs failed: test (class)', 'raw_details': 'message\ncontent\nstdout\nstderr'}, + {'path': 'test file', 'start_line': 0, 'end_line': 0, 'annotation_level': 'failure', 'message': 'result file [took 1s]', 'title': '1 out of 2 runs with error: test2 (class)', 'raw_details': 'error message\nerror content\nerror stdout\nerror stderr'} + ] + ( + [ + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There is 1 skipped test, see "Raw output" for the name of the skipped test.', 'title': '1 skipped test found', 'raw_details': 'class ‑ test3'} + ] if skipped_tests_list in annotations else [] + ) + ( + [ + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', 'title': '3 tests found', 'raw_details': 'class ‑ test\nclass ‑ test2\nclass ‑ test3'} + ] if all_tests_list in annotations else [] + ) + + create_check_run_kwargs = dict( + name=settings.check_name, + head_sha=settings.commit, + status='completed', + conclusion='conclusion', + output={ + 'title': '{}7 errors, 6 fail, 5 skipped, 4 pass in 3s' + .format('{} parse errors, '.format(len(errors)) if len(errors) > 0 else ''), + 'summary': f'\u205f\u20041 files\u2004\u2003{{errors}}2 suites\u2004\u2003\u20023s {duration_label_md}\n' + f'22 {all_tests_label_md}\u20034 {passed_tests_label_md}\u20035 {skipped_tests_label_md}\u2003\u205f\u20046 {failed_tests_label_md}\u2003\u205f\u20047 {test_errors_label_md}\n' + f'38 runs\u2006\u20038 {passed_tests_label_md}\u20039 {skipped_tests_label_md}\u200310 {failed_tests_label_md}\u200311 {test_errors_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/0WOSQqEMBBFryJZu+g4tK2XkRAVCoc0lWQl3t' + '3vULqr9z48alUDTb1XTaLTRPlI4YQM0EU2gdwCzIEYwjllAq2P' + '1sIUrxjpD1E+YjA0QXwf0TM7hqlgOC5HMP/dt/RevnK18F3THx' + 'FS08fz1s0zBZBc2w5zHdX73QAAAA==\n'.format(errors='{} errors\u2004\u2003'.format(len(errors)) if len(errors) > 0 else ''), + 'annotations': annotations + } + ) + repo.create_check_run.assert_called_once_with(**create_check_run_kwargs) + + # this checks that publisher.publish_check returned + # the result of the last call to repo.create_check_run + self.assertIsInstance(check_run, mock.Mock) + self.assertTrue(hasattr(check_run, 'create_check_run_kwargs')) + self.assertEqual(create_check_run_kwargs, check_run.create_check_run_kwargs) + self.assertIsNone(before_check_run) + + # check the json output has been provided + title_errors = '{} parse errors, '.format(len(errors)) if len(errors) > 0 else '' + summary_errors = '{} errors\u2004\u2003'.format(len(errors)) if len(errors) > 0 else '' + gha.add_to_output.assert_called_once_with( + 'json', + '{' + f'"title": "{title_errors}7 errors, 6 fail, 5 skipped, 4 pass in 3s", ' + 
f'"summary": "  1 files  {summary_errors}2 suites   3s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"duration of all tests\\")\\n22 tests 4 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\") 5 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\")   6 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\")   7 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n38 runs  8 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\") 9 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\") 10 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\") 11 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n\\nResults for commit commit.\\n", ' + '"conclusion": "conclusion", ' + '"stats": {"files": 1, ' + f'"errors": {len(errors)}, ' + '"suites": 2, "duration": 3, "tests": 22, "tests_succ": 4, "tests_skip": 5, "tests_fail": 6, "tests_error": 7, "runs": 38, "runs_succ": 8, "runs_skip": 9, "runs_fail": 10, "runs_error": 11, "commit": "commit"}, ' + f'"annotations": {len(annotations)}, ' + f'"check_url": "{check_run.html_url}", ' + '"formatted": {' + '"stats": {"files": "1", ' + f'"errors": "{len(errors)}", ' + '"suites": "2", "duration": "3", "tests": "22", "tests_succ": "4", "tests_skip": "5", "tests_fail": "6", "tests_error": "7", "runs": "38", "runs_succ": "8", "runs_skip": "9", "runs_fail": "10", "runs_error": "11", "commit": "commit"}' + '}' + '}' + ) + + def test_publish_check_with_base_stats(self): + self.do_test_publish_check_with_base_stats([]) + + def test_publish_check_with_base_stats_with_errors(self): + self.do_test_publish_check_with_base_stats(errors) + + def do_test_publish_check_with_base_stats(self, errors: List[ParseError]): + earlier_commit = 'past' + settings = self.create_settings(event={'before': earlier_commit}) + gh, gha, req, repo, commit = self.create_mocks(commit=mock.Mock(), digest=self.past_digest, check_names=[settings.check_name]) + publisher = Publisher(settings, gh, gha) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + check_run, before_check_run = publisher.publish_check(self.stats.with_errors(errors), self.cases, 'conclusion') + + repo.get_commit.assert_called_once_with(earlier_commit) + error_annotations = [get_error_annotation(error).to_dict() for error in errors] + create_check_run_kwargs = dict( + name=settings.check_name, + head_sha=settings.commit, + status='completed', + conclusion='conclusion', + output={ + 'title': '{}7 errors, 6 fail, 5 skipped, 4 pass in 3s' + .format('{} parse errors, '.format(len(errors)) if len(errors) > 0 else ''), + 'summary': f'\u205f\u20041 files\u2004 ±0\u2002\u2003{{errors}}2 suites\u2004 ±0\u2002\u2003\u20023s {duration_label_md} ±0s\n' + f'22 {all_tests_label_md} +1\u2002\u20034 {passed_tests_label_md} 
\u2006-\u200a\u205f\u20048\u2002\u20035 {skipped_tests_label_md} +1\u2002\u2003\u205f\u20046 {failed_tests_label_md} +4\u2002\u2003\u205f\u20047 {test_errors_label_md} +\u205f\u20044\u2002\n' + f'38 runs\u2006 +1\u2002\u20038 {passed_tests_label_md} \u2006-\u200a17\u2002\u20039 {skipped_tests_label_md} +2\u2002\u200310 {failed_tests_label_md} +6\u2002\u200311 {test_errors_label_md} +10\u2002\n' + '\n' + 'Results for commit commit.\u2003± Comparison against earlier commit past.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/0WOSQqEMBBFryJZu+g4tK2XkRAVCoc0lWQl3t' + '3vULqr9z48alUDTb1XTaLTRPlI4YQM0EU2gdwCzIEYwjllAq2P' + '1sIUrxjpD1E+YjA0QXwf0TM7hqlgOC5HMP/dt/RevnK18F3THx' + 'FS08fz1s0zBZBc2w5zHdX73QAAAA==\n'.format(errors='{} errors\u2004\u2003'.format(len(errors)) if len(errors) > 0 else ''), + 'annotations': error_annotations + [ + {'path': 'test file', 'start_line': 0, 'end_line': 0, 'annotation_level': 'warning', 'message': 'result file [took 1s]', 'title': '1 out of 2 runs failed: test (class)', 'raw_details': 'message\ncontent\nstdout\nstderr'}, + {'path': 'test file', 'start_line': 0, 'end_line': 0, 'annotation_level': 'failure', 'message': 'result file [took 1s]', 'title': '1 out of 2 runs with error: test2 (class)', 'raw_details': 'error message\nerror content\nerror stdout\nerror stderr'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There is 1 skipped test, see "Raw output" for the name of the skipped test.', 'title': '1 skipped test found', 'raw_details': 'class ‑ test3'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', 'title': '3 tests found', 'raw_details': 'class ‑ test\nclass ‑ test2\nclass ‑ test3'} + ] + } + ) + repo.create_check_run.assert_called_once_with(**create_check_run_kwargs) + + # this checks that publisher.publish_check returned + # the result of the last call to repo.create_check_run + self.assertIsInstance(check_run, mock.Mock) + self.assertTrue(hasattr(check_run, 'create_check_run_kwargs')) + self.assertEqual(create_check_run_kwargs, check_run.create_check_run_kwargs) + self.assertIsInstance(before_check_run, mock.Mock) + + # check the json output has been provided + title_errors = '{} parse errors, '.format(len(errors)) if len(errors) > 0 else '' + summary_errors = '{} errors\u2004\u2003'.format(len(errors)) if len(errors) > 0 else '' + gha.add_to_output.assert_called_once_with( + 'json', + '{' + f'"title": "{title_errors}7 errors, 6 fail, 5 skipped, 4 pass in 3s", ' + f'"summary": "  1 files  ±0  {summary_errors}2 suites  ±0   3s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"duration of all tests\\") ±0s\\n22 tests +1  4 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\")  -   8  5 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\") +1    6 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\") +4    7 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\") +  4 \\n38 runs  +1  8 
[:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\")  - 17  9 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\") +2  10 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\") +6  11 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\") +10 \\n\\nResults for commit commit. ± Comparison against earlier commit past.\\n", ' + '"conclusion": "conclusion", ' + '"stats": {"files": 1, ' + f'"errors": {len(errors)}, ' + '"suites": 2, "duration": 3, "tests": 22, "tests_succ": 4, "tests_skip": 5, "tests_fail": 6, "tests_error": 7, "runs": 38, "runs_succ": 8, "runs_skip": 9, "runs_fail": 10, "runs_error": 11, "commit": "commit"}, ' + '"stats_with_delta": {"files": {"number": 1, "delta": 0}, ' + f'"errors": {len(errors)}, ' + '"suites": {"number": 2, "delta": 0}, "duration": {"duration": 3, "delta": 0}, "tests": {"number": 22, "delta": 1}, "tests_succ": {"number": 4, "delta": -8}, "tests_skip": {"number": 5, "delta": 1}, "tests_fail": {"number": 6, "delta": 4}, "tests_error": {"number": 7, "delta": 4}, "runs": {"number": 38, "delta": 1}, "runs_succ": {"number": 8, "delta": -17}, "runs_skip": {"number": 9, "delta": 2}, "runs_fail": {"number": 10, "delta": 6}, "runs_error": {"number": 11, "delta": 10}, "commit": "commit", "reference_type": "earlier", "reference_commit": "past"}, ' + f'"annotations": {4 + len(errors)}, ' + f'"check_url": "{check_run.html_url}", ' + '"formatted": {' + '"stats": {"files": "1", ' + f'"errors": "{len(errors)}", ' + '"suites": "2", "duration": "3", "tests": "22", "tests_succ": "4", "tests_skip": "5", "tests_fail": "6", "tests_error": "7", "runs": "38", "runs_succ": "8", "runs_skip": "9", "runs_fail": "10", "runs_error": "11", "commit": "commit"}, ' + '"stats_with_delta": {"files": {"number": "1", "delta": "0"}, ' + f'"errors": "{len(errors)}", ' + '"suites": {"number": "2", "delta": "0"}, "duration": {"duration": "3", "delta": "0"}, "tests": {"number": "22", "delta": "1"}, "tests_succ": {"number": "4", "delta": "-8"}, "tests_skip": {"number": "5", "delta": "1"}, "tests_fail": {"number": "6", "delta": "4"}, "tests_error": {"number": "7", "delta": "4"}, "runs": {"number": "38", "delta": "1"}, "runs_succ": {"number": "8", "delta": "-17"}, "runs_skip": {"number": "9", "delta": "2"}, "runs_fail": {"number": "10", "delta": "6"}, "runs_error": {"number": "11", "delta": "10"}, "commit": "commit", "reference_type": "earlier", "reference_commit": "past"}' + '}' + '}' + ) + + def test_publish_check_without_compare(self): + earlier_commit = 'past' + settings = self.create_settings(event={'before': earlier_commit}, compare_earlier=False) + gh, gha, req, repo, commit = self.create_mocks(commit=mock.Mock(), digest=self.past_digest, check_names=[settings.check_name]) + publisher = Publisher(settings, gh, gha) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + check_run, before_check_run = publisher.publish_check(self.stats, self.cases, 'conclusion') + + repo.get_commit.assert_not_called() + create_check_run_kwargs = dict( + name=settings.check_name, + head_sha=settings.commit, + status='completed', + conclusion='conclusion', + output={ + 'title': '7 errors, 6 fail, 5 skipped, 4 pass in 3s', + 'summary': 
f'\u205f\u20041 files\u2004\u20032 suites\u2004\u2003\u20023s {duration_label_md}\n' + f'22 {all_tests_label_md}\u20034 {passed_tests_label_md}\u20035 {skipped_tests_label_md}\u2003\u205f\u20046 {failed_tests_label_md}\u2003\u205f\u20047 {test_errors_label_md}\n' + f'38 runs\u2006\u20038 {passed_tests_label_md}\u20039 {skipped_tests_label_md}\u200310 {failed_tests_label_md}\u200311 {test_errors_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/0WOSQqEMBBFryJ' + 'Zu+g4tK2XkRAVCoc0lWQl3t3vULqr9z48alUDTb1XTaLTRPlI4YQM0EU2gdwCzIEYwjllAq' + '2P1sIUrxjpD1E+YjA0QXwf0TM7hqlgOC5HMP/dt/RevnK18F3THxFS08fz1s0zBZBc2w5zH' + 'dX73QAAAA==\n', + 'annotations': [ + {'path': 'test file', 'start_line': 0, 'end_line': 0, 'annotation_level': 'warning', 'message': 'result file [took 1s]', 'title': '1 out of 2 runs failed: test (class)', 'raw_details': 'message\ncontent\nstdout\nstderr'}, + {'path': 'test file', 'start_line': 0, 'end_line': 0, 'annotation_level': 'failure', 'message': 'result file [took 1s]', 'title': '1 out of 2 runs with error: test2 (class)', 'raw_details': 'error message\nerror content\nerror stdout\nerror stderr'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There is 1 skipped test, see "Raw output" for the name of the skipped test.', 'title': '1 skipped test found', 'raw_details': 'class ‑ test3'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 3 tests, see "Raw output" for the full list of tests.', 'title': '3 tests found', 'raw_details': 'class ‑ test\nclass ‑ test2\nclass ‑ test3'} + ] + } + ) + repo.create_check_run.assert_called_once_with(**create_check_run_kwargs) + + # this checks that publisher.publish_check returned + # the result of the last call to repo.create_check_run + self.assertIsInstance(check_run, mock.Mock) + self.assertTrue(hasattr(check_run, 'create_check_run_kwargs')) + self.assertEqual(create_check_run_kwargs, check_run.create_check_run_kwargs) + self.assertIsNone(before_check_run) + + def test_publish_check_with_multiple_annotation_pages(self): + earlier_commit = 'past' + settings = self.create_settings(event={'before': earlier_commit}) + gh, gha, req, repo, commit = self.create_mocks(commit=mock.Mock(), digest=self.past_digest, check_names=[settings.check_name]) + publisher = Publisher(settings, gh, gha) + + # generate a lot of cases + cases = create_unit_test_case_results({ + (None, 'class', f'test{i}'): dict( + failure=[ + UnitTestCase( + result_file='result file', test_file='test file', line=i, + class_name='class', test_name=f'test{i}', + result='failure', message=f'message{i}', content=f'content{i}', + stdout=f'stdout{i}', stderr=f'stderr{i}', + time=1.234 + i / 1000 + ) + ] + ) + for i in range(1, 151) + }) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + check_run, before_check_run = publisher.publish_check(self.stats, cases, 'conclusion') + + repo.get_commit.assert_called_once_with(earlier_commit) + # we expect a single call to create_check_run + create_check_run_kwargs = dict( + name=settings.check_name, + head_sha=settings.commit, + status='completed', + conclusion='conclusion', + output={ + 'title': '7 errors, 6 fail, 5 skipped, 4 pass in 3s', + 'summary': f'\u205f\u20041 files\u2004 ±0\u2002\u20032 suites\u2004 ±0\u2002\u2003\u20023s {duration_label_md} ±0s\n' + f'22 {all_tests_label_md} +1\u2002\u20034 {passed_tests_label_md} 
\u2006-\u200a\u205f\u20048\u2002\u20035 {skipped_tests_label_md} +1\u2002\u2003\u205f\u20046 {failed_tests_label_md} +4\u2002\u2003\u205f\u20047 {test_errors_label_md} +\u205f\u20044\u2002\n' + f'38 runs\u2006 +1\u2002\u20038 {passed_tests_label_md} \u2006-\u200a17\u2002\u20039 {skipped_tests_label_md} +2\u2002\u200310 {failed_tests_label_md} +6\u2002\u200311 {test_errors_label_md} +10\u2002\n' + '\n' + 'Results for commit commit.\u2003± Comparison against earlier commit past.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/0WOSQqEMBBFryJZu+g4tK2XkRAVCoc0lWQl3t' + '3vULqr9z48alUDTb1XTaLTRPlI4YQM0EU2gdwCzIEYwjllAq2P' + '1sIUrxjpD1E+YjA0QXwf0TM7hqlgOC5HMP/dt/RevnK18F3THx' + 'FS08fz1s0zBZBc2w5zHdX73QAAAA==\n', + 'annotations': ([ + {'path': 'test file', 'start_line': i, 'end_line': i, 'annotation_level': 'warning', 'message': 'result file [took 1s]', 'title': f'test{i} (class) failed', 'raw_details': f'message{i}\ncontent{i}\nstdout{i}\nstderr{i}'} + # we expect the first 50 annotations in the create call + for i in range(1, 51) + ]) + } + ) + repo.create_check_run.assert_called_once_with(**create_check_run_kwargs) + + # this checks that publisher.publish_check returned + # the result of the call to repo.create_check_run + self.assertIsInstance(check_run, mock.Mock) + self.assertTrue(hasattr(check_run, 'create_check_run_kwargs')) + self.assertEqual(create_check_run_kwargs, check_run.create_check_run_kwargs) + self.assertIsInstance(before_check_run, mock.Mock) + + # we expect the edit method of the created check to be called for the remaining annotations + # we expect three calls, each batch starting at these starts, + # then a last batch with notice annotations + outputs = [ + { + 'title': '7 errors, 6 fail, 5 skipped, 4 pass in 3s', + 'summary': f'\u205f\u20041 files\u2004 ±0\u2002\u20032 suites\u2004 ±0\u2002\u2003\u20023s {duration_label_md} ±0s\n' + f'22 {all_tests_label_md} +1\u2002\u20034 {passed_tests_label_md} \u2006-\u200a\u205f\u20048\u2002\u20035 {skipped_tests_label_md} +1\u2002\u2003\u205f\u20046 {failed_tests_label_md} +4\u2002\u2003\u205f\u20047 {test_errors_label_md} +\u205f\u20044\u2002\n' + f'38 runs\u2006 +1\u2002\u20038 {passed_tests_label_md} \u2006-\u200a17\u2002\u20039 {skipped_tests_label_md} +2\u2002\u200310 {failed_tests_label_md} +6\u2002\u200311 {test_errors_label_md} +10\u2002\n' + '\n' + 'Results for commit commit.\u2003± Comparison against earlier commit past.\n' + '\n' + '[test-results]:data:application/gzip;base64,' + 'H4sIAAAAAAAC/0WOSQqEMBBFryJZu+g4tK2XkRAVCoc0lWQl3t' + '3vULqr9z48alUDTb1XTaLTRPlI4YQM0EU2gdwCzIEYwjllAq2P' + '1sIUrxjpD1E+YjA0QXwf0TM7hqlgOC5HMP/dt/RevnK18F3THx' + 'FS08fz1s0zBZBc2w5zHdX73QAAAA==\n', + 'annotations': ([ + {'path': 'test file', 'start_line': i, 'end_line': i, 'annotation_level': 'warning', 'message': 'result file [took 1s]', 'title': f'test{i} (class) failed', 'raw_details': f'message{i}\ncontent{i}\nstdout{i}\nstderr{i}'} + # for each edit we expect a batch of 50 annotations starting at start + for i in range(start, start + 50) + ] if start < 151 else [ + # and a batch of the remainder annotation + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 150 tests, see "Raw output" for the full list of tests.', 'title': '150 tests found', 'raw_details': '\n'.join(sorted([f'class ‑ test{i}' for i in range(1, 151)]))} + ]) + } + for start in [51, 101, 151] + ] + + self.assertEqual(check_run.edit.call_args_list, [mock.call(output=output) for output in 
outputs]) + + publish_data = PublishData( + title='title', + summary='summary', + conclusion='conclusion', + stats=UnitTestRunResults( + files=12345, + errors=[ParseError('file', 'message', 1, 2, exception=ValueError("Invalid value"))], + suites=2, + suite_details=[UnitTestSuite('suite', 7, 3, 2, 1, 'stdout', 'stderr')], + duration=3456, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8901, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=1345, + commit='commit' + ), + stats_with_delta=UnitTestRunDeltaResults( + files={'number': 1234, 'delta': -1234}, + errors=[ + ParseError('file', 'message', 1, 2, exception=ValueError("Invalid value")), + ParseError('file2', 'message2', 2, 4) + ], + suites={'number': 2, 'delta': -2}, + duration={'number': 3456, 'delta': -3456}, + tests={'number': 4, 'delta': -4}, tests_succ={'number': 5, 'delta': -5}, + tests_skip={'number': 6, 'delta': -6}, tests_fail={'number': 7, 'delta': -7}, + tests_error={'number': 8, 'delta': -8}, + runs={'number': 9, 'delta': -9}, runs_succ={'number': 10, 'delta': -10}, + runs_skip={'number': 11, 'delta': -11}, runs_fail={'number': 12, 'delta': -12}, + runs_error={'number': 1345, 'delta': -1345}, + commit='commit', + reference_type='type', reference_commit='ref' + ), + annotations=[Annotation( + path='path', + start_line=1, + end_line=2, + start_column=3, + end_column=4, + annotation_level='failure', + message='message', + title=f'Error processing result file', + raw_details='file' + )], + check_url='http://check-run.url', + cases=create_unit_test_case_results({ + (None, 'class name', 'test name'): {"success": [ + UnitTestCase( + class_name='test.classpath.classname', + content='content', + line=1, + message='message', + result='success', + result_file='/path/to/test/test.classpath.classname', + stderr='stderr', + stdout='stdout', + test_file='file1', + test_name='casename', + time=0.1 + ) + ]}, + }) + ) + + def test_publish_check_with_suite_details(self): + results = get_test_results(ParsedUnitTestResultsWithCommit( + files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, + suite_details=[ + UnitTestSuite(name='suite', tests=2, skipped=3, failures=4, errors=5, stdout='stdout log', stderr='stderr log'), + UnitTestSuite(name='suite2', tests=1, skipped=1, failures=1, errors=1, stdout=None, stderr=None), + ], + cases=[ + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', 
result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7), + ], + commit='commit' + ), False) + stats = get_stats(results) + + with tempfile.TemporaryDirectory() as path: + filepath = os.path.join(path, 'file.json') + settings = self.create_settings(event={}, json_file=filepath, json_suite_details=True) + gh, gha, req, repo, commit = self.create_mocks(commit=mock.Mock(), digest=None, check_names=[]) + publisher = Publisher(settings, gh, gha) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + check_run, before_check_run = publisher.publish_check(stats, results.case_results, 'conclusion') + + repo.get_commit.assert_not_called() + + create_check_run_kwargs = dict( + name=settings.check_name, + head_sha=settings.commit, + status='completed', + conclusion='conclusion', + output={ + 'title': '1 parse errors, 1 errors, 3 fail, 2 skipped, 1 pass in 7s', + 'summary': f'1 files\u2004\u2003\u205f\u20041 errors\u2004\u20032 suites\u2004\u2003\u20027s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'7 tests\u2003\u205f\u20041 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests")\u20032 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests")\u20033 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests")\u20031 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors")\n' + f'3 runs\u2006\u2003-12 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests")\u20034 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests")\u20035 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests")\u20036 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors")\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MSwqAMAxEryJd68I/eBmRWiH4qST' + 'tSry7URNxN+8NM4eZYHFkuiRPE0MRwgMFwxhxCOA3xpaRi0D/3FO0VoYiZthl/IppgIVF+QmH6FE2GDeS8o56l+' + 'XFZ96/SlnuamV9a1hYv64QGDSdF7scnZDbAAAA\n', + 'annotations': [ + {'path': 'file', 'start_line': 1, 'end_line': 1, 'start_column': 2, 'end_column': 2, 'annotation_level': 'failure', 'message': 'error', 'title': 'Error processing result file', 'raw_details': 'file'}, + {'path': 'test', 'start_line': 123, 'end_line': 123, 'annotation_level': 'warning', 'message': 'result [took 3s]', 'title': 'test3 (class1) failed', 'raw_details': 'message3\ncontent3\nstdout3\nstderr3'}, + {'path': 'test', 'start_line': 123, 'end_line': 123, 'annotation_level': 'failure', 'message': 'result [took 4s]', 'title': 'test1 (class2) with error', 'raw_details': 'message4\ncontent4\nstdout4\nstderr4'}, + {'path': 'test', 'start_line': 123, 
'end_line': 123, 'annotation_level': 'warning', 'message': 'result [took 6s]', 'title': 'test3 (class2) failed', 'raw_details': 'message6\ncontent6\nstdout6\nstderr6'}, + {'path': 'test', 'start_line': 123, 'end_line': 123, 'annotation_level': 'warning', 'message': 'result [took 7s]', 'title': 'test4 (class2) failed', 'raw_details': 'message7\ncontent7\nstdout7\nstderr7'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 2 skipped tests, see "Raw output" for the full list of skipped tests.', 'title': '2 skipped tests found', 'raw_details': 'class1 ‑ test2\nclass2 ‑ test2'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 7 tests, see "Raw output" for the full list of tests.', 'title': '7 tests found', 'raw_details': 'class1 ‑ test1\nclass1 ‑ test2\nclass1 ‑ test3\nclass2 ‑ test1\nclass2 ‑ test2\nclass2 ‑ test3\nclass2 ‑ test4'} + ] + } + ) + repo.create_check_run.assert_called_once_with(**create_check_run_kwargs) + + # this checks that publisher.publish_check returned + # the result of the last call to repo.create_check_run + self.assertIsInstance(check_run, mock.Mock) + self.assertTrue(hasattr(check_run, 'create_check_run_kwargs')) + self.assertEqual(create_check_run_kwargs, check_run.create_check_run_kwargs) + self.assertIsNone(before_check_run) + + # assert the json file + with open(filepath, encoding='utf-8') as r: + actual = r.read() + self.assertEqual( + '{' + '"title": "1 parse errors, 1 errors, 3 fail, 2 skipped, 1 pass in 7s", ' + '"summary": "' + f'1 files    1 errors  2 suites   7s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"duration of all tests\\")\\n' + f'7 tests   1 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\") 2 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\") 3 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\") 1 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + f'3 runs  -12 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\") 4 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\") 5 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\") 6 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + '\\n' + 'Results for commit commit.\\n", ' + '"conclusion": "conclusion", ' + '"stats": {"files": 1, "errors": [{"file": "file", "message": "error", "line": 1, "column": 2}], "suites": 2, "duration": 7, ' + '"suite_details": [' + '{"name": "suite", "tests": 2, "skipped": 3, "failures": 4, "errors": 5, "stdout": "stdout log", "stderr": "stderr log"}, {"name": "suite2", "tests": 1, "skipped": 1, "failures": 1, "errors": 1}' + '], ' + '"tests": 7, "tests_succ": 1, "tests_skip": 2, "tests_fail": 3, "tests_error": 1, "runs": 3, "runs_succ": -12, "runs_skip": 4, "runs_fail": 5, "runs_error": 6, 
"commit": "commit"}, ' + '"annotations": [' + '{"path": "file", "start_line": 1, "end_line": 1, "start_column": 2, "end_column": 2, "annotation_level": "failure", "message": "error", "title": "Error processing result file", "raw_details": "file"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "warning", "message": "result [took 3s]", "title": "test3 (class1) failed", "raw_details": "message3\\ncontent3\\nstdout3\\nstderr3"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "failure", "message": "result [took 4s]", "title": "test1 (class2) with error", "raw_details": "message4\\ncontent4\\nstdout4\\nstderr4"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "warning", "message": "result [took 6s]", "title": "test3 (class2) failed", "raw_details": "message6\\ncontent6\\nstdout6\\nstderr6"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "warning", "message": "result [took 7s]", "title": "test4 (class2) failed", "raw_details": "message7\\ncontent7\\nstdout7\\nstderr7"}, ' + '{"path": ".github", "start_line": 0, "end_line": 0, "annotation_level": "notice", "message": "There are 2 skipped tests, see \\"Raw output\\" for the full list of skipped tests.", "title": "2 skipped tests found", "raw_details": "class1 ‑ test2\\nclass2 ‑ test2"}, ' + '{"path": ".github", "start_line": 0, "end_line": 0, "annotation_level": "notice", "message": "There are 7 tests, see \\"Raw output\\" for the full list of tests.", "title": "7 tests found", "raw_details": "class1 ‑ test1\\nclass1 ‑ test2\\nclass1 ‑ test3\\nclass2 ‑ test1\\nclass2 ‑ test2\\nclass2 ‑ test3\\nclass2 ‑ test4"}' + '], ' + '"check_url": "mock url", ' + '"formatted": {"stats": {"files": "1", "errors": [{"file": "file", "message": "error", "line": 1, "column": 2}], "suites": "2", "duration": "7", ' + '"suite_details": [' + '{"name": "suite", "tests": 2, "skipped": 3, "failures": 4, "errors": 5, "stdout": "stdout log", "stderr": "stderr log"}, {"name": "suite2", "tests": 1, "skipped": 1, "failures": 1, "errors": 1}' + '], ' + '"tests": "7", "tests_succ": "1", "tests_skip": "2", "tests_fail": "3", "tests_error": "1", "runs": "3", "runs_succ": "-12", "runs_skip": "4", "runs_fail": "5", "runs_error": "6", "commit": "commit"}}' + '}', + actual + ) + + # check the json output has been provided + gha.add_to_output.assert_called_once_with( + 'json', + '{' + '"title": "1 parse errors, 1 errors, 3 fail, 2 skipped, 1 pass in 7s", ' + '"summary": "' + f'1 files\u2004\u2003\u205f\u20041 errors\u2004\u20032 suites\u2004\u2003\u20027s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"duration of all tests\\")\\n' + f'7 tests\u2003\u205f\u20041 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\")\u20032 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\")\u20033 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\")\u20031 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + f'3 runs\u2006\u2003-12 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols 
\\"passed tests\\")\u20034 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\")\u20035 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\")\u20036 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + '\\n' + 'Results for commit commit.\\n", ' + '"conclusion": "conclusion", ' + '"stats": {"files": 1, "errors": 1, "suites": 2, "duration": 7, "tests": 7, "tests_succ": 1, "tests_skip": 2, "tests_fail": 3, "tests_error": 1, "runs": 3, "runs_succ": -12, "runs_skip": 4, "runs_fail": 5, "runs_error": 6, "commit": "commit"}, ' + '"annotations": 7, ' + '"check_url": "mock url", ' + '"formatted": {"stats": {"files": "1", "errors": "1", "suites": "2", "duration": "7", "tests": "7", "tests_succ": "1", "tests_skip": "2", "tests_fail": "3", "tests_error": "1", "runs": "3", "runs_succ": "-12", "runs_skip": "4", "runs_fail": "5", "runs_error": "6", "commit": "commit"}}' + '}' + ) + + def test_publish_check_with_cases(self): + results = get_test_results(ParsedUnitTestResultsWithCommit( + files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, + suite_details=[], + cases=[ + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7), + ], + commit='commit' + ), False) + stats = get_stats(results) + + with tempfile.TemporaryDirectory() as path: + filepath = os.path.join(path, 'file.json') + settings = self.create_settings(event={}, json_file=filepath, json_test_case_results=True) + gh, gha, req, repo, commit = self.create_mocks(commit=mock.Mock(), digest=None, check_names=[]) + publisher = Publisher(settings, gh, gha) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + check_run, before_check_run = publisher.publish_check(stats, results.case_results, 'conclusion') + + repo.get_commit.assert_not_called() + + create_check_run_kwargs = dict( + name=settings.check_name, + 
head_sha=settings.commit, + status='completed', + conclusion='conclusion', + output={ + 'title': '1 parse errors, 1 errors, 3 fail, 2 skipped, 1 pass in 7s', + 'summary': f'1 files\u2004\u2003\u205f\u20041 errors\u2004\u20032 suites\u2004\u2003\u20027s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'7 tests\u2003\u205f\u20041 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests")\u20032 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests")\u20033 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests")\u20031 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors")\n' + f'3 runs\u2006\u2003-12 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests")\u20034 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests")\u20035 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests")\u20036 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors")\n' + '\n' + 'Results for commit commit.\n' + '\n' + '[test-results]:data:application/gzip;base64,H4sIAAAAAAAC/02MSwqAMAxEryJd68I/eBmRWiH4qST' + 'tSry7URNxN+8NM4eZYHFkuiRPE0MRwgMFwxhxCOA3xpaRi0D/3FO0VoYiZthl/IppgIVF+QmH6FE2GDeS8o56l+' + 'XFZ96/SlnuamV9a1hYv64QGDSdF7scnZDbAAAA\n', + 'annotations': [ + {'path': 'file', 'start_line': 1, 'end_line': 1, 'start_column': 2, 'end_column': 2, 'annotation_level': 'failure', 'message': 'error', 'title': 'Error processing result file', 'raw_details': 'file'}, + {'path': 'test', 'start_line': 123, 'end_line': 123, 'annotation_level': 'warning', 'message': 'result [took 3s]', 'title': 'test3 (class1) failed', 'raw_details': 'message3\ncontent3\nstdout3\nstderr3'}, + {'path': 'test', 'start_line': 123, 'end_line': 123, 'annotation_level': 'failure', 'message': 'result [took 4s]', 'title': 'test1 (class2) with error', 'raw_details': 'message4\ncontent4\nstdout4\nstderr4'}, + {'path': 'test', 'start_line': 123, 'end_line': 123, 'annotation_level': 'warning', 'message': 'result [took 6s]', 'title': 'test3 (class2) failed', 'raw_details': 'message6\ncontent6\nstdout6\nstderr6'}, + {'path': 'test', 'start_line': 123, 'end_line': 123, 'annotation_level': 'warning', 'message': 'result [took 7s]', 'title': 'test4 (class2) failed', 'raw_details': 'message7\ncontent7\nstdout7\nstderr7'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 2 skipped tests, see "Raw output" for the full list of skipped tests.', 'title': '2 skipped tests found', 'raw_details': 'class1 ‑ test2\nclass2 ‑ test2'}, + {'path': '.github', 'start_line': 0, 'end_line': 0, 'annotation_level': 'notice', 'message': 'There are 7 tests, see "Raw output" for the full list of tests.', 'title': '7 tests found', 'raw_details': 'class1 ‑ test1\nclass1 ‑ test2\nclass1 ‑ test3\nclass2 ‑ test1\nclass2 ‑ test2\nclass2 ‑ test3\nclass2 ‑ test4'} + ] + } + ) + 
repo.create_check_run.assert_called_once_with(**create_check_run_kwargs) + + # this checks that publisher.publish_check returned + # the result of the last call to repo.create_check_run + self.assertIsInstance(check_run, mock.Mock) + self.assertTrue(hasattr(check_run, 'create_check_run_kwargs')) + self.assertEqual(create_check_run_kwargs, check_run.create_check_run_kwargs) + self.assertIsNone(before_check_run) + + # assert the json file + with open(filepath, encoding='utf-8') as r: + actual = r.read() + self.maxDiff = None + self.assertEqual( + '{' + '"title": "1 parse errors, 1 errors, 3 fail, 2 skipped, 1 pass in 7s", ' + '"summary": "' + f'1 files    1 errors  2 suites   7s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"duration of all tests\\")\\n' + f'7 tests   1 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\") 2 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\") 3 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\") 1 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + f'3 runs  -12 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\") 4 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\") 5 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\") 6 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + '\\n' + 'Results for commit commit.\\n", ' + '"conclusion": "conclusion", ' + '"stats": {"files": 1, "errors": [{"file": "file", "message": "error", "line": 1, "column": 2}], "suites": 2, "duration": 7, "tests": 7, "tests_succ": 1, "tests_skip": 2, "tests_fail": 3, "tests_error": 1, "runs": 3, "runs_succ": -12, "runs_skip": 4, "runs_fail": 5, "runs_error": 6, "commit": "commit"}, ' + '"annotations": [' + '{"path": "file", "start_line": 1, "end_line": 1, "start_column": 2, "end_column": 2, "annotation_level": "failure", "message": "error", "title": "Error processing result file", "raw_details": "file"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "warning", "message": "result [took 3s]", "title": "test3 (class1) failed", "raw_details": "message3\\ncontent3\\nstdout3\\nstderr3"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "failure", "message": "result [took 4s]", "title": "test1 (class2) with error", "raw_details": "message4\\ncontent4\\nstdout4\\nstderr4"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "warning", "message": "result [took 6s]", "title": "test3 (class2) failed", "raw_details": "message6\\ncontent6\\nstdout6\\nstderr6"}, ' + '{"path": "test", "start_line": 123, "end_line": 123, "annotation_level": "warning", "message": "result [took 7s]", "title": "test4 (class2) failed", "raw_details": "message7\\ncontent7\\nstdout7\\nstderr7"}, ' + '{"path": ".github", "start_line": 0, "end_line": 0, "annotation_level": 
"notice", "message": "There are 2 skipped tests, see \\"Raw output\\" for the full list of skipped tests.", "title": "2 skipped tests found", "raw_details": "class1 ‑ test2\\nclass2 ‑ test2"}, ' + '{"path": ".github", "start_line": 0, "end_line": 0, "annotation_level": "notice", "message": "There are 7 tests, see \\"Raw output\\" for the full list of tests.", "title": "7 tests found", "raw_details": "class1 ‑ test1\\nclass1 ‑ test2\\nclass1 ‑ test3\\nclass2 ‑ test1\\nclass2 ‑ test2\\nclass2 ‑ test3\\nclass2 ‑ test4"}' + '], ' + '"check_url": "mock url", ' + '"cases": [' + '{' + '"class_name": "class1", ' + '"test_name": "test1", ' + '"states": {' + '"success": [' + '{"result_file": "result", "test_file": "test", "line": 123, "class_name": "class1", "test_name": "test1", "result": "success", "message": "message1", "content": "content1", "stdout": "stdout1", "stderr": "stderr1", "time": 1}' + ']' + '}' + '}, {' + '"class_name": "class1", ' + '"test_name": "test2", ' + '"states": {' + '"skipped": [' + '{"result_file": "result", "test_file": "test", "line": 123, "class_name": "class1", "test_name": "test2", "result": "skipped", "message": "message2", "content": "content2", "stdout": "stdout2", "stderr": "stderr2", "time": 2}' + ']' + '}' + '}, {' + '"class_name": "class1", ' + '"test_name": "test3", ' + '"states": {' + '"failure": [' + '{"result_file": "result", "test_file": "test", "line": 123, "class_name": "class1", "test_name": "test3", "result": "failure", "message": "message3", "content": "content3", "stdout": "stdout3", "stderr": "stderr3", "time": 3}' + ']' + '}' + '}, {' + '"class_name": "class2", ' + '"test_name": "test1", ' + '"states": {' + '"error": [' + '{"result_file": "result", "test_file": "test", "line": 123, "class_name": "class2", "test_name": "test1", "result": "error", "message": "message4", "content": "content4", "stdout": "stdout4", "stderr": "stderr4", "time": 4}' + ']' + '}' + '}, {' + '"class_name": "class2", ' + '"test_name": "test2", ' + '"states": {' + '"skipped": [' + '{"result_file": "result", "test_file": "test", "line": 123, "class_name": "class2", "test_name": "test2", "result": "skipped", "message": "message5", "content": "content5", "stdout": "stdout5", "stderr": "stderr5", "time": 5}' + ']' + '}' + '}, {' + '"class_name": "class2", ' + '"test_name": "test3", ' + '"states": {' + '"failure": [' + '{"result_file": "result", "test_file": "test", "line": 123, "class_name": "class2", "test_name": "test3", "result": "failure", "message": "message6", "content": "content6", "stdout": "stdout6", "stderr": "stderr6", "time": 6}' + ']' + '}' + '}, {' + '"class_name": "class2", ' + '"test_name": "test4", "states": {' + '"failure": [' + '{"result_file": "result", "test_file": "test", "line": 123, "class_name": "class2", "test_name": "test4", "result": "failure", "message": "message7", "content": "content7", "stdout": "stdout7", "stderr": "stderr7", "time": 7}' + ']' + '}' + '}' + '], ' + '"formatted": {"stats": {"files": "1", "errors": [{"file": "file", "message": "error", "line": 1, "column": 2}], "suites": "2", "duration": "7", "tests": "7", "tests_succ": "1", "tests_skip": "2", "tests_fail": "3", "tests_error": "1", "runs": "3", "runs_succ": "-12", "runs_skip": "4", "runs_fail": "5", "runs_error": "6", "commit": "commit"}}' + '}', + actual + ) + + # check the json output has been provided + gha.add_to_output.assert_called_once_with( + 'json', + '{' + '"title": "1 parse errors, 1 errors, 3 fail, 2 skipped, 1 pass in 7s", ' + '"summary": "' + f'1 
files\u2004\u2003\u205f\u20041 errors\u2004\u20032 suites\u2004\u2003\u20027s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"duration of all tests\\")\\n' + f'7 tests\u2003\u205f\u20041 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\")\u20032 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\")\u20033 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\")\u20031 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + f'3 runs\u2006\u2003-12 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"passed tests\\")\u20034 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"skipped / disabled tests\\")\u20035 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"failed tests\\")\u20036 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols \\"test errors\\")\\n' + '\\n' + 'Results for commit commit.\\n", ' + '"conclusion": "conclusion", ' + '"stats": {"files": 1, "errors": 1, "suites": 2, "duration": 7, "tests": 7, "tests_succ": 1, "tests_skip": 2, "tests_fail": 3, "tests_error": 1, "runs": 3, "runs_succ": -12, "runs_skip": 4, "runs_fail": 5, "runs_error": 6, "commit": "commit"}, ' + '"annotations": 7, ' + '"check_url": "mock url", ' + '"formatted": {"stats": {"files": "1", "errors": "1", "suites": "2", "duration": "7", "tests": "7", "tests_succ": "1", "tests_skip": "2", "tests_fail": "3", "tests_error": "1", "runs": "3", "runs_succ": "-12", "runs_skip": "4", "runs_fail": "5", "runs_error": "6", "commit": "commit"}}' + '}' + ) + + def test_publish_data_to_dicts(self): + for separator in ['.', ',', ' ', punctuation_space]: + for json_suite_details, json_test_case_results in [(False, False), (True, False), (False, True), (True, True)]: + with self.subTest(json_thousands_separator=separator, + json_suite_details=json_suite_details, + json_test_case_results=json_test_case_results): + stats = {'commit': 'commit', + 'duration': 3456, + 'suite_details': [{'errors': 1, + 'failures': 2, + 'name': 'suite', + 'skipped': 3, + 'stderr': 'stderr', + 'stdout': 'stdout', + 'tests': 7}], + 'errors': [{'column': 2, + 'file': 'file', + 'line': 1, + 'message': 'message'}], + 'files': 12345, + 'runs': 9, + 'runs_error': 1345, + 'runs_fail': 12, + 'runs_skip': 11, + 'runs_succ': 10, + 'suites': 2, + 'tests': 4, + 'tests_error': 8901, + 'tests_fail': 7, + 'tests_skip': 6, + 'tests_succ': 5} + + formatted_stats = {'commit': 'commit', + 'duration': "3" + separator + "456", + 'errors': [{'column': 2, + 'file': 'file', + 'line': 1, + 'message': 'message'}], + 'files': "12" + separator + "345", + 'runs': "9", + 'runs_error': "1" + separator + "345", + 'runs_fail': "12", + 'runs_skip': "11", + 'runs_succ': "10", + 'suite_details': [{'errors': 1, + 'failures': 2, + 'name': 'suite', + 'skipped': 3, + 'stderr': 'stderr', + 'stdout': 'stdout', + 'tests': 7}], + 'suites': "2", + 'tests': "4", + 'tests_error': "8" + separator + "901", + 'tests_fail': 
"7", + 'tests_skip': "6", + 'tests_succ': "5"} + + if not json_suite_details: + del stats['suite_details'] + del formatted_stats['suite_details'] + + expected = { + 'title': 'title', + 'summary': 'summary', + 'conclusion': 'conclusion', + 'stats': stats, + 'stats_with_delta': {'commit': 'commit', + 'duration': {'delta': -3456, 'number': 3456}, + 'errors': [{'column': 2, + 'file': 'file', + 'line': 1, + 'message': 'message'}, + {'column': 4, + 'file': 'file2', + 'line': 2, + 'message': 'message2'}], + 'files': {'delta': -1234, 'number': 1234}, + 'reference_commit': 'ref', + 'reference_type': 'type', + 'runs': {'delta': -9, 'number': 9}, + 'runs_error': {'delta': -1345, 'number': 1345}, + 'runs_fail': {'delta': -12, 'number': 12}, + 'runs_skip': {'delta': -11, 'number': 11}, + 'runs_succ': {'delta': -10, 'number': 10}, + 'suites': {'delta': -2, 'number': 2}, + 'tests': {'delta': -4, 'number': 4}, + 'tests_error': {'delta': -8, 'number': 8}, + 'tests_fail': {'delta': -7, 'number': 7}, + 'tests_skip': {'delta': -6, 'number': 6}, + 'tests_succ': {'delta': -5, 'number': 5}}, + 'formatted': {'stats': formatted_stats, + 'stats_with_delta': {'commit': 'commit', + 'duration': {'delta': "-3" + separator + "456", 'number': "3" + separator + "456"}, + 'errors': [{'column': 2, + 'file': 'file', + 'line': 1, + 'message': 'message'}, + {'column': 4, + 'file': 'file2', + 'line': 2, + 'message': 'message2'}], + 'files': {'delta': "-1" + separator + "234", 'number': "1" + separator + "234"}, + 'reference_commit': 'ref', + 'reference_type': 'type', + 'runs': {'delta': "-9", 'number': "9"}, + 'runs_error': {'delta': "-1" + separator + "345", 'number': "1" + separator + "345"}, + 'runs_fail': {'delta': "-12", 'number': "12"}, + 'runs_skip': {'delta': "-11", 'number': "11"}, + 'runs_succ': {'delta': "-10", 'number': "10"}, + 'suites': {'delta': "-2", 'number': "2"}, + 'tests': {'delta': "-4", 'number': "4"}, + 'tests_error': {'delta': "-8", 'number': "8"}, + 'tests_fail': {'delta': "-7", 'number': "7"}, + 'tests_skip': {'delta': "-6", 'number': "6"}, + 'tests_succ': {'delta': "-5", 'number': "5"}}}, + 'annotations': [{'annotation_level': 'failure', + 'end_column': 4, + 'end_line': 2, + 'message': 'message', + 'path': 'path', + 'raw_details': 'file', + 'start_column': 3, + 'start_line': 1, + 'title': 'Error processing result file'}], + 'check_url': 'http://check-run.url', + 'cases': [ + { + 'class_name': 'class name', + 'test_name': 'test name', + 'states': { + 'success': [ + { + 'class_name': 'test.classpath.classname', + 'content': 'content', + 'line': 1, + 'message': 'message', + 'result': 'success', + 'result_file': '/path/to/test/test.classpath.classname', + 'stderr': 'stderr', + 'stdout': 'stdout', + 'test_file': 'file1', + 'test_name': 'casename', + 'time': 0.1 + } + ] + } + } + ] + } + if not json_test_case_results: + del expected['cases'] + + actual = self.publish_data.to_dict(separator, with_suite_details=json_suite_details, with_cases=json_test_case_results) + self.assertEqual(expected, actual) + + self.assertEqual({ + 'title': 'title', + 'summary': 'summary', + 'conclusion': 'conclusion', + 'stats': {'commit': 'commit', + 'duration': 3456, + 'errors': 1, + 'files': 12345, + 'runs': 9, + 'runs_error': 1345, + 'runs_fail': 12, + 'runs_skip': 11, + 'runs_succ': 10, + 'suites': 2, + 'tests': 4, + 'tests_error': 8901, + 'tests_fail': 7, + 'tests_skip': 6, + 'tests_succ': 5}, + 'stats_with_delta': {'commit': 'commit', + 'duration': {'delta': -3456, 'number': 3456}, + 'errors': 2, + 'files': {'delta': -1234, 
'number': 1234}, + 'reference_commit': 'ref', + 'reference_type': 'type', + 'runs': {'delta': -9, 'number': 9}, + 'runs_error': {'delta': -1345, 'number': 1345}, + 'runs_fail': {'delta': -12, 'number': 12}, + 'runs_skip': {'delta': -11, 'number': 11}, + 'runs_succ': {'delta': -10, 'number': 10}, + 'suites': {'delta': -2, 'number': 2}, + 'tests': {'delta': -4, 'number': 4}, + 'tests_error': {'delta': -8, 'number': 8}, + 'tests_fail': {'delta': -7, 'number': 7}, + 'tests_skip': {'delta': -6, 'number': 6}, + 'tests_succ': {'delta': -5, 'number': 5}}, + 'formatted': {'stats': {'commit': 'commit', + 'duration': "3" + separator + "456", + 'errors': "1", + 'files': "12" + separator + "345", + 'runs': "9", + 'runs_error': "1" + separator + "345", + 'runs_fail': "12", + 'runs_skip': "11", + 'runs_succ': "10", + 'suites': "2", + 'tests': "4", + 'tests_error': "8" + separator + "901", + 'tests_fail': "7", + 'tests_skip': "6", + 'tests_succ': "5"}, + 'stats_with_delta': {'commit': 'commit', + 'duration': {'delta': "-3" + separator + "456", 'number': "3" + separator + "456"}, + 'errors': "2", + 'files': {'delta': "-1" + separator + "234", 'number': "1" + separator + "234"}, + 'reference_commit': 'ref', + 'reference_type': 'type', + 'runs': {'delta': "-9", 'number': "9"}, + 'runs_error': {'delta': "-1" + separator + "345", 'number': "1" + separator + "345"}, + 'runs_fail': {'delta': "-12", 'number': "12"}, + 'runs_skip': {'delta': "-11", 'number': "11"}, + 'runs_succ': {'delta': "-10", 'number': "10"}, + 'suites': {'delta': "-2", 'number': "2"}, + 'tests': {'delta': "-4", 'number': "4"}, + 'tests_error': {'delta': "-8", 'number': "8"}, + 'tests_fail': {'delta': "-7", 'number': "7"}, + 'tests_skip': {'delta': "-6", 'number': "6"}, + 'tests_succ': {'delta': "-5", 'number': "5"}}}, + 'annotations': 1, + 'check_url': 'http://check-run.url'}, + self.publish_data.to_reduced_dict(separator)) + + def test_publish_json(self): + for separator in ['.', ',', ' ', punctuation_space]: + for json_suite_details, json_test_case_results in [(False, False), (True, False), (False, True), (True, True)]: + with self.subTest(json_thousands_separator=separator, + json_suite_details=json_suite_details, + json_test_case_results=json_test_case_results): + with tempfile.TemporaryDirectory() as path: + filepath = os.path.join(path, 'file.json') + settings = self.create_settings( + json_file=filepath, + json_thousands_separator=separator, + json_suite_details=json_suite_details, + json_test_case_results=json_test_case_results + ) + + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + publisher = Publisher(settings, gh, gha) + + publisher.publish_json(self.publish_data) + gha.error.assert_not_called() + + # assert the file + with open(filepath, encoding='utf-8') as r: + actual = r.read() + self.assertEqual( + '{' + '"title": "title", ' + '"summary": "summary", ' + '"conclusion": "conclusion", ' + '"stats": {"files": 12345, "errors": [{"file": "file", "message": "message", "line": 1, "column": 2}], "suites": 2, "duration": 3456, ' + + ('"suite_details": [{"name": "suite", "tests": 7, "skipped": 3, "failures": 2, "errors": 1, "stdout": "stdout", "stderr": "stderr"}], ' + if json_suite_details else '') + + '"tests": 4, "tests_succ": 5, "tests_skip": 6, "tests_fail": 7, "tests_error": 8901, "runs": 9, "runs_succ": 10, "runs_skip": 11, "runs_fail": 12, "runs_error": 1345, "commit": "commit"}, ' + '"stats_with_delta": {"files": {"number": 1234, "delta": -1234}, "errors": [{"file": 
"file", "message": "message", "line": 1, "column": 2}, {"file": "file2", "message": "message2", "line": 2, "column": 4}], "suites": {"number": 2, "delta": -2}, "duration": {"number": 3456, "delta": -3456}, "tests": {"number": 4, "delta": -4}, "tests_succ": {"number": 5, "delta": -5}, "tests_skip": {"number": 6, "delta": -6}, "tests_fail": {"number": 7, "delta": -7}, "tests_error": {"number": 8, "delta": -8}, "runs": {"number": 9, "delta": -9}, "runs_succ": {"number": 10, "delta": -10}, "runs_skip": {"number": 11, "delta": -11}, "runs_fail": {"number": 12, "delta": -12}, "runs_error": {"number": 1345, "delta": -1345}, "commit": "commit", "reference_type": "type", "reference_commit": "ref"}, ' + '"annotations": [{"path": "path", "start_line": 1, "end_line": 2, "start_column": 3, "end_column": 4, "annotation_level": "failure", "message": "message", "title": "Error processing result file", "raw_details": "file"}], ' + '"check_url": "http://check-run.url", ' + + ('"cases": [' + '{"class_name": "class name", "test_name": "test name", "states": {"success": [{"result_file": "/path/to/test/test.classpath.classname", "test_file": "file1", "line": 1, "class_name": "test.classpath.classname", "test_name": "casename", "result": "success", "message": "message", "content": "content", "stdout": "stdout", "stderr": "stderr", "time": 0.1}]}}' + '], ' + if json_test_case_results else '') + + '"formatted": {' + '"stats": {"files": "12' + separator + '345", "errors": [{"file": "file", "message": "message", "line": 1, "column": 2}], "suites": "2", "duration": "3' + separator + '456", ' + + ('"suite_details": [{"name": "suite", "tests": 7, "skipped": 3, "failures": 2, "errors": 1, "stdout": "stdout", "stderr": "stderr"}], ' + if json_suite_details else '') + + '"tests": "4", "tests_succ": "5", "tests_skip": "6", "tests_fail": "7", "tests_error": "8' + separator + '901", "runs": "9", "runs_succ": "10", "runs_skip": "11", "runs_fail": "12", "runs_error": "1' + separator + '345", "commit": "commit"}, ' + '"stats_with_delta": {"files": {"number": "1' + separator + '234", "delta": "-1' + separator + '234"}, "errors": [{"file": "file", "message": "message", "line": 1, "column": 2}, {"file": "file2", "message": "message2", "line": 2, "column": 4}], "suites": {"number": "2", "delta": "-2"}, "duration": {"number": "3' + separator + '456", "delta": "-3' + separator + '456"}, "tests": {"number": "4", "delta": "-4"}, "tests_succ": {"number": "5", "delta": "-5"}, "tests_skip": {"number": "6", "delta": "-6"}, "tests_fail": {"number": "7", "delta": "-7"}, "tests_error": {"number": "8", "delta": "-8"}, "runs": {"number": "9", "delta": "-9"}, "runs_succ": {"number": "10", "delta": "-10"}, "runs_skip": {"number": "11", "delta": "-11"}, "runs_fail": {"number": "12", "delta": "-12"}, "runs_error": {"number": "1' + separator + '345", "delta": "-1' + separator + '345"}, "commit": "commit", "reference_type": "type", "reference_commit": "ref"}' + '}' + '}', + actual + ) + + # data is being sent to GH action output 'json' + # some list fields are replaced by their length + expected = { + "title": "title", + "summary": "summary", + "conclusion": "conclusion", + "stats": {"files": 12345, "errors": 1, "suites": 2, "duration": 3456, "tests": 4, "tests_succ": 5, + "tests_skip": 6, "tests_fail": 7, "tests_error": 8901, "runs": 9, "runs_succ": 10, + "runs_skip": 11, "runs_fail": 12, "runs_error": 1345, "commit": "commit"}, + "stats_with_delta": {"files": {"number": 1234, "delta": -1234}, "errors": 2, + "suites": {"number": 2, "delta": -2}, 
"duration": {"number": 3456, "delta": -3456}, + "tests": {"number": 4, "delta": -4}, "tests_succ": {"number": 5, "delta": -5}, + "tests_skip": {"number": 6, "delta": -6}, "tests_fail": {"number": 7, "delta": -7}, + "tests_error": {"number": 8, "delta": -8}, "runs": {"number": 9, "delta": -9}, + "runs_succ": {"number": 10, "delta": -10}, + "runs_skip": {"number": 11, "delta": -11}, + "runs_fail": {"number": 12, "delta": -12}, + "runs_error": {"number": 1345, "delta": -1345}, "commit": "commit", + "reference_type": "type", "reference_commit": "ref"}, + "annotations": 1, + "check_url": "http://check-run.url", + "formatted": { + "stats": {"files": "12" + separator + "345", "errors": "1", "suites": "2", "duration": "3" + separator + "456", "tests": "4", "tests_succ": "5", + "tests_skip": "6", "tests_fail": "7", "tests_error": "8" + separator + "901", "runs": "9", "runs_succ": "10", + "runs_skip": "11", "runs_fail": "12", "runs_error": "1" + separator + "345", "commit": "commit"}, + "stats_with_delta": {"files": {"number": "1" + separator + "234", "delta": "-1" + separator + "234"}, "errors": "2", + "suites": {"number": "2", "delta": "-2"}, "duration": {"number": "3" + separator + "456", "delta": "-3" + separator + "456"}, + "tests": {"number": "4", "delta": "-4"}, "tests_succ": {"number": "5", "delta": "-5"}, + "tests_skip": {"number": "6", "delta": "-6"}, "tests_fail": {"number": "7", "delta": "-7"}, + "tests_error": {"number": "8", "delta": "-8"}, "runs": {"number": "9", "delta": "-9"}, + "runs_succ": {"number": "10", "delta": "-10"}, + "runs_skip": {"number": "11", "delta": "-11"}, + "runs_fail": {"number": "12", "delta": "-12"}, + "runs_error": {"number": "1" + separator + "345", "delta": "-1" + separator + "345"}, "commit": "commit", + "reference_type": "type", "reference_commit": "ref"} + } + } + gha.add_to_output.assert_called_once_with('json', json.dumps(expected, ensure_ascii=False)) + + def test_publish_job_summary_without_before(self): + settings = self.create_settings(job_summary=True) + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + cr = mock.MagicMock(html_url='http://check-run.url') + publisher = Publisher(settings, gh, gha) + + publisher.publish_job_summary('title', self.stats, cr, None) + mock_calls = gha.mock_calls + + self.assertEqual(1, len(mock_calls)) + (method, args, kwargs) = mock_calls[0] + self.assertEqual('add_to_job_summary', method) + self.assertEqual(('## title\n' + f'\u205f\u20041 files\u2004\u20032 suites\u2004\u2003\u20023s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests")\n' + f'22 tests\u20034 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests")\u20035 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests")\u2003\u205f\u20046 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests")\u2003\u205f\u20047 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors")\n' + f'38 runs\u2006\u20038 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests")\u20039 
[:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests")\u200310 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests")\u200311 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors")\n' + '\n' + 'For more details on these failures and errors, see [this check](http://check-run.url).\n' + '\n' + 'Results for commit commit.\n', ), args) + self.assertEqual({}, kwargs) + + def test_publish_job_summary_with_before(self): + settings = self.create_settings(job_summary=True) + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + cr = mock.MagicMock(html_url='http://check-run.url') + bcr = mock.MagicMock() + bs = UnitTestRunResults( + files=2, errors=[], suites=3, duration=4, suite_details=[UnitTestSuite('suite', 7, 3, 2, 1, 'stdout', 'stderr')], + tests=20, tests_succ=5, tests_skip=4, tests_fail=5, tests_error=6, + runs=37, runs_succ=10, runs_skip=9, runs_fail=8, runs_error=7, + commit='before' + ) + publisher = Publisher(settings, gh, gha) + publisher.get_check_run = mock.Mock(return_value=bcr) + publisher.get_stats_from_check_run = mock.Mock(return_value=bs) + + publisher.publish_job_summary('title', self.stats, cr, bcr) + mock_calls = gha.mock_calls + + self.assertEqual(1, len(mock_calls)) + (method, args, kwargs) = mock_calls[0] + self.assertEqual('add_to_job_summary', method) + self.assertEqual(('## title\n' + f'\u205f\u20041 files\u2004 \u2006-\u200a1\u2002\u20032 suites\u2004 \u2006-\u200a1\u2002\u2003\u20023s [:stopwatch:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "duration of all tests") -1s\n' + f'22 tests +2\u2002\u20034 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests") \u2006-\u200a1\u2002\u20035 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests") +1\u2002\u2003\u205f\u20046 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests") +1\u2002\u2003\u205f\u20047 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors") +1\u2002\n' + f'38 runs\u2006 +1\u2002\u20038 [:heavy_check_mark:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "passed tests") \u2006-\u200a2\u2002\u20039 [:zzz:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "skipped / disabled tests") ±0\u2002\u200310 [:x:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "failed tests") +2\u2002\u200311 [:fire:](https://github.com/step-security/publish-unit-test-result-action/blob/{__version__}/README.md#the-symbols "test errors") +4\u2002\n' + '\n' + 'For more details on these failures and errors, see [this check](http://check-run.url).\n' + '\n' + 'Results for commit commit.\u2003± Comparison against earlier commit before.\n', ), args) + self.assertEqual({}, kwargs) + + def test_publish_comment(self): + settings = self.create_settings(event={'pull_request': {'base': {'sha': 'commit base'}}}, 
event_name='pull_request') + base_commit = 'base-commit' + + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + pr = self.create_github_pr(settings.repo, base_commit_sha=base_commit) + publisher = Publisher(settings, gh, gha) + publisher.get_latest_comment = mock.Mock(return_value=None) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + publisher.publish_comment(settings.comment_title, self.stats, pr) + expected_digest = f'{digest_header}{get_digest_from_stats(self.stats)}' + + pr.create_issue_comment.assert_called_once_with( + '## Comment Title\n' + f'\u205f\u20041 files\u2004 ±0\u2002\u20032 suites\u2004 ±0\u2002\u2003\u20023s {duration_label_md} ±0s\n' + f'22 {all_tests_label_md} +1\u2002\u20034 {passed_tests_label_md} \u2006-\u200a\u205f\u20048\u2002\u20035 {skipped_tests_label_md} +1\u2002\u2003\u205f\u20046 {failed_tests_label_md} +4\u2002\u2003\u205f\u20047 {test_errors_label_md} +\u205f\u20044\u2002\n' + f'38 runs\u2006 +1\u2002\u20038 {passed_tests_label_md} \u2006-\u200a17\u2002\u20039 {skipped_tests_label_md} +2\u2002\u200310 {failed_tests_label_md} +6\u2002\u200311 {test_errors_label_md} +10\u2002\n' + '\n' + 'Results for commit commit.\u2003± Comparison against base commit base.\n' + '\n' + f'{expected_digest}\n' + ) + + def test_publish_comment_not_required(self): + # same as test_publish_comment but require_comment returns False + with mock.patch('publish.publisher.Publisher.require_comment', return_value=False): + settings = self.create_settings(event={'pull_request': {'base': {'sha': 'commit base'}}}, event_name='pull_request') + base_commit = 'base-commit' + + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + pr = self.create_github_pr(settings.repo, base_commit_sha=base_commit) + publisher = Publisher(settings, gh, gha) + publisher.get_latest_comment = mock.Mock(return_value=None) + + publisher.publish_comment(settings.comment_title, self.stats, pr) + + pr.create_issue_comment.assert_not_called() + + def test_publish_comment_without_base(self): + settings = self.create_settings() + + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + pr = self.create_github_pr(settings.repo) + publisher = Publisher(settings, gh, gha) + publisher.get_latest_comment = mock.Mock(return_value=None) + + compare = mock.MagicMock() + compare.merge_base_commit.sha = None + repo.compare = mock.Mock(return_value=compare) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + publisher.publish_comment(settings.comment_title, self.stats, pr) + expected_digest = f'{digest_header}{get_digest_from_stats(self.stats)}' + + pr.create_issue_comment.assert_called_once_with( + '## Comment Title\n' + f'\u205f\u20041 files\u2004\u20032 suites\u2004\u2003\u20023s {duration_label_md}\n' + f'22 {all_tests_label_md}\u20034 {passed_tests_label_md}\u20035 {skipped_tests_label_md}\u2003\u205f\u20046 {failed_tests_label_md}\u2003\u205f\u20047 {test_errors_label_md}\n' + f'38 runs\u2006\u20038 {passed_tests_label_md}\u20039 {skipped_tests_label_md}\u200310 {failed_tests_label_md}\u200311 {test_errors_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + f'{expected_digest}\n' + ) + + def test_publish_comment_without_compare(self): + settings = self.create_settings(event={'pull_request': {'base': {'sha': 'commit base'}}}, event_name='pull_request', 
compare_earlier=False) + base_commit = 'base-commit' + + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + pr = self.create_github_pr(settings.repo, base_commit_sha=base_commit) + publisher = Publisher(settings, gh, gha) + publisher.get_latest_comment = mock.Mock(return_value=None) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + publisher.publish_comment(settings.comment_title, self.stats, pr) + expected_digest = f'{digest_header}{get_digest_from_stats(self.stats)}' + + pr.create_issue_comment.assert_called_once_with( + '## Comment Title\n' + f'\u205f\u20041 files\u2004\u20032 suites\u2004\u2003\u20023s {duration_label_md}\n' + f'22 {all_tests_label_md}\u20034 {passed_tests_label_md}\u20035 {skipped_tests_label_md}\u2003\u205f\u20046 {failed_tests_label_md}\u2003\u205f\u20047 {test_errors_label_md}\n' + f'38 runs\u2006\u20038 {passed_tests_label_md}\u20039 {skipped_tests_label_md}\u200310 {failed_tests_label_md}\u200311 {test_errors_label_md}\n' + '\n' + 'Results for commit commit.\n' + '\n' + f'{expected_digest}\n' + ) + + def test_publish_comment_with_check_run_with_annotations(self): + settings = self.create_settings() + base_commit = 'base-commit' + + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + pr = self.create_github_pr(settings.repo, base_commit_sha=base_commit) + cr = mock.MagicMock(html_url='http://check-run.url') + publisher = Publisher(settings, gh, gha) + publisher.get_latest_comment = mock.Mock(return_value=None) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + publisher.publish_comment(settings.comment_title, self.stats, pr, cr) + expected_digest = f'{digest_header}{get_digest_from_stats(self.stats)}' + + pr.create_issue_comment.assert_called_once_with( + '## Comment Title\n' + f'\u205f\u20041 files\u2004 ±0\u2002\u20032 suites\u2004 ±0\u2002\u2003\u20023s {duration_label_md} ±0s\n' + f'22 {all_tests_label_md} +1\u2002\u20034 {passed_tests_label_md} \u2006-\u200a\u205f\u20048\u2002\u20035 {skipped_tests_label_md} +1\u2002\u2003\u205f\u20046 {failed_tests_label_md} +4\u2002\u2003\u205f\u20047 {test_errors_label_md} +\u205f\u20044\u2002\n' + f'38 runs\u2006 +1\u2002\u20038 {passed_tests_label_md} \u2006-\u200a17\u2002\u20039 {skipped_tests_label_md} +2\u2002\u200310 {failed_tests_label_md} +6\u2002\u200311 {test_errors_label_md} +10\u2002\n' + '\n' + 'For more details on these failures and errors, see [this check](http://check-run.url).\n' + '\n' + 'Results for commit commit.\u2003± Comparison against base commit base.\n' + '\n' + f'{expected_digest}\n' + ) + + def test_publish_comment_with_check_run_without_annotations(self): + settings = self.create_settings() + base_commit = 'base-commit' + + gh, gha, req, repo, commit = self.create_mocks(digest=self.base_digest, check_names=[settings.check_name]) + pr = self.create_github_pr(settings.repo, base_commit_sha=base_commit) + cr = mock.MagicMock(html_url='http://check-run.url') + publisher = Publisher(settings, gh, gha) + publisher.get_latest_comment = mock.Mock(return_value=None) + + stats = dict(self.stats.to_dict()) + stats.update(tests_fail=0, tests_error=0, runs_fail=0, runs_error=0) + stats = UnitTestRunResults.from_dict(stats) + + # makes gzipped digest deterministic + with mock.patch('gzip.time.time', return_value=0): + publisher.publish_comment(settings.comment_title, stats, pr, cr) + expected_digest = 
f'{digest_header}{get_digest_from_stats(stats)}' + + pr.create_issue_comment.assert_called_once_with( + '## Comment Title\n' + f'\u205f\u20041 files\u2004 ±0\u2002\u20032 suites\u2004 ±0\u2002\u2003\u20023s {duration_label_md} ±0s\n' + f'22 {all_tests_label_md} +1\u2002\u20034 {passed_tests_label_md} \u2006-\u200a\u205f\u20048\u2002\u20035 {skipped_tests_label_md} +1\u2002\u20030 {failed_tests_label_md} \u2006-\u200a2\u2002\n' + f'38 runs\u2006 +1\u2002\u20038 {passed_tests_label_md} \u2006-\u200a17\u2002\u20039 {skipped_tests_label_md} +2\u2002\u20030 {failed_tests_label_md} \u2006-\u200a4\u2002\n' + '\n' + 'Results for commit commit.\u2003± Comparison against base commit base.\n' + '\n' + f'{expected_digest}\n' + ) + + def test_get_base_commit_sha_none_event(self): + self.do_test_get_base_commit_sha(event=None, event_name='any', expected_sha='merge base commit sha') + + def test_get_base_commit_sha_empty_event(self): + self.do_test_get_base_commit_sha(event={}, event_name='any', expected_sha='merge base commit sha') + + def test_get_base_commit_sha_pull_request_event(self): + self.do_test_get_base_commit_sha( + event={'pull_request': {'base': {'sha': 'commit sha'}}}, + event_name='pull_request', + expected_sha='commit sha' + ) + + def test_get_base_commit_sha_pull_request_event_commit_mode(self): + self.do_test_get_base_commit_sha( + event={'pull_request': {'base': {'sha': 'commit sha'}}}, + event_name='pull_request', + pull_request_build='commit', + expected_sha='merge base commit sha' + ) + + def test_get_base_commit_sha_workflow_run_event(self): + self.do_test_get_base_commit_sha( + event={'workflow_run': {}}, + event_name='workflow_run', + expected_sha=None + ) + + def test_get_base_commit_sha_push_event(self): + publisher = self.do_test_get_base_commit_sha( + event={}, + event_name='push', + expected_sha='merge base commit sha' + ) + self.assertEqual( + [mock.call('master', 'commit')], + publisher._repo.compare.mock_calls + ) + + def test_get_base_commit_sha_other_event(self): + publisher = self.do_test_get_base_commit_sha( + event={}, + event_name='any', + expected_sha='merge base commit sha' + ) + self.assertEqual( + [mock.call('master', 'commit')], + publisher._repo.compare.mock_calls + ) + + def do_test_get_base_commit_sha(self, + event: Optional[dict], + event_name: str, + pull_request_build: str = pull_request_build_mode_merge, + expected_sha: Optional[str] = None): + pr = mock.MagicMock() + pr.base.ref = 'master' + + settings = self.create_settings(event=event, event_name=event_name, pull_request_build=pull_request_build) + publisher = mock.MagicMock(_settings=settings) + compare = mock.MagicMock() + compare.merge_base_commit.sha = 'merge base commit sha' + publisher._repo.compare = mock.Mock(return_value=compare) + result = Publisher.get_base_commit_sha(publisher, pr) + + self.assertEqual(expected_sha, result) + + return publisher + + def test_get_base_commit_sha_compare_exception(self): + pr = mock.MagicMock() + + def exception(base, head): + raise Exception() + + settings = self.create_settings(event={}) + publisher = mock.MagicMock(_settings=settings) + publisher._repo.compare = mock.Mock(side_effect=exception) + result = Publisher.get_base_commit_sha(publisher, pr) + + self.assertEqual(None, result) + + def do_test_get_pull_request_comments(self, order_updated: bool): + settings = self.create_settings() + + gh, gha, req, repo, commit = self.create_mocks(repo_name=settings.repo, repo_login='login') + req.requestJsonAndCheck = mock.Mock( + return_value=({}, {'data': 
{'repository': {'pullRequest': {'comments': {'nodes': ['node']}}}}}) + ) + pr = self.create_github_pr(settings.repo, number=1234) + publisher = Publisher(settings, gh, gha) + + response = publisher.get_pull_request_comments(pr, order_by_updated=order_updated) + self.assertEqual(['node'], response) + return req + + def test_get_pull_request_comments(self): + req = self.do_test_get_pull_request_comments(order_updated=False) + req.requestJsonAndCheck.assert_called_once_with( + 'POST', 'https://the-github-graphql-url', + input={ + 'query': 'query ListComments {' + ' repository(owner:"login", name:"owner/repo") {' + ' pullRequest(number: 1234) {' + ' comments(last: 100) {' + ' nodes {' + ' id, databaseId, author { login }, body, isMinimized' + ' }' + ' }' + ' }' + ' }' + '}' + } + ) + + def test_get_pull_request_comments_order_updated(self): + req = self.do_test_get_pull_request_comments(order_updated=True) + req.requestJsonAndCheck.assert_called_once_with( + 'POST', 'https://the-github-graphql-url', + input={ + 'query': 'query ListComments {' + ' repository(owner:"login", name:"owner/repo") {' + ' pullRequest(number: 1234) {' + ' comments(last: 100, orderBy: { direction: ASC, field: UPDATED_AT }) {' + ' nodes {' + ' id, databaseId, author { login }, body, isMinimized' + ' }' + ' }' + ' }' + ' }' + '}' + } + ) + + comments = [ + { + 'id': 'comment one', + 'author': {'login': 'github-actions'}, + 'body': '## Comment Title\n' + 'Results for commit dee59820.\u2003± Comparison against base commit 70b5dd18.\n', + 'isMinimized': False + }, + { + 'id': 'comment two', + 'author': {'login': 'someone else'}, + 'body': '## Comment Title\n' + 'more body\n' + 'Results for commit dee59820.\u2003± Comparison against base commit 70b5dd18.\n', + 'isMinimized': False + }, + { + 'id': 'comment three', + 'author': {'login': 'github-actions'}, + 'body': '## Wrong Comment Title\n' + 'more body\n' + 'Results for commit dee59820.\u2003± Comparison against base commit 70b5dd18.\n', + 'isMinimized': False + }, + { + 'id': 'comment four', + 'author': {'login': 'github-actions'}, + 'body': '## Comment Title\n' + 'more body\n' + 'no Results for commit dee59820.\u2003± Comparison against base commit 70b5dd18.\n', + 'isMinimized': False + }, + { + 'id': 'comment five', + 'author': {'login': 'github-actions'}, + 'body': '## Comment Title\n' + 'more body\n' + 'Results for commit dee59820.\u2003± Comparison against base commit 70b5dd18.\n', + 'isMinimized': True + }, + { + 'id': 'comment six', + 'author': {'login': 'github-actions'}, + 'body': 'comment', + 'isMinimized': True + }, + # earlier version of comments with lower case result and comparison + { + 'id': 'comment seven', + 'author': {'login': 'github-actions'}, + 'body': '## Comment Title\n' + 'results for commit dee59820\u2003± comparison against base commit 70b5dd18\n', + 'isMinimized': False + }, + # comment of different actor + { + 'id': 'comment eight', + 'author': {'login': 'other-actor'}, + 'body': '## Comment Title\n' + 'Results for commit dee59820.\u2003± Comparison against base commit 70b5dd18.\n', + 'isMinimized': False + }, + # malformed comments + { + 'id': 'comment nine', + 'author': None, + }, + { + 'id': 'comment ten', + 'author': {}, + }, + ] + + def test_get_action_comments(self): + settings = self.create_settings(actor='github-actions') + gh, gha, req, repo, commit = self.create_mocks() + publisher = Publisher(settings, gh, gha) + + expected = [comment + for comment in self.comments + if comment.get('id') in ['comment one', 'comment five', 'comment 
seven']] + actual = publisher.get_action_comments(self.comments, is_minimized=None) + self.assertEqual(3, len(expected)) + self.assertEqual(expected, actual) + + def test_get_action_comments_other_actor(self): + settings = self.create_settings(actor='other-actor') + gh, gha, req, repo, commit = self.create_mocks() + publisher = Publisher(settings, gh, gha) + + expected = [comment + for comment in self.comments + if comment.get('id') == 'comment eight'] + actual = publisher.get_action_comments(self.comments, is_minimized=None) + self.assertEqual(1, len(expected)) + self.assertEqual(expected, actual) + + def test_get_action_comments_not_minimized(self): + settings = self.create_settings(actor='github-actions') + gh, gha, req, repo, commit = self.create_mocks() + publisher = Publisher(settings, gh, gha) + + expected = [comment + for comment in self.comments + if comment.get('id') in ['comment one', 'comment seven']] + actual = publisher.get_action_comments(self.comments, is_minimized=False) + self.assertEqual(2, len(expected)) + self.assertEqual(expected, actual) diff --git a/python/test/test_readme_md.py b/python/test/test_readme_md.py new file mode 100644 index 0000000..83ad310 --- /dev/null +++ b/python/test/test_readme_md.py @@ -0,0 +1,24 @@ +import pathlib +import unittest + +import yaml + +project_root = pathlib.Path(__file__).resolve().parent.parent.parent + + +class TestActionYml(unittest.TestCase): + + def test_readme_md(self): + with open(project_root / 'action.yml', encoding='utf-8') as r: + action = yaml.safe_load(r) + + with open(project_root / 'README.md', encoding='utf-8') as r: + readme = r.readlines() + + for input, config in action.get('inputs').items(): + with self.subTest(input=input): + if 'deprecated' not in config.get('description', '').lower(): + self.assertTrue( + any(input in line for line in readme), + msg=f'There is no line in README.md that mentions {input}' + ) diff --git a/python/test/test_trx.py b/python/test/test_trx.py new file mode 100644 index 0000000..231a374 --- /dev/null +++ b/python/test/test_trx.py @@ -0,0 +1,41 @@ +import pathlib +import sys +import unittest +from glob import glob +from typing import List, Union + +sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent)) +sys.path.append(str(pathlib.Path(__file__).resolve().parent)) + +from publish.junit import JUnitTreeOrParseError +from publish.trx import parse_trx_files, is_trx +from test_junit import JUnitXmlParseTest + +test_files_path = pathlib.Path(__file__).resolve().parent / 'files' / 'trx' + + +class TestTrx(unittest.TestCase, JUnitXmlParseTest): + maxDiff = None + + @property + def test(self): + return self + + def is_supported(self, path: str) -> bool: + return is_trx(path) + + @staticmethod + def _test_files_path() -> pathlib.Path: + return test_files_path + + @staticmethod + def get_test_files() -> List[str]: + return glob(str(test_files_path / '**' / '*.trx'), recursive=True) + + @staticmethod + def parse_file(filename) -> JUnitTreeOrParseError: + return list(parse_trx_files([filename], False))[0][1] + + +if __name__ == "__main__": + TestTrx.update_expectations() diff --git a/python/test/test_unittestresults.py b/python/test/test_unittestresults.py new file mode 100644 index 0000000..565c0f3 --- /dev/null +++ b/python/test/test_unittestresults.py @@ -0,0 +1,675 @@ +import unittest +import dataclasses +from typing import List +from xml.etree.ElementTree import ParseError as XmlParseError + +from publish.unittestresults import get_test_results, get_stats, get_stats_delta, \ + 
ParsedUnitTestResults, ParsedUnitTestResultsWithCommit, \ + UnitTestCase, UnitTestResults, UnitTestSuite, create_unit_test_case_results, \ + UnitTestRunResults, UnitTestRunDeltaResults, ParseError +from test_utils import d, n + +errors = [ParseError('file', 'error', exception=ValueError("Invalid value"))] +errors_dict = [{k: v + for k, v in dataclasses.asdict(e.without_exception()).items() + if v is not None} + for e in errors] + + +def create_unit_test_run_results(files=1, + errors: List[ParseError] = [], + suites=2, + suite_details=None, + duration=3, + tests=22, tests_succ=4, tests_skip=5, tests_fail=6, tests_error=7, + runs=38, runs_succ=8, runs_skip=9, runs_fail=10, runs_error=11, + commit='commit') -> UnitTestRunResults: + return UnitTestRunResults( + files=files, + errors=list(errors), + suites=suites, + suite_details=suite_details, + duration=duration, + tests=tests, tests_succ=tests_succ, tests_skip=tests_skip, tests_fail=tests_fail, tests_error=tests_error, + runs=runs, runs_succ=runs_succ, runs_skip=runs_skip, runs_fail=runs_fail, runs_error=runs_error, + commit=commit + ) + + +def create_unit_test_run_delta_results(files=1, files_delta=-1, + errors=[], + suites=2, suites_delta=-2, + duration=3, duration_delta=-3, + tests=4, tests_delta=-4, + tests_succ=5, tests_succ_delta=-5, + tests_skip=6, tests_skip_delta=-6, + tests_fail=7, tests_fail_delta=-7, + tests_error=8, tests_error_delta=-8, + runs=9, runs_delta=-9, + runs_succ=10, runs_succ_delta=-10, + runs_skip=11, runs_skip_delta=-11, + runs_fail=12, runs_fail_delta=-12, + runs_error=13, runs_error_delta=-13) -> UnitTestRunDeltaResults: + return UnitTestRunDeltaResults( + files={'number': files, 'delta': files_delta}, + errors=errors, + suites={'number': suites, 'delta': suites_delta}, + duration={'duration': duration, 'delta': duration_delta}, + tests={'number': tests, 'delta': tests_delta}, tests_succ={'number': tests_succ, 'delta': tests_succ_delta}, tests_skip={'number': tests_skip, 'delta': tests_skip_delta}, tests_fail={'number': tests_fail, 'delta': tests_fail_delta}, tests_error={'number': tests_error, 'delta': tests_error_delta}, + runs={'number': runs, 'delta': runs_delta}, runs_succ={'number': runs_succ, 'delta': runs_succ_delta}, runs_skip={'number': runs_skip, 'delta': runs_skip_delta}, runs_fail={'number': runs_fail, 'delta': runs_fail_delta}, runs_error={'number': runs_error, 'delta': runs_error_delta}, + commit='commit', + reference_type='type', reference_commit='ref' + ) + + +class TestUnitTestResults(unittest.TestCase): + details = [UnitTestSuite('suite', 7, 3, 2, 1, 'std-out', 'std-err')] + + def test_parse_error_from_xml_parse_error(self): + error = XmlParseError('xml parse error') + error.code = 123 + error.position = (1, 2) + actual = ParseError.from_exception('file', error) + expected = ParseError('file', 'xml parse error', 1, 2, exception=error) + self.assertEqual(expected, actual) + + def test_parse_error_from_file_not_found(self): + error = FileNotFoundError(2, 'No such file or directory') + error.filename = 'some file path' + actual = ParseError.from_exception('file', error) + expected = ParseError('file', "[Errno 2] No such file or directory: 'some file path'", exception=error) + self.assertEqual(expected, actual) + + def test_parse_error_from_error(self): + error = ValueError('error') + actual = ParseError.from_exception('file', error) + expected = ParseError('file', 'error', exception=error) + self.assertEqual(expected, actual) + + def test_parse_error_with_exception(self): + error = 
ValueError('error') + actual = ParseError.from_exception('file', error) + expected = ParseError('file', 'error', exception=None) + self.assertEqual(expected, actual.without_exception()) + + def test_parsed_unit_test_results_with_commit(self): + self.assertEqual( + ParsedUnitTestResultsWithCommit( + files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=[ + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7), + ], + commit='commit sha' + ), + ParsedUnitTestResults( + files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=[ + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', 
stderr='stderr7', time=7), + ] + ).with_commit('commit sha') + ) + + def test_unit_test_run_results_without_exception(self): + results = create_unit_test_run_results(errors=errors) + self.assertEqual(create_unit_test_run_results(errors=[error.without_exception() for error in errors]), + results.without_exceptions()) + + def test_unit_test_run_results_without_suite_details(self): + suite = UnitTestSuite('suite', 7, 3, 2, 1, 'stdout', 'stderr') + results = create_unit_test_run_results(suite_details=[suite]) + self.assertEqual(create_unit_test_run_results(suite_details=None), + results.without_suite_details()) + + def test_unit_test_run_delta_results_without_exception(self): + results = create_unit_test_run_delta_results(errors=errors) + self.assertEqual(create_unit_test_run_delta_results(errors=[error.without_exception() for error in errors]), + results.without_exceptions()) + + def test_unit_test_run_results_to_dict(self): + actual = UnitTestRunResults( + files=1, errors=errors, suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ).to_dict() + expected = dict( + files=1, errors=errors_dict, suites=2, duration=3, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ) + self.assertEqual(expected, actual) + + # results from dicts usually do not contain errors + def test_unit_test_run_results_from_dict(self): + actual = UnitTestRunResults.from_dict(dict( + files=1, errors=errors_dict, suites=2, duration=3, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + )) + expected = UnitTestRunResults( + files=1, errors=errors_dict, suites=2, duration=3, suite_details=None, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ) + self.assertEqual(expected, actual) + + def test_unit_test_run_results_from_dict_without_errors(self): + actual = UnitTestRunResults.from_dict(dict( + files=1, suites=2, duration=3, suite_details=self.details, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + )) + expected = UnitTestRunResults( + files=1, errors=[], suites=2, duration=3, suite_details=None, + tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8, + runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13, + commit='commit' + ) + self.assertEqual(expected, actual) + + def test_get_test_results_with_empty_cases(self): + self.assertEqual(get_test_results(ParsedUnitTestResultsWithCommit( + files=0, + errors=[], + suites=0, suite_tests=0, suite_skipped=0, suite_failures=0, suite_errors=0, suite_time=0, suite_details=self.details, + cases=[], + commit='commit' + ), False), UnitTestResults( + files=0, + errors=[], + suites=0, suite_tests=0, suite_skipped=0, suite_failures=0, suite_errors=0, suite_time=0, suite_details=self.details, + cases=0, cases_skipped=0, cases_failures=0, cases_errors=0, cases_time=0, case_results=create_unit_test_case_results(), + tests=0, tests_skipped=0, tests_failures=0, tests_errors=0, + commit='commit' + )) + + def test_get_test_results(self): + self.assertEqual(get_test_results(ParsedUnitTestResultsWithCommit( + 
files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=[ + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7), + ], + commit='commit' + ), False), UnitTestResults( + files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=7, cases_skipped=2, cases_failures=3, cases_errors=1, cases_time=28, + case_results=create_unit_test_case_results({ + (None, 'class1', 'test1'): dict(success=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1)]), + (None, 'class1', 'test2'): dict(skipped=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2)]), + (None, 'class1', 'test3'): dict(failure=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3)]), + (None, 'class2', 'test1'): dict(error=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4)]), + (None, 'class2', 'test2'): dict(skipped=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5)]), + (None, 'class2', 'test3'): dict(failure=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6)]), + (None, 'class2', 'test4'): dict(failure=[UnitTestCase(result_file='result', test_file='test', line=123, 
class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7)]), + }), + tests=7, tests_skipped=2, tests_failures=3, tests_errors=1, + commit='commit' + )) + + def test_get_test_results_with_multiple_runs(self): + self.assertEqual(get_test_results(ParsedUnitTestResultsWithCommit( + files=1, + errors=[], + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=[ + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + + # success state has precedence over skipped + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='success', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + + # only when all runs are skipped, test has state skipped + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='skipped', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), + + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test4', result='success', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test4', result='failure', message='message8', content='content8', stdout='stdout8', stderr='stderr8', time=8), + + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test5', result='success', message='message9', content='content9', stdout='stdout9', stderr='stderr9', time=9), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test5', result='error', message='message10', content='content10', stdout='stdout10', stderr='stderr10', time=10), + ], + commit='commit' + ), False), UnitTestResults( + files=1, + errors=[], + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=10, cases_skipped=3, cases_failures=1, cases_errors=1, cases_time=55, + case_results=create_unit_test_case_results({ + (None, 'class1', 'test1'): dict(success=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2)]), + (None, 'class1', 'test2'): 
dict(success=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='success', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3)], skipped=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4)]), + (None, 'class1', 'test3'): dict(skipped=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='skipped', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6)]), + (None, 'class1', 'test4'): dict(success=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test4', result='success', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7)], failure=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test4', result='failure', message='message8', content='content8', stdout='stdout8', stderr='stderr8', time=8)]), + (None, 'class1', 'test5'): dict(success=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test5', result='success', message='message9', content='content9', stdout='stdout9', stderr='stderr9', time=9)], error=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test5', result='error', message='message10', content='content10', stdout='stdout10', stderr='stderr10', time=10)]), + }), + tests=5, tests_skipped=1, tests_failures=1, tests_errors=1, + commit='commit' + )) + + def test_get_test_results_with_duplicate_class_names(self): + with_duplicates = ParsedUnitTestResultsWithCommit( + files=1, + errors=[], + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=[ + UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test1', result='success', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + + # success state has precedence over skipped + UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test2', result='success', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test2', result='skipped', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + + # only when all runs are skipped, test has state skipped + UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test3', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test3', result='skipped', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), 
+ + UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test4', result='success', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7), + UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test4', result='failure', message='message8', content='content8', stdout='stdout8', stderr='stderr8', time=8), + + UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test5', result='success', message='message9', content='content9', stdout='stdout9', stderr='stderr9', time=9), + UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test5', result='error', message='message10', content='content10', stdout='stdout10', stderr='stderr10', time=10), + ], + commit='commit' + ) + + self.assertEqual(get_test_results(with_duplicates, False), UnitTestResults( + files=1, + errors=[], + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=10, cases_skipped=3, cases_failures=1, cases_errors=1, cases_time=55, + case_results=create_unit_test_case_results({ + (None, 'class1', 'test1'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test1', result='success', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2)]), + (None, 'class1', 'test2'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test2', result='success', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3)], skipped=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test2', result='skipped', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4)]), + (None, 'class1', 'test3'): dict(skipped=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test3', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test3', result='skipped', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6)]), + (None, 'class1', 'test4'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test4', result='success', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7)], failure=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test4', result='failure', message='message8', content='content8', stdout='stdout8', stderr='stderr8', time=8)]), + (None, 'class1', 'test5'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test5', result='success', message='message9', content='content9', stdout='stdout9', stderr='stderr9', time=9)], error=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test5', result='error', message='message10', content='content10', stdout='stdout10', stderr='stderr10', time=10)]), + }), + 
tests=5, tests_skipped=1, tests_failures=1, tests_errors=1, + commit='commit' + )) + + self.assertEqual(get_test_results(with_duplicates, True), UnitTestResults( + files=1, + errors=[], + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=10, cases_skipped=3, cases_failures=1, cases_errors=1, cases_time=55, + case_results=create_unit_test_case_results({ + ('test1', 'class1', 'test1'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1)]), + ('test2', 'class1', 'test1'): dict(success=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test1', result='success', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2)]), + ('test1', 'class1', 'test2'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test2', result='success', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3)]), + ('test2', 'class1', 'test2'): dict(skipped=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test2', result='skipped', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4)]), + ('test1', 'class1', 'test3'): dict(skipped=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test3', result='skipped', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5)]), + ('test2', 'class1', 'test3'): dict(skipped=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test3', result='skipped', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6)]), + ('test1', 'class1', 'test4'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test4', result='success', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7)]), + ('test2', 'class1', 'test4'): dict(failure=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test4', result='failure', message='message8', content='content8', stdout='stdout8', stderr='stderr8', time=8)]), + ('test1', 'class1', 'test5'): dict(success=[UnitTestCase(result_file='result', test_file='test1', line=123, class_name='class1', test_name='test5', result='success', message='message9', content='content9', stdout='stdout9', stderr='stderr9', time=9)]), + ('test2', 'class1', 'test5'): dict(error=[UnitTestCase(result_file='result', test_file='test2', line=123, class_name='class1', test_name='test5', result='error', message='message10', content='content10', stdout='stdout10', stderr='stderr10', time=10)]), + }), + tests=10, tests_skipped=3, tests_failures=1, tests_errors=1, + commit='commit' + )) + + def test_get_test_results_with_some_nones(self): + self.assertEqual(get_test_results(ParsedUnitTestResultsWithCommit( + files=1, + errors=[], + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=[ + UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', 
time=1), + UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=None), + UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=2), + UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='skipped', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=None), + ], + commit='commit' + ), False), UnitTestResults( + files=1, + errors=[], + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=4, cases_skipped=2, cases_failures=1, cases_errors=0, cases_time=3, + case_results=create_unit_test_case_results({ + (None, 'class', 'test1'): dict(success=[UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1)], skipped=[UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test1', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=None)]), + (None, 'class', 'test2'): dict(failure=[UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=2)], skipped=[UnitTestCase(result_file='result', test_file=None, line=None, class_name='class', test_name='test2', result='skipped', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=None)]), + }), + tests=2, tests_skipped=0, tests_failures=1, tests_errors=0, + commit='commit' + )) + + def test_get_test_results_with_disabled_cases(self): + self.assertEqual(get_test_results(ParsedUnitTestResultsWithCommit( + files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=[ + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='disabled', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5), + UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6), + UnitTestCase(result_file='result', 
test_file='test', line=123, class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7), + ], + commit='commit' + ), False), UnitTestResults( + files=1, + errors=errors, + suites=2, suite_tests=3, suite_skipped=4, suite_failures=5, suite_errors=6, suite_time=7, suite_details=self.details, + cases=7, cases_skipped=2, cases_failures=3, cases_errors=1, cases_time=28, + case_results=create_unit_test_case_results({ + (None, 'class1', 'test1'): dict(success=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test1', result='success', message='message1', content='content1', stdout='stdout1', stderr='stderr1', time=1)]), + (None, 'class1', 'test2'): dict(skipped=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test2', result='skipped', message='message2', content='content2', stdout='stdout2', stderr='stderr2', time=2)]), + (None, 'class1', 'test3'): dict(failure=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class1', test_name='test3', result='failure', message='message3', content='content3', stdout='stdout3', stderr='stderr3', time=3)]), + (None, 'class2', 'test1'): dict(error=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test1', result='error', message='message4', content='content4', stdout='stdout4', stderr='stderr4', time=4)]), + (None, 'class2', 'test2'): dict(skipped=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test2', result='disabled', message='message5', content='content5', stdout='stdout5', stderr='stderr5', time=5)]), + (None, 'class2', 'test3'): dict(failure=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test3', result='failure', message='message6', content='content6', stdout='stdout6', stderr='stderr6', time=6)]), + (None, 'class2', 'test4'): dict(failure=[UnitTestCase(result_file='result', test_file='test', line=123, class_name='class2', test_name='test4', result='failure', message='message7', content='content7', stdout='stdout7', stderr='stderr7', time=7)]), + }), + tests=7, tests_skipped=2, tests_failures=3, tests_errors=1, + commit='commit' + )) + + def test_get_stats(self): + self.assertEqual(get_stats(UnitTestResults( + files=1, + errors=errors, + + suites=2, + suite_tests=20, + suite_skipped=5, + suite_failures=6, + suite_errors=7, + suite_time=3, + suite_details=self.details, + + cases=40, + cases_skipped=11, + cases_failures=12, + cases_errors=13, + cases_time=4, + case_results=create_unit_test_case_results(), + + tests=30, + tests_skipped=8, + tests_failures=9, + tests_errors=10, + + commit='commit' + )), UnitTestRunResults( + files=1, + errors=errors, + suites=2, + duration=3, + + suite_details=self.details, + + tests=30, + tests_succ=3, + tests_skip=8, + tests_fail=9, + tests_error=10, + + runs=20, + runs_succ=2, + runs_skip=5, + runs_fail=6, + runs_error=7, + + commit='commit' + )) + + def test_get_stats_delta(self): + self.assertEqual(get_stats_delta(UnitTestRunResults( + files=1, + errors=errors, + suites=2, + duration=3, + suite_details=self.details, + + tests=20, + tests_succ=2, + tests_skip=5, + tests_fail=6, + tests_error=7, + + runs=40, + runs_succ=12, + runs_skip=8, + runs_fail=9, + runs_error=10, + + commit='commit' + ), UnitTestRunResults( + files=3, + errors=[ParseError('other file', 'other error')], + suites=5, + 
duration=7, + + suite_details=self.details, + + tests=41, + tests_succ=5, + tests_skip=11, + tests_fail=13, + tests_error=15, + + runs=81, + runs_succ=25, + runs_skip=17, + runs_fail=19, + runs_error=21, + + commit='ref' + ), 'type'), UnitTestRunDeltaResults( + files=n(1, -2), + errors=errors, + suites=n(2, -3), + duration=d(3, -4), + + tests=n(20, -21), + tests_succ=n(2, -3), + tests_skip=n(5, -6), + tests_fail=n(6, -7), + tests_error=n(7, -8), + + runs=n(40, -41), + runs_succ=n(12, -13), + runs_skip=n(8, -9), + runs_fail=n(9, -10), + runs_error=n(10, -11), + + commit='commit', + reference_commit='ref', + reference_type='type' + )) + + def test_unit_test_run_results_is_different(self): + stats = create_unit_test_run_results() + create_other = create_unit_test_run_results + for diff, other, expected in [('nothing', create_other(), False), + ('files', create_other(files=stats.files+1), True), + ('errors', create_other(errors=errors), False), + ('suites', create_other(suites=stats.suites+1), True), + ('duration', create_other(duration=stats.duration+1), False), + ('tests', create_other(tests=stats.tests+1), True), + ('test success', create_other(tests_succ=stats.tests_succ+1), True), + ('test skips', create_other(tests_skip=stats.tests_skip+1), True), + ('test failures', create_other(tests_fail=stats.tests_fail+1), True), + ('test errors', create_other(tests_error=stats.tests_error+1), True), + ('runs', create_other(runs=stats.runs+1), True), + ('runs success', create_other(runs_succ=stats.runs_succ+1), True), + ('runs skips', create_other(runs_skip=stats.runs_skip+1), True), + ('runs failures', create_other(runs_fail=stats.runs_fail+1), True), + ('runs errors', create_other(runs_error=stats.runs_error+1), True), + ('commit', create_other(commit='other'), False)]: + with self.subTest(different_in=diff): + self.assertEqual(expected, stats.is_different(other), msg=diff) + + def test_unit_test_run_results_is_different_in_failures(self): + stats = create_unit_test_run_results() + create_other = create_unit_test_run_results + for diff, other, expected in [('nothing', create_other(), False), + ('files', create_other(files=stats.files+1), False), + ('errors', create_other(errors=errors), False), + ('suites', create_other(suites=stats.suites+1), False), + ('duration', create_other(duration=stats.duration+1), False), + ('tests', create_other(tests=stats.tests+1), False), + ('test success', create_other(tests_succ=stats.tests_succ+1), False), + ('test skips', create_other(tests_skip=stats.tests_skip+1), False), + ('test failures', create_other(tests_fail=stats.tests_fail+1), True), + ('test errors', create_other(tests_error=stats.tests_error+1), False), + ('runs', create_other(runs=stats.runs+1), False), + ('runs success', create_other(runs_succ=stats.runs_succ+1), False), + ('runs skips', create_other(runs_skip=stats.runs_skip+1), False), + ('runs failures', create_other(runs_fail=stats.runs_fail+1), True), + ('runs errors', create_other(runs_error=stats.runs_error+1), False), + ('commit', create_other(commit='other'), False)]: + with self.subTest(different_in=diff): + self.assertEqual(expected, stats.is_different_in_failures(other), msg=diff) + + def test_unit_test_run_results_is_different_in_errors(self): + stats = create_unit_test_run_results() + create_other = create_unit_test_run_results + for diff, other, expected in [('nothing', create_other(), False), + ('files', create_other(files=stats.files+1), False), + ('errors', create_other(errors=errors), False), + ('suites', 
create_other(suites=stats.suites+1), False), + ('duration', create_other(duration=stats.duration+1), False), + ('tests', create_other(tests=stats.tests+1), False), + ('test success', create_other(tests_succ=stats.tests_succ+1), False), + ('test skips', create_other(tests_skip=stats.tests_skip+1), False), + ('test failures', create_other(tests_fail=stats.tests_fail+1), False), + ('test errors', create_other(tests_error=stats.tests_error+1), True), + ('runs', create_other(runs=stats.runs+1), False), + ('runs success', create_other(runs_succ=stats.runs_succ+1), False), + ('runs skips', create_other(runs_skip=stats.runs_skip+1), False), + ('runs failures', create_other(runs_fail=stats.runs_fail+1), False), + ('runs errors', create_other(runs_error=stats.runs_error+1), True), + ('commit', create_other(commit='other'), False)]: + with self.subTest(different_in=diff): + self.assertEqual(expected, stats.is_different_in_errors(other), msg=diff) + + def test_unit_test_run_results_has_failures(self): + def create_stats(errors=[], tests_fail=0, tests_error=0, runs_fail=0, runs_error=0) -> UnitTestRunResults: + return create_unit_test_run_results(errors=errors, tests_fail=tests_fail, tests_error=tests_error, runs_fail=runs_fail, runs_error=runs_error) + + for label, stats, expected in [('no failures', create_stats(), False), + ('errors', create_stats(errors=errors), False), + ('test failures', create_stats(tests_fail=1), True), + ('test errors', create_stats(tests_error=1), False), + ('runs failures', create_stats(runs_fail=1), True), + ('runs errors', create_stats(runs_error=1), False)]: + with self.subTest(msg=label): + self.assertEqual(stats.has_failures, expected, msg=label) + + def test_unit_test_run_results_has_errors(self): + def create_stats(errors=[], tests_fail=0, tests_error=0, runs_fail=0, runs_error=0) -> UnitTestRunResults: + return create_unit_test_run_results(errors=errors, tests_fail=tests_fail, tests_error=tests_error, runs_fail=runs_fail, runs_error=runs_error) + + for label, stats, expected in [('no errors', create_stats(), False), + ('errors', create_stats(errors=errors), True), + ('test failures', create_stats(tests_fail=1), False), + ('test errors', create_stats(tests_error=1), True), + ('runs failures', create_stats(runs_fail=1), False), + ('runs errors', create_stats(runs_error=1), True)]: + with self.subTest(msg=label): + self.assertEqual(stats.has_errors, expected, msg=label) + + def test_unit_test_run_delta_results_has_changes(self): + def create_stats_with_delta(files_delta=0, + suites_delta=0, + duration_delta=0, + tests_delta=0, + tests_succ_delta=0, + tests_skip_delta=0, + tests_fail_delta=0, + tests_error_delta=0, + runs_delta=0, + runs_succ_delta=0, + runs_skip_delta=0, + runs_fail_delta=0, + runs_error_delta=0) -> UnitTestRunDeltaResults: + return create_unit_test_run_delta_results(files_delta=files_delta, suites_delta=suites_delta, duration_delta=duration_delta, + tests_delta=tests_delta, tests_succ_delta=tests_succ_delta, tests_skip_delta=tests_skip_delta, tests_fail_delta=tests_fail_delta, tests_error_delta=tests_error_delta, + runs_delta=runs_delta, runs_succ_delta=runs_succ_delta, runs_skip_delta=runs_skip_delta, runs_fail_delta=runs_fail_delta, runs_error_delta=runs_error_delta) + + for label, stats, expected in [('no deltas', create_stats_with_delta(), False), + ('files', create_stats_with_delta(files_delta=1), True), + ('suites', create_stats_with_delta(suites_delta=1), True), + ('duration', create_stats_with_delta(duration_delta=1), False), + ('tests', 
create_stats_with_delta(tests_delta=1), True),
+                                       ('tests succ', create_stats_with_delta(tests_succ_delta=1), True),
+                                       ('tests skip', create_stats_with_delta(tests_skip_delta=1), True),
+                                       ('tests fail', create_stats_with_delta(tests_fail_delta=1), True),
+                                       ('tests error', create_stats_with_delta(tests_error_delta=1), True),
+                                       ('runs', create_stats_with_delta(runs_delta=1), True),
+                                       ('runs succ', create_stats_with_delta(runs_succ_delta=1), True),
+                                       ('runs skip', create_stats_with_delta(runs_skip_delta=1), True),
+                                       ('runs fail', create_stats_with_delta(runs_fail_delta=1), True),
+                                       ('runs error', create_stats_with_delta(runs_error_delta=1), True)]:
+            with self.subTest(msg=label):
+                self.assertEqual(stats.has_changes, expected, msg=label)
+
+    def test_unit_test_run_delta_results_has_failures(self):
+        def create_delta_stats(errors=[], tests_fail=0, tests_error=0, runs_fail=0, runs_error=0) -> UnitTestRunDeltaResults:
+            return create_unit_test_run_delta_results(errors=errors, tests_fail=tests_fail, tests_error=tests_error, runs_fail=runs_fail, runs_error=runs_error)
+
+        for label, stats, expected in [('no failures', create_delta_stats(), False),
+                                       ('errors', create_delta_stats(errors=errors), False),
+                                       ('test failures', create_delta_stats(tests_fail=1), True),
+                                       ('test errors', create_delta_stats(tests_error=1), False),
+                                       ('runs failures', create_delta_stats(runs_fail=1), True),
+                                       ('runs errors', create_delta_stats(runs_error=1), False)]:
+            with self.subTest(msg=label):
+                self.assertEqual(stats.has_failures, expected, msg=label)
+
+    def test_unit_test_run_delta_results_has_errors(self):
+        def create_delta_stats(errors=[], tests_fail=0, tests_error=0, runs_fail=0, runs_error=0) -> UnitTestRunDeltaResults:
+            return create_unit_test_run_delta_results(errors=errors, tests_fail=tests_fail, tests_error=tests_error, runs_fail=runs_fail, runs_error=runs_error)
+
+        for label, stats, expected in [('no errors', create_delta_stats(), False),
+                                       ('errors', create_delta_stats(errors=errors), True),
+                                       ('test failures', create_delta_stats(tests_fail=1), False),
+                                       ('test errors', create_delta_stats(tests_error=1), True),
+                                       ('runs failures', create_delta_stats(runs_fail=1), False),
+                                       ('runs errors', create_delta_stats(runs_error=1), True)]:
+            with self.subTest(msg=label):
+                self.assertEqual(stats.has_errors, expected, msg=label)
+
+    def test_unit_test_run_delta_results_without_delta(self):
+        with_deltas = create_unit_test_run_delta_results(files=1, errors=errors, suites=2, duration=3,
+                                                         tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8,
+                                                         runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13)
+        without_deltas = with_deltas.without_delta()
+        expected = create_unit_test_run_results(files=1, errors=errors, suites=2, duration=3,
+                                                tests=4, tests_succ=5, tests_skip=6, tests_fail=7, tests_error=8,
+                                                runs=9, runs_succ=10, runs_skip=11, runs_fail=12, runs_error=13)
+        self.assertEqual(expected, without_deltas)
diff --git a/python/test/test_utils.py b/python/test/test_utils.py
new file mode 100644
index 0000000..b6d29f0
--- /dev/null
+++ b/python/test/test_utils.py
@@ -0,0 +1,72 @@
+# Copyright 2020 G-Research
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import locale
+import os
+from contextlib import contextmanager
+from typing import Any, Optional
+
+
+def n(number, delta=None):
+    if delta is None:
+        return dict(number=number)
+    return dict(number=number, delta=delta)
+
+
+def d(duration, delta=None):
+    if delta is None:
+        return dict(duration=duration)
+    return dict(duration=duration, delta=delta)
+
+
+@contextmanager
+def temp_locale(encoding: Optional[str]) -> Any:
+    if encoding is None:
+        res = yield
+        return res
+
+    old_locale = locale.setlocale(locale.LC_ALL)
+    encodings = [
+        f'{encoding}.utf8', f'{encoding}.utf-8',
+        f'{encoding}.UTF8', f'{encoding}.UTF-8',
+        encoding
+    ]
+
+    locale_set = False
+    for candidate in encodings:
+        try:
+            locale.setlocale(locale.LC_ALL, candidate)
+            locale_set = True
+            break
+        except locale.Error:
+            pass
+
+    if not locale_set:
+        raise ValueError(f'Could not set any of these locales: {", ".join(encodings)}')
+
+    try:
+        res = yield
+    finally:
+        locale.setlocale(locale.LC_ALL, old_locale)
+    return res
+
+
+@contextmanager
+def chdir(path: str):
+    cwd = os.getcwd()
+    os.chdir(path)
+    try:
+        yield
+    finally:
+        os.chdir(cwd)
diff --git a/python/test/test_xunit.py b/python/test/test_xunit.py
new file mode 100644
index 0000000..b37bebf
--- /dev/null
+++ b/python/test/test_xunit.py
@@ -0,0 +1,42 @@
+import pathlib
+import sys
+import unittest
+from glob import glob
+from typing import List
+
+sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent))
+sys.path.append(str(pathlib.Path(__file__).resolve().parent.parent.parent))
+
+from publish.junit import JUnitTreeOrParseError
+from publish.xunit import parse_xunit_files, is_xunit
+from test_junit import JUnitXmlParseTest
+
+
+test_files_path = pathlib.Path(__file__).resolve().parent / 'files' / 'xunit'
+
+
+class TestXunit(unittest.TestCase, JUnitXmlParseTest):
+    maxDiff = None
+
+    @property
+    def test(self):
+        return self
+
+    def is_supported(self, path: str) -> bool:
+        return is_xunit(path)
+
+    @staticmethod
+    def _test_files_path():
+        return test_files_path
+
+    @staticmethod
+    def get_test_files() -> List[str]:
+        return glob(str(test_files_path / '**' / '*.xml'), recursive=True)
+
+    @staticmethod
+    def parse_file(filename) -> JUnitTreeOrParseError:
+        return list(parse_xunit_files([filename], False))[0][1]
+
+
+if __name__ == "__main__":
+    TestXunit.update_expectations()