Standard scripts copied from generic-track repo
colinleach committed Sep 14, 2024
1 parent a0ae8a1 commit 4de209d
Showing 4 changed files with 301 additions and 0 deletions.
84 changes: 84 additions & 0 deletions bin/add-practice-exercise
@@ -0,0 +1,84 @@
#!/usr/bin/env bash

# Synopsis:
# Scaffold the files for a new practice exercise.
# After creating the exercise, follow the instructions in the output.

# Example:
# bin/add-practice-exercise two-fer

# Example with difficulty:
# bin/add-practice-exercise -d 5 two-fer

# Example with author and difficulty:
# bin/add-practice-exercise -a foo -d 3 two-fer

set -euo pipefail
scriptname=$0

help_and_exit() {
echo >&2 "Scaffold the files for a new practice exercise."
echo >&2 "Usage: ${scriptname} [-h] [-a author] [-d difficulty] <exercise-slug>"
echo >&2 "Where: author is the GitHub username of the exercise creator."
echo >&2 "Where: difficulty is between 1 (easiest) to 10 (hardest)."
exit 1
}

die() { echo >&2 "$*"; exit 1; }

required_tool() {
command -v "${1}" >/dev/null 2>&1 ||
die "${1} is required but not installed. Please install it and make sure it's in your PATH."
}

require_files_template() {
jq -e --arg key "${1}" '.files[$key] | length > 0' config.json > /dev/null ||
die "The '.files.${1}' array in the 'config.json' file is empty. Please add at least one file. See https://exercism.org/docs/building/tracks/config-json#h-files for more information."
}

required_tool jq

require_files_template "solution"
require_files_template "test"
require_files_template "example"

[[ -f ./bin/fetch-configlet ]] || die "Run this script from the repo's root directory."

author=''
difficulty='1'
while getopts :ha:d: opt; do
case $opt in
h) help_and_exit ;;
a) author=$OPTARG ;;
d) difficulty=$OPTARG ;;
?) echo >&2 "Unknown option: -$OPTARG"; help_and_exit ;;
esac
done
shift "$((OPTIND - 1))"

(( $# >= 1 )) || help_and_exit

slug="${1}"

if [[ -z "${author}" ]]; then
read -rp 'Your GitHub username: ' author
fi

./bin/fetch-configlet
./bin/configlet create --practice-exercise "${slug}" --author "${author}" --difficulty "${difficulty}"

exercise_dir="exercises/practice/${slug}"
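# Build a map from each .files key to its path(s) under the new exercise dir,
# single-quoted and joined with " and ", e.g. (illustrative filename):
#   { "solution": "'exercises/practice/two-fer/two_fer.ext'", ... }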
files=$(jq -r --arg dir "${exercise_dir}" '.files | to_entries | map({key: .key, value: (.value | map("'"'"'" + $dir + "/" + . + "'"'"'") | join(" and "))}) | from_entries' "${exercise_dir}/.meta/config.json")

cat << NEXT_STEPS
Your next steps are:
- Create the test suite in $(jq -r '.test' <<< "${files}")
- The tests should be based on the canonical data at 'https://github.com/exercism/problem-specifications/blob/main/exercises/${slug}/canonical-data.json'
- Mark any test cases you don't implement with "include = false" in 'exercises/practice/${slug}/.meta/tests.toml'
- Create the example solution in $(jq -r '.example' <<< "${files}")
- Verify the example solution passes the tests by running 'bin/verify-exercises ${slug}'
- Create the stub solution in $(jq -r '.solution' <<< "${files}")
- Update the 'difficulty' value for the exercise's entry in the 'config.json' file in the repo's root
- Check that CI will pass by running 'bin/configlet lint' and 'bin/configlet fmt'
NEXT_STEPS
31 changes: 31 additions & 0 deletions bin/fetch-configlet.ps1
@@ -0,0 +1,31 @@
# This file is a copy of the
# https://github.com/exercism/configlet/blob/main/scripts/fetch-configlet.ps1 file.
# Please submit bugfixes/improvements to the above file to ensure that all tracks
# benefit from the changes.

$ErrorActionPreference = "Stop"
$ProgressPreference = "SilentlyContinue"

$requestOpts = @{
Headers = If ($env:GITHUB_TOKEN) { @{ Authorization = "Bearer ${env:GITHUB_TOKEN}" } } Else { @{ } }
MaximumRetryCount = 3
RetryIntervalSec = 1
}

$arch = If ([Environment]::Is64BitOperatingSystem) { "x86-64" } Else { "i386" }
$fileName = "configlet_.+_windows_$arch.zip"

Function Get-DownloadUrl {
$latestUrl = "https://api.github.com/repos/exercism/configlet/releases/latest"
Invoke-RestMethod -Uri $latestUrl -PreserveAuthorizationOnRedirect @requestOpts
| Select-Object -ExpandProperty assets
| Where-Object { $_.browser_download_url -match $FileName }
| Select-Object -ExpandProperty browser_download_url
}

$downloadUrl = Get-DownloadUrl
$outputDirectory = "bin"
$outputFile = Join-Path -Path $outputDirectory -ChildPath $fileName
Invoke-WebRequest -Uri $downloadUrl -OutFile $outputFile @requestOpts
Expand-Archive $outputFile -DestinationPath $outputDirectory -Force
Remove-Item -Path $outputFile
94 changes: 94 additions & 0 deletions bin/verify-exercises
@@ -0,0 +1,94 @@
#!/usr/bin/env bash

# Synopsis:
# Verify that each exercise's example/exemplar solution passes the tests.
# You can either verify all exercises or a single exercise.

# Example: verify all exercises
# bin/verify-exercises

# Example: verify single exercise
# bin/verify-exercises two-fer

set -eo pipefail

die() { echo "$*" >&2; exit 1; }

required_tool() {
command -v "${1}" >/dev/null 2>&1 ||
die "${1} is required but not installed. Please install it and make sure it's in your PATH."
}

required_tool jq

copy_example_or_examplar_to_solution() {
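# Pair each solution stub from .meta/config.json with the matching exemplar
# (concept) or example (practice) file and copy the latter over the former,
# so the tests run against the reference implementation.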
jq -c '[.files.solution, .files.exemplar // .files.example] | transpose | map({src: .[1], dst: .[0]}) | .[]' .meta/config.json \
| while read -r src_and_dst; do
cp "$(jq -r '.src' <<< "${src_and_dst}")" "$(jq -r '.dst' <<< "${src_and_dst}")"
done
}

unskip_tests() {
# shellcheck disable=SC2034
jq -r '.files.test[]' .meta/config.json | while read -r test_file; do
noop # TODO: replace this with the command to unskip the tests.
# Note: this function runs from within an exercise directory.
# Note: the exercise directory is a temporary directory, so feel
# free to modify its (test) files as needed.
# Note: ignore this function if either:
# - skipping tests is not supported, or
# - skipping tests does not require modifying the test files.
# Example: sed -i 's/test.skip/test/g' "${test_file}"
done
}

run_tests() {
noop # TODO: replace this with the command to run the tests for the exercise.
# Note: this function runs from within an exercise directory.
# Note: the exercise directory is a temporary directory, so feel
# free to modify its files as needed.
# Note: return a zero exit code if all tests pass, otherwise non-zero.
# Example: `npm test`
# Example: `python3 -m pytest two_fer_test.py`
}

verify_exercise() {
local dir
local slug
local tmp_dir

dir=$(realpath "${1}")
slug=$(basename "${dir}")
tmp_dir=$(mktemp -d -t "exercism-verify-${slug}-XXXXX")

echo "Verifying ${slug} exercise..."

(
trap 'rm -rf "$tmp_dir"' EXIT # remove tempdir when subshell ends
cp -r "${dir}/." "${tmp_dir}"
cd "${tmp_dir}"

copy_example_or_examplar_to_solution
unskip_tests
run_tests
)
}

verify_exercises() {
local exercise_slug

exercise_slug="${1}"

shopt -s nullglob
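# The slug defaults to '*' (see the bottom of this script), so with no
# argument every concept and practice exercise is verified. With nullglob
# set, a slug that matches no exercise directory expands to nothing, the
# loop never runs, and the count check below reports the error.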
count=0
for exercise_dir in ./exercises/{concept,practice}/${exercise_slug}/; do
if [[ -d "${exercise_dir}" ]]; then
verify_exercise "${exercise_dir}"
((++count))
fi
done
((count > 0)) || die 'no matching exercises found!'
}

exercise_slug="${1:-*}"
verify_exercises "${exercise_slug}"
92 changes: 92 additions & 0 deletions bin/verify-exercises-in-docker
@@ -0,0 +1,92 @@
#!/usr/bin/env bash

# Synopsis:
# Verify that each exercise's example/exemplar solution passes the tests
# using the track's test runner Docker image.
# You can either verify all exercises or a single exercise.

# Example: verify all exercises in Docker
# bin/verify-exercises-in-docker

# Example: verify single exercise in Docker
# bin/verify-exercises-in-docker two-fer

set -eo pipefail

die() { echo "$*" >&2; exit 1; }

required_tool() {
command -v "${1}" >/dev/null 2>&1 ||
die "${1} is required but not installed. Please install it and make sure it's in your PATH."
}

required_tool docker
required_tool jq

copy_example_or_examplar_to_solution() {
jq -c '[.files.solution, .files.exemplar // .files.example] | transpose | map({src: .[1], dst: .[0]}) | .[]' .meta/config.json \
| while read -r src_and_dst; do
cp "$(jq -r '.src' <<< "${src_and_dst}")" "$(jq -r '.dst' <<< "${src_and_dst}")"
done
}

pull_docker_image() {
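# Note: '{{SLUG}}' is a template placeholder for the track's slug and must be
# substituted (e.g. exercism/python-test-runner) before the image can be
# pulled or run.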
# shellcheck disable=SC1083
docker pull exercism/{{SLUG}}-test-runner ||
die $'Could not find the `exercism/{{SLUG}}-test-runner` Docker image.\nCheck the test runner docs at https://exercism.org/docs/building/tooling/test-runners for more information.'
}

run_tests() {
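# Run the test runner image against the copied solution: the runner takes the
# exercise slug, a solution directory and an output directory, and writes a
# results.json whose top-level "status" is "pass" when all tests pass.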
local slug
slug="${1}"

# shellcheck disable=SC1083
docker run \
--rm \
--network none \
--read-only \
--mount type=bind,src="${PWD}",dst=/solution \
--mount type=bind,src="${PWD}",dst=/output \
--mount type=tmpfs,dst=/tmp \
exercism/{{SLUG}}-test-runner "${slug}" /solution /output
jq -e '.status == "pass"' "${PWD}/results.json" >/dev/null 2>&1
}

verify_exercise() {
local dir
local slug
local tmp_dir
dir=$(realpath "${1}")
slug=$(basename "${dir}")
tmp_dir=$(mktemp -d -t "exercism-verify-${slug}-XXXXX")

echo "Verifying ${slug} exercise..."

(
trap 'rm -rf "$tmp_dir"' EXIT # remove tempdir when subshell ends
cp -r "${dir}/." "${tmp_dir}"
cd "${tmp_dir}"

copy_example_or_examplar_to_solution
run_tests "${slug}"
)
}

verify_exercises() {
local exercise_slug
exercise_slug="${1}"

shopt -s nullglob
count=0
for exercise_dir in ./exercises/{concept,practice}/${exercise_slug}/; do
if [[ -d "${exercise_dir}" ]]; then
verify_exercise "${exercise_dir}"
((++count))
fi
done
((count > 0)) || die 'no matching exercises found!'
}

pull_docker_image

exercise_slug="${1:-*}"
verify_exercises "${exercise_slug}"
