diff --git a/.github/workflows/backend.yml b/.github/workflows/backend.yml index 189bf44b6f..566dbc4f2a 100644 --- a/.github/workflows/backend.yml +++ b/.github/workflows/backend.yml @@ -35,15 +35,14 @@ jobs: github.com:443 md-hdd-t032zjxllntc.z26.blob.storage.azure.net:443 objects.githubusercontent.com:443 - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: Setup dotnet uses: actions/setup-dotnet@6bd8b7f7774af54e05809fcc5431931b3eb1ddee # v4.0.1 with: dotnet-version: ${{ matrix.dotnet }} - name: Install ffmpeg uses: FedericoCarboni/setup-ffmpeg@36c6454b5a2348e7794ba2d82a21506605921e3d # v3 - - # Coverage. - name: Run coverage tests run: dotnet test Backend.Tests/Backend.Tests.csproj shell: bash @@ -54,19 +53,16 @@ jobs: name: coverage path: Backend.Tests/coverage.cobertura.xml retention-days: 7 - - # Development build. - - run: dotnet build BackendFramework.sln - - # Release build. - - run: dotnet publish BackendFramework.sln - - # Fmt. - - run: dotnet format --verify-no-changes + - name: Development build + run: dotnet build BackendFramework.sln + - name: Release build + run: dotnet publish BackendFramework.sln + - name: Format check + run: dotnet format --verify-no-changes upload_coverage: needs: test_build - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. @@ -77,12 +73,10 @@ jobs: egress-policy: block allowed-endpoints: > api.github.com:443 - api.codecov.io:443 cli.codecov.io:443 - codecov.io:443 github.com:443 + ingest.codecov.io:443 storage.googleapis.com:443 - uploader.codecov.io:443 - name: Checkout repository uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: Download coverage artifact @@ -146,8 +140,8 @@ jobs: uses: github/codeql-action/analyze@d39d31e687223d841ef683f52467bd88e9b21c14 # v3.25.3 docker_build: + if: ${{ github.event_name == 'pull_request' }} runs-on: ubuntu-22.04 - # if: ${{ github.event.type }} == "PullRequest" steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. @@ -170,7 +164,8 @@ jobs: ts-crl.ws.symantec.com:80 # For subfolders, currently a full checkout is required.
# See: https://github.com/marketplace/actions/build-and-push-docker-images#path-context - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 - name: Build backend diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 6449b30c20..3044c16821 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -13,10 +13,10 @@ name: "CodeQL" on: push: - branches: ["master"] + branches: [master] pull_request: # The branches below must be a subset of the branches above - branches: ["master"] + branches: [master] schedule: - cron: "21 8 * * 3" @@ -26,7 +26,7 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: analyze: name: Analyze - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: actions: read contents: read diff --git a/.github/workflows/combine_deploy_image.yml b/.github/workflows/combine_deploy_image.yml index c8f010de39..c897337838 100644 --- a/.github/workflows/combine_deploy_image.yml +++ b/.github/workflows/combine_deploy_image.yml @@ -11,7 +11,7 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. @@ -22,10 +22,10 @@ jobs: egress-policy: block allowed-endpoints: > api.ecr-public.us-east-1.amazonaws.com:443 - api.github.com:443 archive.ubuntu.com:80 auth.docker.io:443 cdn.dl.k8s.io:443 + deb.debian.org:80 dl.k8s.io:443 files.pythonhosted.org:443 get.helm.sh:443 @@ -40,7 +40,7 @@ jobs: - name: Set up QEMU uses: docker/setup-qemu-action@49b3bc8e6bdd4a60e6116a5414239cba5943d3cf # v3.2.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1 + uses: docker/setup-buildx-action@c47758b77c9736f4b2ef4073d4d51994fabfe349 # v3.7.1 - name: Configure AWS credentials uses: aws-actions/configure-aws-credentials@e3dd6a429d7300a6a4c196c26e071d42e0343502 # v4.0.2 with: @@ -54,7 +54,7 @@ jobs: username: ${{ secrets.AWS_ACCESS_KEY_ID }} password: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - name: Build combine_deploy - uses: docker/build-push-action@2cdde995de11925a030ce8070c3d77a52ffcf1c0 # v5.3.0 + uses: docker/build-push-action@v6.10.0 with: context: "{{defaultContext}}:deploy" push: true diff --git a/.github/workflows/commit_message_check.yml b/.github/workflows/commit_message_check.yml index 4377ba8af3..fef16f348e 100644 --- a/.github/workflows/commit_message_check.yml +++ b/.github/workflows/commit_message_check.yml @@ -10,4 +10,4 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: commit-message-lint: - uses: sillsdev/FieldWorks/.github/workflows/CommitMessage.yml@ba50e637df9593a2a972b29bf670226e89c0a21b + uses: sillsdev/FieldWorks/.github/workflows/CommitMessage.yml@22859ef68af99ffbd016eca4e503278db8007913 diff --git a/.github/workflows/database.yml b/.github/workflows/database.yml index c33c59d25e..834f6c97f8 100644 --- a/.github/workflows/database.yml +++ b/.github/workflows/database.yml @@ -10,7 +10,7 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: docker_build: if: ${{ github.event.type }} == "PullRequest" - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: # See 
https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. @@ -26,7 +26,8 @@ jobs: registry-1.docker.io:443 # For subfolders, currently a full checkout is required. # See: https://github.com/marketplace/actions/build-and-push-docker-images#path-context - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 - name: Build database image diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index 038fa8b486..470ffcdf35 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -14,7 +14,7 @@ permissions: jobs: dependency-review: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - name: Harden Runner uses: step-security/harden-runner@17d0e2bd7d51742c71671bd19fa12bdc9d40a3d6 # v2.8.1 @@ -24,4 +24,4 @@ jobs: - name: "Checkout Repository" uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: "Dependency Review" - uses: actions/dependency-review-action@5a2ce3f5b92ee19cbb1541a4984c76d921601d7c # v4.3.4 + uses: actions/dependency-review-action@3b139cfc5fae8b618d3eae3675e383bb1769c019 # v4.5.0 diff --git a/.github/workflows/deploy_qa.yml b/.github/workflows/deploy_qa.yml index e1b7bcce2f..7bad7dd654 100644 --- a/.github/workflows/deploy_qa.yml +++ b/.github/workflows/deploy_qa.yml @@ -13,7 +13,7 @@ jobs: matrix: component: [frontend, backend, maintenance, database] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 outputs: image_tag: ${{ steps.build_combine.outputs.image_tag }} steps: @@ -26,6 +26,7 @@ jobs: egress-policy: block allowed-endpoints: > *.actions.githubusercontent.com:443 + *.cloudfront.net:443 *.data.mcr.microsoft.com:443 ${{ secrets.AWS_ACCOUNT }}.dkr.ecr.${{ secrets.AWS_DEFAULT_REGION }}.amazonaws.com api.ecr.${{ secrets.AWS_DEFAULT_REGION }}.amazonaws.com:443 @@ -42,6 +43,7 @@ jobs: github.com:443 mcr.microsoft.com:443 production.cloudflare.docker.com:443 + public.ecr.aws:443 pypi.org:443 registry-1.docker.io:443 registry.npmjs.org:443 @@ -64,7 +66,7 @@ jobs: build_component: ${{ matrix.component }} clean_ecr_repo: needs: build - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. 
diff --git a/.github/workflows/deploy_release.yml b/.github/workflows/deploy_release.yml index 527c18d793..494f54d065 100644 --- a/.github/workflows/deploy_release.yml +++ b/.github/workflows/deploy_release.yml @@ -13,7 +13,7 @@ jobs: strategy: matrix: component: [frontend, backend, maintenance, database] - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 outputs: image_tag: ${{ steps.build_combine.outputs.image_tag }} steps: @@ -25,6 +25,7 @@ jobs: egress-policy: block allowed-endpoints: > *.actions.githubusercontent.com:443 + *.cloudfront.net:443 *.data.mcr.microsoft.com:443 api.ecr-public.us-east-1.amazonaws.com:443 api.github.com:443 diff --git a/.github/workflows/frontend.yml b/.github/workflows/frontend.yml index c417bb1892..7380c935a9 100644 --- a/.github/workflows/frontend.yml +++ b/.github/workflows/frontend.yml @@ -11,7 +11,7 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: lint_build: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: matrix: node-version: [20] @@ -31,7 +31,7 @@ jobs: - name: Checkout repository uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 + uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 with: node-version: ${{ matrix.node-version }} - run: npm ci @@ -40,7 +40,7 @@ jobs: - run: npm run build test_coverage: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: matrix: node-version: [20] @@ -60,11 +60,12 @@ jobs: - name: Checkout repository uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: Use Node.js ${{ matrix.node-version }} - uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 + uses: actions/setup-node@39370e3970a6d050c480ffad4ff0ed4d3fdee5af # v4.1.0 with: node-version: ${{ matrix.node-version }} - run: npm ci - - run: npm run test-frontend:coverage + - name: Run tests and generate coverage + run: npm run test-frontend:coverage env: CI: true - name: Upload coverage artifact @@ -77,7 +78,7 @@ jobs: upload_coverage: needs: test_coverage - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. @@ -88,12 +89,10 @@ jobs: egress-policy: block allowed-endpoints: > api.github.com:443 - api.codecov.io:443 cli.codecov.io:443 - codecov.io:443 github.com:443 + ingest.codecov.io:443 storage.googleapis.com:443 - uploader.codecov.io:443 - name: Checkout repository uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: Download coverage artifact @@ -110,8 +109,8 @@ jobs: name: Frontend docker_build: - runs-on: ubuntu-latest if: ${{ github.event.type }} == "PullRequest" + runs-on: ubuntu-22.04 steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. 
@@ -128,7 +127,8 @@ jobs: pypi.org:443 registry-1.docker.io:443 registry.npmjs.org:443 - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 - name: Build frontend diff --git a/.github/workflows/maintenance.yml b/.github/workflows/maintenance.yml index 2982b3f454..897d5d4ca1 100644 --- a/.github/workflows/maintenance.yml +++ b/.github/workflows/maintenance.yml @@ -10,7 +10,7 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: docker_build: if: ${{ github.event.type }} == "PullRequest" - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. @@ -20,17 +20,20 @@ jobs: disable-sudo: true egress-policy: block allowed-endpoints: > + *.cloudfront.net:443 archive.ubuntu.com:80 auth.docker.io:443 files.pythonhosted.org:443 github.com:443 production.cloudflare.docker.com:443 + public.ecr.aws:443 pypi.org:443 registry-1.docker.io:443 security.ubuntu.com:80 # For subfolders, currently a full checkout is required. # See: https://github.com/marketplace/actions/build-and-push-docker-images#path-context - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - name: Checkout repository + uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 with: fetch-depth: 0 - name: Build maintenance image diff --git a/.github/workflows/pages.yml b/.github/workflows/pages.yml index 7c5482d3a2..987be3d954 100644 --- a/.github/workflows/pages.yml +++ b/.github/workflows/pages.yml @@ -12,7 +12,7 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: deploy: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: # See https://docs.stepsecurity.io/harden-runner/getting-started/ for instructions on # configuring harden-runner and identifying allowed endpoints. 
@@ -26,7 +26,7 @@ jobs: github.com:443 pypi.org:443 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 + - uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 with: python-version: 3.12 - name: Install dependencies diff --git a/.github/workflows/python.yml b/.github/workflows/python.yml index 7d79b2e4ef..af4b314c17 100644 --- a/.github/workflows/python.yml +++ b/.github/workflows/python.yml @@ -11,7 +11,7 @@ permissions: # added using https://github.com/step-security/secure-workflows jobs: tox: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 strategy: matrix: python-version: ["3.12"] @@ -30,7 +30,7 @@ jobs: pypi.org:443 - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 + uses: actions/setup-python@0b93645e9fea7318ecaed2b359559ac225c90a2b # v5.3.0 with: python-version: ${{ matrix.python-version }} - name: Install dependencies diff --git a/.github/workflows/scorecards.yml b/.github/workflows/scorecards.yml index ca11ef0181..9c8cf78de4 100644 --- a/.github/workflows/scorecards.yml +++ b/.github/workflows/scorecards.yml @@ -21,7 +21,7 @@ permissions: read-all jobs: analysis: name: Scorecards analysis - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 permissions: # Needed to upload the results to code-scanning dashboard. security-events: write diff --git a/Backend/BackendFramework.csproj b/Backend/BackendFramework.csproj index 0fc805a80b..7160331f41 100644 --- a/Backend/BackendFramework.csproj +++ b/Backend/BackendFramework.csproj @@ -11,10 +11,10 @@ - - - - + + + + NU1701 @@ -24,7 +24,7 @@ - + diff --git a/Backend/Dockerfile b/Backend/Dockerfile index 10357c023e..6833352fcb 100644 --- a/Backend/Dockerfile +++ b/Backend/Dockerfile @@ -7,7 +7,7 @@ ############################################################ # Docker multi-stage build -FROM mcr.microsoft.com/dotnet/sdk:8.0.402-jammy AS builder +FROM mcr.microsoft.com/dotnet/sdk:8.0.404-jammy AS builder WORKDIR /app # Copy csproj and restore (fetch dependencies) as distinct layers. @@ -19,7 +19,7 @@ COPY . ./ RUN dotnet publish -c Release -o build # Build runtime image. -FROM mcr.microsoft.com/dotnet/aspnet:8.0.8-jammy +FROM mcr.microsoft.com/dotnet/aspnet:8.0.11-jammy ENV ASPNETCORE_URLS=http://+:5000 ENV COMBINE_IS_IN_CONTAINER=1 @@ -43,9 +43,9 @@ RUN mkdir -p $HOME # Setup app user and group to known UID/GID; no login. RUN groupmod --gid 999 app RUN usermod --uid 999 --gid app \ - --shell /sbin/nologin \ - --comment "Docker image user" \ - app + --shell /sbin/nologin \ + --comment "Docker image user" \ + app ## Set up application install directory. RUN mkdir $APP_HOME && \ diff --git a/Dockerfile b/Dockerfile index 74acc47977..df079b9ae8 100644 --- a/Dockerfile +++ b/Dockerfile @@ -7,7 +7,7 @@ ############################################################ # User guide build environment -FROM python:3.12.5-slim-bookworm AS user_guide_builder +FROM python:3.12.8-slim-bookworm AS user_guide_builder ENV PYTHONDONTWRITEBYTECODE=1 ENV PYTHONUNBUFFERED=1 @@ -24,7 +24,7 @@ COPY docs/user_guide docs/user_guide RUN tox -e user-guide # Frontend build environment. -FROM node:20.17.0-bookworm-slim AS frontend_builder +FROM node:20.18.1-bookworm-slim AS frontend_builder WORKDIR /app # Install app dependencies. 
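A note on the workflow changes earlier in this diff: they consistently pin runners to `ubuntu-22.04` and pin actions to full commit SHAs, yet the `docker/build-push-action@v6.10.0` reference in combine_deploy_image.yml is left on a mutable tag. A minimal audit sketch for such stragglers (a hypothetical helper, not part of this PR; it assumes workflows live in `.github/workflows/*.yml`):

```python
#!/usr/bin/env python3
"""Flag GitHub Actions `uses:` references that are not pinned to a commit SHA."""
import re
import sys
from pathlib import Path

# A pinned reference ends in `@` followed by a full 40-hex-digit commit SHA.
PINNED = re.compile(r"@[0-9a-f]{40}$")
USES = re.compile(r"^\s*-?\s*uses:\s*(\S+)")

def main() -> int:
    unpinned = 0
    for workflow in sorted(Path(".github/workflows").glob("*.yml")):
        for lineno, line in enumerate(workflow.read_text().splitlines(), start=1):
            match = USES.match(line)
            if match and not PINNED.search(match[1]):
                print(f"{workflow}:{lineno}: not SHA-pinned: {match[1]}")
                unpinned += 1
    return 1 if unpinned else 0

if __name__ == "__main__":
    sys.exit(main())
```

Run from the repository root, this would flag `docker/build-push-action@v6.10.0` while passing the SHA-pinned `actions/checkout` references.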
diff --git a/README.md b/README.md index a33c4f226b..19c2433238 100644 --- a/README.md +++ b/README.md @@ -550,7 +550,7 @@ the corresponding videos and any transcript translations downloaded from Crowdin optionally attach them to a video file), run from within a Python virtual environment: ```bash -python scripts/subtitle_tutorial_video.py -s [-i -o [-i -o ] [-v] ``` ## Setup Local Kubernetes Cluster @@ -619,7 +619,7 @@ Notes for installing _Docker Desktop_ in Linux: Once _Docker Desktop_ has been installed, start it, and set it up as follows: 1. Click the gear icon in the upper right to open the settings dialog; -2. Click on the _Resources_ link on the left-hand side and set the Memory to at least 4 GB (see Note); +2. Click on the _Resources_ link on the left-hand side and set the Memory to at least 6 GB (see Note); 3. Click on the _Kubernetes_ link on the left-hand side; 4. Select _Enable Kubernetes_ and click _Apply & Restart_; 5. Click _Install_ on the dialog that is displayed. @@ -731,16 +731,20 @@ Install the Kubernetes resources to run _The Combine_ by running: python deploy/scripts/setup_combine.py [--target <target>] [--tag <tag>] ``` -The default target is `localhost`; the default tag is `latest`. For development testing the script will usually be run -with no arguments. +Notes: -If an invalid target is entered, the script will list available targets and prompt the user his/her selection. -`deploy/scripts/setup_combine.py` assumes that the `kubectl` configuration file is setup to manage the desired -Kubernetes cluster. For most development users, there will only be the _Rancher Desktop/Docker Desktop_ cluster to -manage and the installation process will set that up correctly. If there are multiple clusters to manage, the -`--kubeconfig` and `--context` options will let you specify a different cluster. +- The default target is `localhost`; the default tag is `latest`. For development testing the script will usually be run + with no arguments. -Run the script with the `--help` option to see possible options for the script. +- If an invalid target is entered, the script will list available targets and prompt the user to select one. + `deploy/scripts/setup_combine.py` assumes that the `kubectl` configuration file is set up to manage the desired + Kubernetes cluster. For most development users, there will only be the _Rancher Desktop/Docker Desktop_ cluster to + manage and the installation process will set that up correctly. If there are multiple clusters to manage, the + `--kubeconfig` and `--context` options will let you specify a different cluster. + +- Run the script with the `--help` option to see all available options. + +- The setup assumes `amd64` architecture. If the target architecture is `arm64`, add `--set global.cpuArch=arm64`. When the script completes, the resources will be installed on the specified cluster. It may take a few moments before all the containers are up and running. If you are using _Rancher Desktop_, you can use the
If you are using _Rancher Desktop_, you c ```console $ kubectl -n thecombine get deployments -NAME READY UP-TO-DATE AVAILABLE AGE -backend 1/1 1 1 10m -database 1/1 1 1 10m -frontend 1/1 1 1 10m -maintenance 1/1 1 1 10m +NAME READY UP-TO-DATE AVAILABLE AGE +backend 1/1 1 1 10m +database 1/1 1 1 10m +frontend 1/1 1 1 10m +maintenance 1/1 1 1 10m +otel-opentelemetry-collector 1/1 1 1 19m ``` or ```console $ kubectl -n thecombine get pods -NAME READY STATUS RESTARTS AGE -backend-5657559949-z2flp 1/1 Running 0 10m -database-794b4d956f-zjszm 1/1 Running 0 10m -frontend-7d6d79f8c5-lkhhz 1/1 Running 0 10m -maintenance-7f4b5b89b8-rhgk9 1/1 Running 0 10m +NAME READY STATUS RESTARTS AGE +backend-5657559949-z2flp 1/1 Running 0 10m +database-794b4d956f-zjszm 1/1 Running 0 10m +frontend-7d6d79f8c5-lkhhz 1/1 Running 0 10m +install-fonts-4jcsl 0/1 Completed 0 8m +maintenance-7f4b5b89b8-rhgk9 1/1 Running 0 10m +otel-opentelemetry-collector-5b5b69557b-zqk5d 1/1 Running 0 19m ``` ### Connecting to Your Cluster diff --git a/database/Dockerfile b/database/Dockerfile index 778443a6fc..40cb6e9e36 100644 --- a/database/Dockerfile +++ b/database/Dockerfile @@ -5,7 +5,7 @@ # - Intel/AMD 64-bit # - ARM 64-bit ############################################################ -FROM mongo:7.0.14-jammy +FROM mongo:7.0.15-jammy WORKDIR / diff --git a/deploy/Dockerfile b/deploy/Dockerfile index b0a7cc68e3..d68591591e 100644 --- a/deploy/Dockerfile +++ b/deploy/Dockerfile @@ -5,12 +5,12 @@ # - Intel/AMD 64-bit ############################################################ -FROM ubuntu:22.04 +FROM python:3.12.8-slim-bookworm USER root RUN apt-get update && \ - apt-get install -y python3 python3-pip nano curl openssh-client iputils-ping && \ + apt-get install -y python3-pip nano curl openssh-client iputils-ping && \ apt-get autoremove && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* diff --git a/deploy/ansible/playbook_k3s_airgapped_files.yml b/deploy/ansible/playbook_k3s_airgapped_files.yml index 7911849fef..99522e8d4e 100644 --- a/deploy/ansible/playbook_k3s_airgapped_files.yml +++ b/deploy/ansible/playbook_k3s_airgapped_files.yml @@ -32,13 +32,13 @@ dest: "{{ package_dir }}/{{ item }}" url: "https://github.com/k3s-io/k3s/releases/download/{{ k3s_version }}/{{ item }}" loop: - - k3s-airgap-images-amd64.tar.zst + - k3s-airgap-images-{{ cpu_arch }}.tar.zst - k3s - - sha256sum-amd64.txt + - sha256sum-{{ cpu_arch }}.txt - name: Verify k3s downloads shell: - cmd: sha256sum --check --ignore-missing sha256sum-amd64.txt + cmd: sha256sum --check --ignore-missing sha256sum-{{ cpu_arch }}.txt chdir: "{{ package_dir }}" changed_when: false @@ -50,9 +50,9 @@ - name: Download kubectl get_url: dest: "{{ package_dir }}/kubectl" - url: "https://dl.k8s.io/release/{{ kubectl_version }}/bin/linux/amd64/kubectl" + url: "https://dl.k8s.io/release/{{ kubectl_version }}/bin/linux/{{ cpu_arch }}/kubectl" - name: Download helm get_url: dest: "{{ package_dir }}/helm.tar.gz" - url: "https://get.helm.sh/helm-{{ helm_version }}-linux-amd64.tar.gz" + url: "https://get.helm.sh/helm-{{ helm_version }}-linux-{{ cpu_arch }}.tar.gz" diff --git a/deploy/ansible/roles/container_engine/defaults/main.yml b/deploy/ansible/roles/container_engine/defaults/main.yml index 1276e993b3..12c2d6e30e 100644 --- a/deploy/ansible/roles/container_engine/defaults/main.yml +++ b/deploy/ansible/roles/container_engine/defaults/main.yml @@ -3,3 +3,4 @@ container_packages: - containerd.io keyring_location: /etc/apt/keyrings +cpu_arch: amd64 diff --git 
a/deploy/ansible/roles/container_engine/tasks/main.yml b/deploy/ansible/roles/container_engine/tasks/main.yml index 8508c4bc46..c884eb68e1 100644 --- a/deploy/ansible/roles/container_engine/tasks/main.yml +++ b/deploy/ansible/roles/container_engine/tasks/main.yml @@ -40,7 +40,7 @@ - name: Add Docker repository apt_repository: - repo: "deb [arch=amd64 signed-by={{ keyring_location }}/docker.gpg] https://download.docker.com/linux/ubuntu {{ ansible_distribution_release }} stable" + repo: "deb [arch={{ cpu_arch }} signed-by={{ keyring_location }}/docker.gpg] https://download.docker.com/linux/ubuntu {{ ansible_distribution_release }} stable" state: present filename: docker diff --git a/deploy/ansible/roles/container_images/defaults/main.yml b/deploy/ansible/roles/container_images/defaults/main.yml index 02994e8520..aae28e44e4 100644 --- a/deploy/ansible/roles/container_images/defaults/main.yml +++ b/deploy/ansible/roles/container_images/defaults/main.yml @@ -4,3 +4,4 @@ source_image_dir: ../airgap-images airgap_image_dir: /var/lib/rancher/k3s/agent/images +cpu_arch: amd64 diff --git a/deploy/ansible/roles/container_images/tasks/main.yml b/deploy/ansible/roles/container_images/tasks/main.yml index d5edea306d..e4a68fd04c 100644 --- a/deploy/ansible/roles/container_images/tasks/main.yml +++ b/deploy/ansible/roles/container_images/tasks/main.yml @@ -1,6 +1,11 @@ --- +############################################################## +# Role: container_images +# # Setup airgap images in {{ airgap_image_dir }} to be # available when k3s and subsequent helm charts are installed. +# +############################################################## - name: Create airgap image directory file: @@ -18,9 +23,9 @@ group: root mode: 0644 loop: - - k3s-airgap-images-amd64.tar.zst - - middleware-airgap-images-amd64.tar.zst - - combine-airgap-images-amd64.tar.zst + - k3s-airgap-images-{{ cpu_arch }}.tar.zst + - middleware-airgap-images-{{ cpu_arch }}.tar.zst + - combine-airgap-images-{{ cpu_arch }}.tar.zst # Add k3s, kubectl and the k3s installation script to # /usr/local/bin @@ -51,7 +56,7 @@ - name: Create link to helm binary file: - src: /opt/helm/{{ helm_version }}/linux-amd64/helm + src: /opt/helm/{{ helm_version }}/linux-{{ cpu_arch }}/helm dest: /usr/local/bin/helm state: link owner: root diff --git a/deploy/ansible/roles/helm_install/defaults/main.yml b/deploy/ansible/roles/helm_install/defaults/main.yml index 54401b3cdc..785842645d 100644 --- a/deploy/ansible/roles/helm_install/defaults/main.yml +++ b/deploy/ansible/roles/helm_install/defaults/main.yml @@ -1,5 +1,5 @@ --- helm_version: v3.15.2 -helm_arch: linux-amd64 +cpu_arch: amd64 -helm_download_dir: /opt/helm-{{ helm_version }}-{{ helm_arch }} +helm_download_dir: /opt/helm-{{ helm_version }}-linux-{{ cpu_arch }} diff --git a/deploy/ansible/roles/helm_install/tasks/main.yml b/deploy/ansible/roles/helm_install/tasks/main.yml index 6956f09f06..76e51b4ea4 100644 --- a/deploy/ansible/roles/helm_install/tasks/main.yml +++ b/deploy/ansible/roles/helm_install/tasks/main.yml @@ -9,8 +9,7 @@ - name: Get Latest Release get_url: - # https://get.helm.sh/helm-v3.13.2-linux-amd64.tar.gz - url: "https://get.helm.sh/helm-{{ helm_version }}-{{ helm_arch }}.tar.gz" + url: "https://get.helm.sh/helm-{{ helm_version }}-linux-{{ cpu_arch }}.tar.gz" dest: "{{ helm_download_dir }}/helm.tar.gz" owner: root group: root @@ -20,11 +19,11 @@ command: cmd: "tar -zxvf {{ helm_download_dir }}/helm.tar.gz" chdir: "{{ helm_download_dir }}" - creates: "{{ helm_download_dir }}/{{ helm_arch 
}}/helm" + creates: "{{ helm_download_dir }}/linux-{{ cpu_arch }}/helm" - name: Link to extracted helm file file: - src: "{{ helm_download_dir }}/{{ helm_arch }}/helm" + src: "{{ helm_download_dir }}/linux-{{ cpu_arch }}/helm" path: /usr/local/bin/helm state: link owner: root diff --git a/deploy/ansible/vars/k3s_versions.yml b/deploy/ansible/vars/k3s_versions.yml index e3a79b990f..8081a4acb5 100644 --- a/deploy/ansible/vars/k3s_versions.yml +++ b/deploy/ansible/vars/k3s_versions.yml @@ -2,3 +2,4 @@ k3s_version: "v1.30.1%2Bk3s1" kubectl_version: "v1.30.2" helm_version: "v3.15.2" +cpu_arch: "amd64" diff --git a/deploy/helm/aws-login/templates/aws-ecr-login-cronjob.yaml b/deploy/helm/aws-login/templates/aws-ecr-login-cronjob.yaml index 4a60994388..30abff4b87 100644 --- a/deploy/helm/aws-login/templates/aws-ecr-login-cronjob.yaml +++ b/deploy/helm/aws-login/templates/aws-ecr-login-cronjob.yaml @@ -18,7 +18,7 @@ spec: spec: serviceAccountName: {{ .Values.awsEcr.serviceAccount }} containers: - - image: {{ .Values.awsEcr.image }}:{{ .Values.awsEcr.imageTag }} + - image: {{ .Values.awsEcr.image }}:{{ .Values.awsEcr.imageVersion }}-{{ .Values.global.cpuArch }} imagePullPolicy: IfNotPresent name: {{ .Values.awsEcr.cronJobName }} command: @@ -59,7 +59,11 @@ spec: configMapKeyRef: key: DOCKER_EMAIL name: {{ .Values.awsEcr.configName }} - resources: {} + resources: + requests: + memory: 128Mi + limits: + memory: 128Mi securityContext: capabilities: {} terminationMessagePath: /dev/termination-log diff --git a/deploy/helm/aws-login/templates/aws-ecr-login-oneshot.yaml b/deploy/helm/aws-login/templates/aws-ecr-login-oneshot.yaml index f4737396e8..857ec0b184 100644 --- a/deploy/helm/aws-login/templates/aws-ecr-login-oneshot.yaml +++ b/deploy/helm/aws-login/templates/aws-ecr-login-oneshot.yaml @@ -18,7 +18,7 @@ spec: spec: serviceAccountName: {{ .Values.awsEcr.serviceAccount }} containers: - - image: {{ .Values.awsEcr.image }}:{{ .Values.awsEcr.imageTag }} + - image: {{ .Values.awsEcr.image }}:{{ .Values.awsEcr.imageVersion }}-{{ .Values.global.cpuArch }} imagePullPolicy: IfNotPresent name: "{{ .Values.awsEcr.jobName }}" command: @@ -59,7 +59,11 @@ spec: configMapKeyRef: key: DOCKER_EMAIL name: "{{ .Values.awsEcr.configName }}" - resources: {} + resources: + requests: + memory: 128Mi + limits: + memory: 128Mi securityContext: capabilities: {} terminationMessagePath: /dev/termination-log diff --git a/deploy/helm/aws-login/values.yaml b/deploy/helm/aws-login/values.yaml index 2d81c4601a..86348a7fd6 100644 --- a/deploy/helm/aws-login/values.yaml +++ b/deploy/helm/aws-login/values.yaml @@ -15,14 +15,15 @@ global: awsAccessKeyId: "Override" awsSecretAccessKey: "Override" pullSecretName: aws-login-credentials + cpuArch: "amd64" awsEcr: configName: aws-ecr-config cron: yes cronJobName: ecr-cred-helper-cron dockerEmail: noreply@thecombine.app - image: sillsdev/aws-kubectl - imageTag: "0.3.0" + image: "public.ecr.aws/thecombine/aws-kubectl" + imageVersion: "0.4.0" jobName: ecr-cred-helper schedule: "0 */8 * * *" secretsName: aws-ecr-credentials diff --git a/deploy/helm/cert-proxy-client/templates/update-cert-cronjob.yaml b/deploy/helm/cert-proxy-client/templates/update-cert-cronjob.yaml index 8b6d303673..97a51a3da4 100644 --- a/deploy/helm/cert-proxy-client/templates/update-cert-cronjob.yaml +++ b/deploy/helm/cert-proxy-client/templates/update-cert-cronjob.yaml @@ -73,7 +73,11 @@ spec: configMapKeyRef: key: VERBOSE name: {{ .Values.envName | quote }} - resources: {} + resources: + requests: + memory: 128Mi + limits: 
+ memory: 128Mi securityContext: capabilities: {} terminationMessagePath: /dev/termination-log diff --git a/deploy/helm/cert-proxy-client/templates/update-cert-oneshot.yaml b/deploy/helm/cert-proxy-client/templates/update-cert-oneshot.yaml index 5011eb043a..a6de7a6c37 100644 --- a/deploy/helm/cert-proxy-client/templates/update-cert-oneshot.yaml +++ b/deploy/helm/cert-proxy-client/templates/update-cert-oneshot.yaml @@ -72,7 +72,11 @@ spec: configMapKeyRef: key: VERBOSE name: {{ .Values.envName | quote }} - resources: {} + resources: + requests: + memory: 128Mi + limits: + memory: 128Mi securityContext: capabilities: {} terminationMessagePath: /dev/termination-log diff --git a/deploy/helm/cert-proxy-server/templates/deployment-cert-proxy-server.yaml b/deploy/helm/cert-proxy-server/templates/deployment-cert-proxy-server.yaml index 7c23b4cb71..c7ea2c55bd 100644 --- a/deploy/helm/cert-proxy-server/templates/deployment-cert-proxy-server.yaml +++ b/deploy/helm/cert-proxy-server/templates/deployment-cert-proxy-server.yaml @@ -66,9 +66,9 @@ spec: resources: requests: cpu: 2m - memory: 100M + memory: 128Mi limits: - memory: 150M + memory: 128Mi restartPolicy: Always {{- if ne .Values.global.pullSecretName "None" }} imagePullSecrets: diff --git a/deploy/helm/cert-proxy-server/templates/deployment-nuc-proxy.yaml b/deploy/helm/cert-proxy-server/templates/deployment-nuc-proxy.yaml index 3ac30a1463..21be6f8608 100644 --- a/deploy/helm/cert-proxy-server/templates/deployment-nuc-proxy.yaml +++ b/deploy/helm/cert-proxy-server/templates/deployment-nuc-proxy.yaml @@ -51,9 +51,9 @@ spec: resources: requests: cpu: 1m - memory: 10M + memory: 128Mi limits: - memory: 50M + memory: 128Mi volumeMounts: - name: nginx-html mountPath: /usr/share/nginx/html diff --git a/deploy/helm/create-admin-user/templates/job-create-admin-user.yaml b/deploy/helm/create-admin-user/templates/job-create-admin-user.yaml index 7c60ca5e2c..eaf79685fb 100644 --- a/deploy/helm/create-admin-user/templates/job-create-admin-user.yaml +++ b/deploy/helm/create-admin-user/templates/job-create-admin-user.yaml @@ -50,7 +50,11 @@ spec: image: {{ include "create-admin-user.containerImage" . 
}} imagePullPolicy: {{ .Values.global.imagePullPolicy }} name: create-admin-user - resources: {} + resources: + requests: + memory: 128Mi + limits: + memory: 128Mi volumeMounts: - mountPath: /home/app/.CombineFiles name: backend-data diff --git a/deploy/helm/thecombine/charts/backend/templates/deployment-backend.yaml b/deploy/helm/thecombine/charts/backend/templates/deployment-backend.yaml index bcfdc71c3a..60a2f75de8 100644 --- a/deploy/helm/thecombine/charts/backend/templates/deployment-backend.yaml +++ b/deploy/helm/thecombine/charts/backend/templates/deployment-backend.yaml @@ -100,7 +100,7 @@ spec: resources: requests: cpu: 5m - memory: 960Mi + memory: 2Gi {{- if .Values.global.includeResourceLimits }} limits: memory: 4Gi diff --git a/deploy/helm/thecombine/charts/database/templates/database.yaml b/deploy/helm/thecombine/charts/database/templates/database.yaml index 6e681ca546..7c30530888 100644 --- a/deploy/helm/thecombine/charts/database/templates/database.yaml +++ b/deploy/helm/thecombine/charts/database/templates/database.yaml @@ -51,7 +51,7 @@ spec: resources: requests: cpu: 25m - memory: 950Mi + memory: 1Gi {{- if .Values.global.includeResourceLimits }} limits: memory: 2Gi diff --git a/deploy/helm/thecombine/charts/frontend/templates/deployment-frontend.yaml b/deploy/helm/thecombine/charts/frontend/templates/deployment-frontend.yaml index 739efab059..36e025f392 100644 --- a/deploy/helm/thecombine/charts/frontend/templates/deployment-frontend.yaml +++ b/deploy/helm/thecombine/charts/frontend/templates/deployment-frontend.yaml @@ -77,10 +77,10 @@ spec: resources: requests: cpu: 1m - memory: 15M + memory: 128Mi {{- if .Values.global.includeResourceLimits }} limits: - memory: 40M + memory: 128Mi {{- end }} volumeMounts: - mountPath: /usr/share/nginx/fonts diff --git a/deploy/helm/thecombine/charts/maintenance/templates/cronjob-daily-backup.yaml b/deploy/helm/thecombine/charts/maintenance/templates/cronjob-daily-backup.yaml index e37bd8df77..fb3b7b8649 100644 --- a/deploy/helm/thecombine/charts/maintenance/templates/cronjob-daily-backup.yaml +++ b/deploy/helm/thecombine/charts/maintenance/templates/cronjob-daily-backup.yaml @@ -18,7 +18,7 @@ spec: spec: serviceAccountName: {{ .Values.serviceAccount.name }} containers: - - image: sillsdev/aws-kubectl:0.3.0 + - image: {{ .Values.awsEcr.image }}:{{ .Values.awsEcr.imageVersion }}-{{ .Values.global.cpuArch }} imagePullPolicy: Always name: daily-backup command: @@ -33,9 +33,9 @@ spec: resources: requests: cpu: 200m - memory: 150M + memory: 128Mi limits: - memory: 150M + memory: 128Mi securityContext: capabilities: {} terminationMessagePath: /dev/termination-log diff --git a/deploy/helm/thecombine/charts/maintenance/templates/cronjob-update-fonts.yaml b/deploy/helm/thecombine/charts/maintenance/templates/cronjob-update-fonts.yaml index 8b306ec0c5..ca088114b7 100644 --- a/deploy/helm/thecombine/charts/maintenance/templates/cronjob-update-fonts.yaml +++ b/deploy/helm/thecombine/charts/maintenance/templates/cronjob-update-fonts.yaml @@ -18,7 +18,7 @@ spec: spec: serviceAccountName: {{ .Values.serviceAccount.name }} containers: - - image: sillsdev/aws-kubectl:0.3.0 + - image: {{ .Values.awsEcr.image }}:{{ .Values.awsEcr.imageVersion }}-{{ .Values.global.cpuArch }} imagePullPolicy: Always name: update-fonts command: @@ -30,7 +30,11 @@ spec: - deployment/maintenance - -- - get-fonts.sh - resources: {} + resources: + requests: + memory: 128Mi + limits: + memory: 128Mi securityContext: capabilities: {} terminationMessagePath: /dev/termination-log 
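A recurring change in the Helm templates above is replacing `resources: {}` with explicit 128Mi memory requests and limits. A quick way to confirm no container slipped through is to pipe rendered manifests through a checker; the following is a minimal sketch (a hypothetical helper, not part of this PR), using PyYAML, which is already among this repo's Python dependencies:

```python
#!/usr/bin/env python3
"""Check that every container in rendered Kubernetes manifests sets memory requests and limits."""
import sys
import yaml  # PyYAML

def pod_spec(obj):
    """Return the pod spec for the workload kinds used in these charts, else None."""
    kind = obj.get("kind")
    spec = obj.get("spec", {})
    if kind in ("Deployment", "StatefulSet", "DaemonSet", "Job"):
        return spec.get("template", {}).get("spec", {})
    if kind == "CronJob":
        return spec.get("jobTemplate", {}).get("spec", {}).get("template", {}).get("spec", {})
    return None

failures = 0
for doc in yaml.safe_load_all(sys.stdin):
    if not isinstance(doc, dict):
        continue
    pod = pod_spec(doc)
    if pod is None:
        continue
    name = doc.get("metadata", {}).get("name", "?")
    for container in pod.get("containers", []):
        resources = container.get("resources") or {}
        for section in ("requests", "limits"):
            if "memory" not in (resources.get(section) or {}):
                print(f"{doc['kind']}/{name} container {container.get('name')}: no memory {section}")
                failures += 1
sys.exit(1 if failures else 0)
```

An example invocation, assuming default chart values render cleanly: `helm template thecombine deploy/helm/thecombine | python3 check_memory.py`.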
diff --git a/deploy/helm/thecombine/charts/maintenance/templates/deployment-maintenance.yaml b/deploy/helm/thecombine/charts/maintenance/templates/deployment-maintenance.yaml index 6ffb0a0d5a..33821d5308 100644 --- a/deploy/helm/thecombine/charts/maintenance/templates/deployment-maintenance.yaml +++ b/deploy/helm/thecombine/charts/maintenance/templates/deployment-maintenance.yaml @@ -106,10 +106,10 @@ spec: resources: requests: cpu: 200m - memory: 1200Mi + memory: 1Gi {{- if .Values.global.includeResourceLimits }} limits: - memory: 4Gi + memory: 2Gi {{- end }} volumeMounts: - mountPath: {{ .Values.fontsDir }} diff --git a/deploy/helm/thecombine/charts/maintenance/templates/get-fonts-hook.yaml b/deploy/helm/thecombine/charts/maintenance/templates/get-fonts-hook.yaml index 8ca7f66f48..8c42de0438 100644 --- a/deploy/helm/thecombine/charts/maintenance/templates/get-fonts-hook.yaml +++ b/deploy/helm/thecombine/charts/maintenance/templates/get-fonts-hook.yaml @@ -26,7 +26,7 @@ spec: spec: serviceAccountName: {{ .Values.serviceAccount.name }} containers: - - image: sillsdev/aws-kubectl:0.3.0 + - image: {{ .Values.awsEcr.image }}:{{ .Values.awsEcr.imageVersion }}-{{ .Values.global.cpuArch }} imagePullPolicy: Always name: "install-fonts" command: @@ -45,7 +45,11 @@ spec: - {{ $lang | quote }} {{- end }} {{- end }} - resources: {} + resources: + requests: + memory: 128Mi + limits: + memory: 128Mi securityContext: capabilities: {} terminationMessagePath: /dev/termination-log diff --git a/deploy/helm/thecombine/charts/maintenance/values.yaml b/deploy/helm/thecombine/charts/maintenance/values.yaml index d1424183e6..4eb960ba65 100644 --- a/deploy/helm/thecombine/charts/maintenance/values.yaml +++ b/deploy/helm/thecombine/charts/maintenance/values.yaml @@ -26,6 +26,7 @@ global: imageRegistry: "" # Default AWS S3 location awsS3Location: "thecombine.app" + cpuArch: "amd64" imageName: combine_maint @@ -34,7 +35,10 @@ serviceAccount: role: role-maintenance roleBinding: role-maintenance-binding -serviceAccount.name: account-maintenance +awsEcr: + image: "public.ecr.aws/thecombine/aws-kubectl" + imageVersion: "0.4.0" + ####################################### # Variables controlling backups ####################################### diff --git a/deploy/requirements.txt b/deploy/requirements.txt index bb5f5c681e..157dc73827 100644 --- a/deploy/requirements.txt +++ b/deploy/requirements.txt @@ -4,9 +4,9 @@ # # pip-compile requirements.in # -ansible==10.4.0 +ansible==11.1.0 # via -r requirements.in -ansible-core==2.17.4 +ansible-core==2.18.1 # via ansible cachetools==5.5.0 # via google-auth @@ -16,17 +16,17 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests -cryptography==43.0.1 +cryptography==44.0.0 # via # ansible-core # pyopenssl -durationpy==0.8 +durationpy==0.9 # via kubernetes -google-auth==2.34.0 +google-auth==2.36.0 # via kubernetes -idna==3.8 +idna==3.10 # via requests jinja2==3.1.4 # via @@ -37,23 +37,23 @@ jinja2-base64-filters==0.1.4 # via -r requirements.in kubernetes==31.0.0 # via -r requirements.in -markupsafe==2.1.5 +markupsafe==3.0.2 # via jinja2 oauthlib==3.2.2 # via # kubernetes # requests-oauthlib -packaging==24.1 +packaging==24.2 # via ansible-core -pyasn1==0.6.0 +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pycparser==2.22 # via cffi -pyopenssl==24.2.1 +pyopenssl==24.3.0 # via -r requirements.in python-dateutil==2.9.0.post0 # via kubernetes @@ -72,11 
+72,11 @@ resolvelib==1.0.1 # via ansible-core rsa==4.9 # via google-auth -six==1.16.0 +six==1.17.0 # via # kubernetes # python-dateutil -urllib3==2.2.2 +urllib3==2.2.3 # via # kubernetes # requests diff --git a/deploy/scripts/app_release.py b/deploy/scripts/app_release.py index d2b34a9d05..8851cf48ad 100755 --- a/deploy/scripts/app_release.py +++ b/deploy/scripts/app_release.py @@ -31,7 +31,7 @@ def get_release() -> str: num_commits = match[2] # Get the branch name result = run_cmd(["git", "branch", "--show-current"], chomp=True) - branch_name = re.sub("[/_]+", "-", result.stdout) + branch_name = re.sub("[/_]+", "-", result.stdout) or "HEADLESS" return f"{release_string}-{branch_name}.{num_commits}" message = f"Unrecognized release value in tag: {result.stdout}" raise ValueError(message) diff --git a/deploy/scripts/build.py b/deploy/scripts/build.py index fa395b26b1..b090535de0 100755 --- a/deploy/scripts/build.py +++ b/deploy/scripts/build.py @@ -50,7 +50,8 @@ class Job: class JobQueue: """Class to manage a queue of jobs.""" - def __init__(self, name: str) -> None: + def __init__(self, name: str, debug: bool = False) -> None: + self.debug = debug self.name = name self.status = JobStatus.RUNNING self.job_list: List[Job] = [] @@ -88,6 +89,18 @@ def start_next(self) -> bool: logging.debug(f"{self.name}.start_next(): no more jobs to run.") return False + def print_out(self) -> None: + logging.debug("####################") + logging.debug("Printing the stdout:\n") + self.output_stream.print() + logging.debug("####################") + + def print_err(self) -> None: + logging.debug("####################") + logging.debug("Printing the stderr:\n") + self.error_stream.print() + logging.debug("####################") + def check_jobs(self) -> JobStatus: """ Check if all jobs in the queue have completed. 
@@ -104,12 +117,14 @@ def check_jobs(self) -> JobStatus: # Current job has finished if self.curr_job.returncode == 0: logging.info(f"{self.name} job has finished.") - self.output_stream.print() + self.print_out() + if self.debug: + self.print_err() else: logging.error(f"{self.name} job failed.") - self.error_stream.print() + self.print_err() self.returncode = self.curr_job.returncode - # skip remaining jobs + # Skip remaining jobs self.job_list = [] self.status = JobStatus.ERROR return self.status @@ -244,16 +259,20 @@ def main() -> None: init_logging(args) # Setup required build engine - docker or nerdctl - container_cli = os.getenv("CONTAINER_CLI", "docker") - match container_cli: + container_cmd = [os.getenv("CONTAINER_CLI", "docker")] + match container_cmd[0]: case "nerdctl": - build_cmd = [container_cli, "-n", args.namespace, "build"] - push_cmd = [container_cli, "-n", args.namespace, "push"] + if args.debug: + container_cmd.append("--debug-full") + build_cmd = container_cmd + ["-n", args.namespace, "build"] + push_cmd = container_cmd + ["-n", args.namespace, "push"] case "docker": - build_cmd = [container_cli, "buildx", "build"] - push_cmd = [container_cli, "push"] + if args.debug: + container_cmd.extend(["-D", "-l", "debug"]) + build_cmd = container_cmd + ["buildx", "build"] + push_cmd = container_cmd + ["push"] case _: - logging.critical(f"Container CLI '{container_cli}' is not supported.") + logging.critical(f"Container CLI '{container_cmd[0]}' is not supported.") sys.exit(1) # Setup build options @@ -280,10 +299,11 @@ def main() -> None: job_set: Dict[str, JobQueue] = {} for component in to_do: spec = build_specs[component] + logging.info(f"Starting pre-build for {component}") spec.pre_build() image_name = get_image_name(args.repo, spec.name, args.tag) job_opts = ["-t", image_name, "-f", "Dockerfile", "."] - job_set[component] = JobQueue(component) + job_set[component] = JobQueue(component, debug=args.debug) logging.debug(f"Adding job {build_cmd + job_opts}") job_set[component].add_job(Job(build_cmd + job_opts, spec.dir)) if args.repo is not None: @@ -304,6 +324,7 @@ def main() -> None: time.sleep(5.0) # Run the post_build cleanup functions for component in to_do: + logging.info(f"Starting post-build for {component}") build_specs[component].post_build() # Print job summary if output mode is ALL diff --git a/deploy/scripts/sem_dom_import.py b/deploy/scripts/sem_dom_import.py index d91823f013..fa93a81333 100755 --- a/deploy/scripts/sem_dom_import.py +++ b/deploy/scripts/sem_dom_import.py @@ -218,7 +218,6 @@ def get_sem_doms(node: ElementTree.Element, parent: SemDomTreeMap, prev: SemDomM elif field.tag == "Abbreviation": for abbrev_node in field: lang, id_text = get_auni_text(abbrev_node) - logging.debug(f"id[{lang}]='{id_text}'") domain_set[lang].id = id_text elif field.tag == "Description": for descr_node in field: @@ -296,7 +295,7 @@ def generate_semantic_domains( # Languages can be found in the Name element for sub_elem in elem: lang, name_text = get_auni_text(sub_elem) - logging.info(f"Language code: {lang}") + logging.debug(f"Language code: {lang}") if lang not in domain_tree: domain_tree[lang] = {} if lang not in domain_nodes: @@ -306,9 +305,9 @@ def generate_semantic_domains( prev_domain = get_sem_doms(root, {}, prev_domain) for lang in domain_nodes: - logging.info(f"Number of {lang} Domains: {len(domain_nodes[lang])}") + logging.debug(f"Number of {lang} Domains: {len(domain_nodes[lang])}") for lang in domain_tree: - logging.info(f"Number of {lang} Tree Nodes: 
{len(domain_tree[lang])}") + logging.debug(f"Number of {lang} Tree Nodes: {len(domain_tree[lang])}") if not flatten_questions: SemanticDomainFull.flatten_questions = False write_json(output_dir) diff --git a/deploy/scripts/setup_cluster.py b/deploy/scripts/setup_cluster.py index 9faaa89fb8..71255590fe 100755 --- a/deploy/scripts/setup_cluster.py +++ b/deploy/scripts/setup_cluster.py @@ -110,6 +110,9 @@ def main() -> None: # Add the current script directory to the OS Environment variables os.environ["SCRIPTS_DIR"] = str(scripts_dir) + # Add an empty analytics key if not defined in the OS Environment variables + if "HONEYCOMB_API_KEY" not in os.environ: + os.environ["HONEYCOMB_API_KEY"] = "" # Verify the Kubernetes/Helm environment kube_env = KubernetesEnvironment(args) diff --git a/deploy/scripts/setup_files/collector_config.yaml b/deploy/scripts/setup_files/collector_config.yaml index e9a6cc2562..30e243b965 100644 --- a/deploy/scripts/setup_files/collector_config.yaml +++ b/deploy/scripts/setup_files/collector_config.yaml @@ -56,6 +56,6 @@ useGOMEMLIMIT: true resources: requests: cpu: 25m - memory: 256Mi + memory: 128Mi limits: - memory: 512Mi + memory: 128Mi diff --git a/deploy/scripts/setup_files/combine_config.yaml b/deploy/scripts/setup_files/combine_config.yaml index ca9030bbbe..25755f0e17 100644 --- a/deploy/scripts/setup_files/combine_config.yaml +++ b/deploy/scripts/setup_files/combine_config.yaml @@ -125,9 +125,10 @@ profiles: # Set of charts # The set of charts defines properties for each of the charts listed in the profiles above. -# There are 2 keys for each chart: +# There are 3 keys for each chart: # namespace: the namespace where helm should install the chart. Note that the namespace needs to be # created beforehand +# install_langs: add font support for languages specified beyond the defaults # secrets: a list of secrets that are required for each chart. Each secret contains: # config_item: the name of the configuration value that is referenced in the chart's template files # env_var: the name of the environment variable that holds the value for the config_item. 
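The `secrets` schema described in this comment (a `config_item` name paired with the `env_var` that supplies its value) works together with the `HONEYCOMB_API_KEY` default added to `setup_cluster.py` above. A minimal sketch of how such entries can be resolved (hypothetical helper and names following the documented schema, not the actual `setup_combine.py` code):

```python
import os
from typing import Dict, List

def resolve_secrets(secrets: List[Dict[str, str]]) -> Dict[str, str]:
    """Map each chart secret's config_item to the value of its environment variable.

    Each entry follows the schema documented in combine_config.yaml:
      - config_item: the name referenced in the chart's template files
        env_var: the environment variable that holds the value
    """
    resolved = {}
    for entry in secrets:
        value = os.getenv(entry["env_var"])
        if value is None:
            raise KeyError(f"{entry['env_var']} is not set in the environment")
        resolved[entry["config_item"]] = value
    return resolved

# Mirrors the default added in setup_cluster.py: an empty key disables analytics
# without failing the lookup. ("honeycombApiKey" is an illustrative config_item.)
os.environ.setdefault("HONEYCOMB_API_KEY", "")
print(resolve_secrets([{"config_item": "honeycombApiKey", "env_var": "HONEYCOMB_API_KEY"}]))
```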
diff --git a/dev-requirements.txt b/dev-requirements.txt index 1e9b624447..cd29e9f39b 100644 --- a/dev-requirements.txt +++ b/dev-requirements.txt @@ -12,7 +12,7 @@ babel==2.16.0 # via mkdocs-material beautifulsoup4==4.12.3 # via mkdocs-htmlproofer-plugin -black==24.8.0 +black==24.10.0 # via -r dev-requirements.in cachetools==5.5.0 # via @@ -26,7 +26,7 @@ cffi==1.17.1 # via cryptography chardet==5.2.0 # via tox -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests click==8.1.7 # via @@ -35,19 +35,23 @@ click==8.1.7 colorama==0.4.6 # via # -r dev-requirements.in + # click + # mkdocs # mkdocs-material # tox -cryptography==43.0.1 +cryptography==44.0.0 # via # pyopenssl # types-pyopenssl -distlib==0.3.8 +distlib==0.3.9 # via virtualenv -dnspython==2.6.1 +dnspython==2.7.0 # via pymongo +durationpy==0.9 + # via kubernetes eradicate==2.3.0 # via flake8-eradicate -filelock==3.15.4 +filelock==3.16.1 # via # tox # virtualenv @@ -61,19 +65,19 @@ flake8==7.1.1 # pep8-naming flake8-broken-line==1.0.0 # via -r dev-requirements.in -flake8-bugbear==24.8.19 +flake8-bugbear==24.10.31 # via -r dev-requirements.in -flake8-comprehensions==3.15.0 +flake8-comprehensions==3.16.0 # via -r dev-requirements.in flake8-eradicate==1.5.0 # via -r dev-requirements.in ghp-import==2.1.0 # via mkdocs -google-auth==2.34.0 +google-auth==2.36.0 # via kubernetes humanfriendly==10.0 # via -r dev-requirements.in -idna==3.8 +idna==3.10 # via requests isort==5.13.2 # via -r dev-requirements.in @@ -85,7 +89,7 @@ jinja2==3.1.4 # mkdocs-material jinja2-base64-filters==0.1.4 # via -r dev-requirements.in -kubernetes==30.1.0 +kubernetes==31.0.0 # via -r dev-requirements.in markdown==3.7 # via @@ -93,7 +97,7 @@ markdown==3.7 # mkdocs-htmlproofer-plugin # mkdocs-material # pymdown-extensions -markupsafe==2.1.5 +markupsafe==3.0.2 # via # jinja2 # mkdocs @@ -110,15 +114,15 @@ mkdocs==1.6.1 # mkdocs-static-i18n mkdocs-get-deps==0.2.0 # via mkdocs -mkdocs-htmlproofer-plugin==1.2.1 +mkdocs-htmlproofer-plugin==1.3.0 # via -r dev-requirements.in -mkdocs-material==9.5.34 +mkdocs-material==9.5.47 # via -r dev-requirements.in mkdocs-material-extensions==1.3.1 # via mkdocs-material mkdocs-static-i18n==1.2.3 # via -r dev-requirements.in -mypy==1.11.2 +mypy==1.13.0 # via -r dev-requirements.in mypy-extensions==1.0.0 # via @@ -128,7 +132,7 @@ oauthlib==3.2.2 # via # kubernetes # requests-oauthlib -packaging==24.1 +packaging==24.2 # via # black # mkdocs @@ -142,7 +146,7 @@ pathspec==0.12.1 # mkdocs pep8-naming==0.14.1 # via -r dev-requirements.in -platformdirs==4.2.2 +platformdirs==4.3.6 # via # black # mkdocs-get-deps @@ -150,11 +154,11 @@ platformdirs==4.2.2 # virtualenv pluggy==1.5.0 # via tox -pyasn1==0.6.0 +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pycodestyle==2.12.1 # via flake8 @@ -164,16 +168,18 @@ pyflakes==3.2.0 # via flake8 pygments==2.18.0 # via mkdocs-material -pymdown-extensions==10.9 +pymdown-extensions==10.12 # via mkdocs-material -pymongo==4.8.0 +pymongo==4.10.1 # via -r dev-requirements.in -pyopenssl==24.2.1 +pyopenssl==24.3.0 # via -r dev-requirements.in -pyproject-api==1.7.1 +pyproject-api==1.8.0 # via tox -pyreadline3==3.4.1 - # via -r dev-requirements.in +pyreadline3==3.5.4 + # via + # -r dev-requirements.in + # humanfriendly python-dateutil==2.9.0.post0 # via # ghp-import @@ -188,7 +194,7 @@ pyyaml==6.0.2 # pyyaml-env-tag pyyaml-env-tag==0.1 # via mkdocs -regex==2024.7.24 +regex==2024.11.6 # via mkdocs-material requests==2.32.3 # via @@ -200,36 +206,36 
@@ requests-oauthlib==2.0.0 # via kubernetes rsa==4.9 # via google-auth -six==1.16.0 +six==1.17.0 # via # kubernetes # python-dateutil soupsieve==2.6 # via beautifulsoup4 -tox==4.18.0 +tox==4.23.2 # via -r dev-requirements.in types-cffi==1.16.0.20240331 # via types-pyopenssl types-pyopenssl==24.1.0.20240722 # via -r dev-requirements.in -types-python-dateutil==2.9.0.20240906 +types-python-dateutil==2.9.0.20241003 # via -r dev-requirements.in -types-pyyaml==6.0.12.20240808 +types-pyyaml==6.0.12.20240917 # via -r dev-requirements.in -types-requests==2.32.0.20240905 +types-requests==2.32.0.20241016 # via -r dev-requirements.in -types-setuptools==74.1.0.20240906 +types-setuptools==75.6.0.20241126 # via types-cffi typing-extensions==4.12.2 # via mypy -urllib3==2.2.2 +urllib3==2.2.3 # via # kubernetes # requests # types-requests -virtualenv==20.26.3 +virtualenv==20.28.0 # via tox -watchdog==5.0.2 +watchdog==6.0.0 # via mkdocs websocket-client==1.8.0 # via kubernetes diff --git a/docs/tutorial_subtitles/export_1_flex_to_combine/export_1_flex_to_combine.spa.txt b/docs/tutorial_subtitles/export_1_flex_to_combine/export_1_flex_to_combine.spa.txt new file mode 100644 index 0000000000..3cd1453e42 --- /dev/null +++ b/docs/tutorial_subtitles/export_1_flex_to_combine/export_1_flex_to_combine.spa.txt @@ -0,0 +1,44 @@ +Veamos cómo mover datos léxicos de un proyecto en FieldWorks a un proyecto en The Combine. +Para empezar, abra su proyecto en FieldWorks. +Aquí utilizo un proyecto de ejemplo con palabras del idioma Naskapi. +Con el proyecto FLEx deseado abierto, haga clic en el menú “Archivo” (“File”), luego seleccione “Exportar…” (“Export…”) en la parte inferior del menú. +En el diálogo “Exportar” (“Export”) que aparece, haga clic en la opción “Full Lexicon” “LIFT 0.13 XML”, luego haga clic en el botón “Exportar…” (“Export…”). +Aparece otro diálogo para que seleccione dónde se guardarán los archivos exportados. +Necesitarás crear una nueva carpeta para los archivos. +Voy a Desktop y hago clic en el botón “Crear nueva carpeta” (“Make New Folder”) para crear una carpeta Naskapi. +Seleccione la nueva carpeta que acaba de crear y haga clic en el botón “Aceptar” (“OK”). +En el Explorador de archivos, vaya a la carpeta que contiene esta nueva carpeta. +Haga clic derecho en la carpeta que acaba de crear para la exportación y seleccione “Comprimir en archivo ZIP” (“Compress to ZIP file”). +¿Ve el archivo ZIP que se ha creado? +Esto es lo que vamos a importar en The Combine. +Ahora abrimos un navegador web y vamos a thecombine.app. +Una vez que hemos iniciado sesión, vemos dos secciones: “Seleccionar proyecto” (“Select Project”) y “Crear proyecto” (“Create Project”). +En “Seleccionar proyecto” (“Select Project”) podemos abrir un proyecto creado previamente. +Es posible importar los datos léxicos a un proyecto existente, pero eso lo veremos más adelante. +En “Crear proyecto” (“Create Project”), vamos a crear un nuevo proyecto utilizando la exportación desde FLEx. +Primero teclearé un nombre para el proyecto, en mi caso: Naskapi. +Tenga en cuenta que hay campos más abajo donde podemos especificar el Idioma vernáculo (Vernacular Language) y un Idioma de análisis (Analysis Language) para el proyecto. +Esto no es necesario cuando importamos datos porque los idiomas del proyecto se recogerán automáticamente de los datos. +Para subir los datos existentes, haga clic en el botón “Navegar” (“Browse”). +Aparecerá un diálogo del explorador de archivos para seleccionar los datos LIFT exportados desde FLEx.
+Navego hasta el Desktop, donde exporté mis datos, selecciono el archivo “Naskapi.zip” y hago clic en el botón “Abrir” (“Open”). +Vea que The Combine tiene el texto “Archivo seleccionado: Naskapi.zip”. +¡Genial! +En “Idioma vernáculo” (“Vernacular Language”) hay ahora un menú desplegable. +Se utiliza para seleccionar cuál de las lenguas de los datos va a ser el idioma vernáculo. +The Combine sólo soporta la entrada de datos para un idioma vernáculo por proyecto. +El idioma vernáculo no se puede cambiar después de crear el proyecto. +Si necesita recopilar u organizar datos léxicos para un idioma diferente, basta con crear otro proyecto. +Tenga en cuenta que ya no puede especificar un idioma de análisis. +Esto se debe a que todos los idiomas de análisis presentes en el archivo ZIP se añaden automáticamente, y puede añadir y eliminar los idiomas de análisis en el proyecto en cualquier momento. +Sólo queda hacer clic en el botón “Crear proyecto” (“Create Project”). +Una vez creado el proyecto, se accede a la página de configuración del proyecto. +Para acceder a ella en el futuro, puede hacer clic en el icono de engranaje en la barra superior. +Tenga en cuenta que en la pestaña “Idiomas” (“Languages”), podemos ver el idioma vernáculo así como revisar y cambiar los idiomas de análisis. +Hagamos clic en la pestaña “Importar/Exportar” (“Import/Export”). +Aquí es donde puede importar datos léxicos a un proyecto existente. +Esa opción está desactivada ahora porque ya hemos importado datos a este proyecto. +Sólo se permite una importación por proyecto en The Combine. +Aquí también es donde podemos exportar datos desde The Combine para importarlos a FieldWorks, pero ese es un tema para otro video. +Espero que este video le ayude a comenzar con The Combine. +¡Que tenga un día maravilloso! diff --git a/docs/tutorial_subtitles/export_1_flex_to_combine/times.txt b/docs/tutorial_subtitles/export_1_flex_to_combine/times.txt index 81ed983d49..b263ac1ccb 100644 --- a/docs/tutorial_subtitles/export_1_flex_to_combine/times.txt +++ b/docs/tutorial_subtitles/export_1_flex_to_combine/times.txt @@ -1,44 +1,44 @@ -0:8 -0:14 -0:19 -0:30 -0:44 -0:50 -0:54 -1:11 -1:17 -1:25 -1:35 -1:39 -1:44 -1:56 -2:3 -2:9 +0:7.5 +0:12 +0:17 +0:26.5 +0:39.5 +0:46 +0:49.5 +1:2 +1:8 +1:15 +1:26 +1:29 +1:34 +1:41 +1:52 +1:57.5 +2:5 +2:11 2:17 -2:24 -2:32 -2:41 -2:48.5 -2:54 -3:1.5 -3:16.5 -3:24 -3:25.5 -3:29 -3:38 -3:44 -3:49 -3:56.5 -4:2 -4:13.5 -4:20.5 -4:26.5 -4:33 -4:44 -4:47.5 -4:52.5 -4:59.5 -5:4 -5:13 -5:16 -5:19 +2:26 +2:34 +2:39 +2:46 +2:59 +3:5 +3:7 +3:11.5 +3:18 +3:23 +3:28.5 +3:36.5 +3:41.5 +3:52.5 +3:57.5 +4:2.5 +4:9 +4:19 +4:23 +4:29 +4:34.75 +4:38 +4:48 +4:51 +4:54 diff --git a/docs/tutorial_subtitles/merge_dups_1_basics/merge_dups_1_basics.spa.txt b/docs/tutorial_subtitles/merge_dups_1_basics/merge_dups_1_basics.spa.txt index 6740d1daf0..160548db5a 100644 --- a/docs/tutorial_subtitles/merge_dups_1_basics/merge_dups_1_basics.spa.txt +++ b/docs/tutorial_subtitles/merge_dups_1_basics/merge_dups_1_basics.spa.txt @@ -1,42 +1,42 @@ Cuando se recopilan palabras por dominio semántico, a veces se introduce la misma palabra varias veces. The Combine tiene una herramienta para encontrar entradas duplicadas y combinarlas en una sola entrada. Iniciemos sesión en thecombine.app y seleccionemos un proyecto que tenga entradas duplicadas. -Haga clic en el botón “Limpieza de datos” en la barra superior, luego seleccione “Combinar duplicados”.
+Haga clic en el botón “Limpieza de datos” (“Data Cleanup”) en la barra superior, luego seleccione “Combinar duplicados” (“Merge Duplicates”). The Combine encontrará conjuntos de palabras con idéntica forma vernácula y se los presentará de uno en uno. En este proyecto, el primer conjunto de duplicados potenciales es un par de palabras con la forma vernácula “hard”. -Uno tiene glosa “difficult" (difícil) y el otro tiene glosa “not soft" (no suave). +Uno tiene glosa “difficult” (difícil) y el otro tiene glosa “not soft” (no suave). ¿Son dos acepciones diferentes de la misma palabra o dos palabras diferentes? -Si no estamos seguros, o no queremos tomar esa decisión ahora, podemos pulsar el botón “Aplazar” en la parte inferior. -Eso nos mueve al siguiente conjunto y evita que el conjunto aplazado vuelva a aparecer aquí en la herramienta “Combinar duplicados”. +Si no estamos seguros, o no queremos tomar esa decisión ahora, podemos pulsar el botón “Aplazar” (“Defer”) en la parte inferior. +Eso nos mueve al siguiente conjunto y evita que el conjunto aplazado vuelva a aparecer aquí en la herramienta “Combinar duplicados” (“Merge Duplicates”). Hay otra herramienta para considerar los conjuntos que se han aplazado. Lo veremos más adelante en este video. Este segundo conjunto de duplicados potenciales tiene dos palabras con la forma vernácula “bank”. Uno tiene glosa “side of river” (lado del río) y el otro tiene glosa “financial institution” (institución financiera). Definitivamente son palabras diferentes—palabras homógrafas que no queremos combinar. -Haga clic en el botón “Guardar y continuar” en la parte inferior para confirmar que son palabras distintas. -Este par no volverá a aparecer en “Combinar duplicados”, ni en la lista de conjuntos aplazados. -Ya hemos visto lo que hacen los botones “Guardar y continuar” y “Aplazar”. -Una nota importante: si un usuario cambia más tarde alguna de las palabras de un conjunto de duplicados potenciales, entonces ese conjunto puede aparecer de nuevo en la herramienta “Combinar duplicados”. -Por ejemplo, si hace clic en "Guardar y continuar" para un conjunto de palabras que no son duplicados, y más tarde añade un dominio semántico a una acepción de una de las palabras, ese conjunto puede aparecer de nuevo como duplicados potenciales. -O si hace clic en “Aplazar” para un conjunto, y luego otro usuario añade una grabación de audio para una de las palabras, entonces ese conjunto aparecerá de nuevo en “Combinar duplicados”, en lugar de entre los conjuntos diferidos. +Haga clic en el botón “Guardar y continuar” (“Save & Continue”) en la parte inferior para confirmar que son palabras distintas. +Este par no volverá a aparecer en “Combinar duplicados” (“Merge Duplicates”), ni en la lista de conjuntos aplazados. +Ya hemos visto lo que hacen los botones “Guardar y continuar” (“Save & Continue”) y “Aplazar” (“Defer”). +Una nota importante: si un usuario cambia más tarde alguna de las palabras de un conjunto de duplicados potenciales, entonces ese conjunto puede aparecer de nuevo en la herramienta “Combinar duplicados” (“Merge Duplicates”). +Por ejemplo, si hace clic en “Guardar y continuar” (“Save & Continue”) para un conjunto de palabras que no son duplicados, y más tarde añade un dominio semántico a una acepción de una de las palabras, ese conjunto puede aparecer de nuevo como duplicados potenciales.
+O si hace clic en “Aplazar” (“Defer”) para un conjunto, y luego otro usuario añade una grabación de audio para una de las palabras, entonces ese conjunto aparecerá de nuevo en “Combinar duplicados” (“Merge Duplicates”), en lugar de entre los conjuntos diferidos. Sigamos adelante. The Combine pregunta si queremos seguir combinando. Si hacemos clic en “No”, nos llevará a la página “Limpieza de datos”. -Si hacemos clic en "Sí", buscará más conjuntos de duplicados potenciales. +Si hacemos clic en “Sí” (“Yes”), buscará más conjuntos de duplicados potenciales. Después de que The Combine no pueda encontrar más palabras con idéntica forma vernácula, le sugerirá palabras con formas vernáculas similares. Esto ayudará a detectar duplicados donde una de las palabras tenga error tipográfico o se utilice ortografía alternativa. Aquí vemos dos palabras con forma vernácula “present”, pero la segunda tiene dos “t” al final. Mmm, debe de ser una errata. Un “gift” (regalo) se escribe “p-r-e-s-e-n-t” no “p-r-e-s-e-n-t-t”. -No puede editar el texto en la herramienta "Combinar duplicados". +No puede editar el texto en la herramienta “Combinar duplicados” (“Merge Duplicates”). Sin embargo, puede hacer clic en el icono de la bandera para añadir una bandera y mencionar lo que hay que arreglar. -Tenga en cuenta que para que se guarde la bandera, debe hacer clic en “Guardar y continuar”. -Podemos dejar la combinación en cualquier momento haciendo clic en el botón “Limpieza de datos”, pero entonces se descartarán los cambios no guardados. -De vuelta en “Limpieza de datos”, hay una herramienta “Revisar entradas”. +Tenga en cuenta que para que se guarde la bandera, debe hacer clic en “Guardar y continuar” (“Save & Continue”). +Podemos dejar la combinación en cualquier momento haciendo clic en el botón “Limpieza de datos” (“Data Cleanup”), pero entonces se descartarán los cambios no guardados. +De vuelta en “Limpieza de datos” (“Data Cleanup”), hay una herramienta “Revisar entradas” (“Review Entries”). Ahí puede revisar y editar todas las palabras en el proyecto y corregir las que se hayan marcado al combinar duplicados. -Cubriremos la herramienta “Revisar entradas” en otro vídeo. -Tenga en cuenta que aquí hay otra opción que antes no estaba aquí: “Revisar duplicados aplazados”. +Cubriremos la herramienta “Revisar entradas” (“Review Entries”) en otro vídeo. +Tenga en cuenta que aquí hay otra opción que antes no estaba aquí: “Revisar duplicados aplazados” (“Review Deferred Duplicates”). Haga clic ahí para revisar los conjuntos de duplicados potenciales que se aplazaron anteriormente. -Bueno, eso es todo para este primer video tutorial sobre la herramienta "Combinar duplicados". +Bueno, eso es todo para este primer video tutorial sobre la herramienta “Combinar duplicados” (“Merge Duplicates”). En el siguiente video, veremos cómo mover, borrar y combinar acepciones. ¡Que tenga un día maravilloso!
diff --git a/docs/user_guide/assets/licenses/backend_licenses.txt b/docs/user_guide/assets/licenses/backend_licenses.txt index 635b47a558..0892713a32 100644 --- a/docs/user_guide/assets/licenses/backend_licenses.txt +++ b/docs/user_guide/assets/licenses/backend_licenses.txt @@ -118,35 +118,35 @@ License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Configuration -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Configuration.Abstractions -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Configuration.Binder -PackageVersion: 8.0.1 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.DependencyInjection -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.DependencyInjection.Abstractions -PackageVersion: 8.0.2 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT @@ -160,63 +160,63 @@ License: https://github.com/dotnet/core-setup/blob/master/LICENSE.TXT LicenseUrl: https://github.com/dotnet/core-setup/blob/master/LICENSE.TXT ############################################################### PackageId: Microsoft.Extensions.Diagnostics.Abstractions -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.FileProviders.Abstractions -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Hosting.Abstractions -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Logging -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Logging.Abstractions -PackageVersion: 8.0.2 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Logging.Configuration -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT 
############################################################### PackageId: Microsoft.Extensions.Options -PackageVersion: 8.0.2 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Options.ConfigurationExtensions -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Microsoft.Extensions.Primitives -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT @@ -348,51 +348,51 @@ License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: OpenTelemetry -PackageVersion: 1.8.1 +PackageVersion: 1.10.0 PackageProjectUrl: https://opentelemetry.io/ Authors: OpenTelemetry Authors License: Apache-2.0 LicenseUrl: https://licenses.nuget.org/Apache-2.0 ############################################################### PackageId: OpenTelemetry.Api -PackageVersion: 1.8.1 +PackageVersion: 1.10.0 PackageProjectUrl: https://opentelemetry.io/ Authors: OpenTelemetry Authors License: Apache-2.0 LicenseUrl: https://licenses.nuget.org/Apache-2.0 ############################################################### PackageId: OpenTelemetry.Api.ProviderBuilderExtensions -PackageVersion: 1.8.1 +PackageVersion: 1.10.0 PackageProjectUrl: https://opentelemetry.io/ Authors: OpenTelemetry Authors License: Apache-2.0 LicenseUrl: https://licenses.nuget.org/Apache-2.0 ############################################################### PackageId: OpenTelemetry.Exporter.Console -PackageVersion: 1.8.1 +PackageVersion: 1.10.0 PackageProjectUrl: https://opentelemetry.io/ Authors: OpenTelemetry Authors License: Apache-2.0 LicenseUrl: https://licenses.nuget.org/Apache-2.0 ############################################################### PackageId: OpenTelemetry.Exporter.OpenTelemetryProtocol -PackageVersion: 1.8.1 +PackageVersion: 1.10.0 PackageProjectUrl: https://opentelemetry.io/ Authors: OpenTelemetry Authors License: Apache-2.0 LicenseUrl: https://licenses.nuget.org/Apache-2.0 ############################################################### PackageId: OpenTelemetry.Extensions.Hosting -PackageVersion: 1.8.1 +PackageVersion: 1.10.0 PackageProjectUrl: https://opentelemetry.io/ Authors: OpenTelemetry Authors License: Apache-2.0 LicenseUrl: https://licenses.nuget.org/Apache-2.0 ############################################################### PackageId: OpenTelemetry.Instrumentation.AspNetCore -PackageVersion: 1.8.1 +PackageVersion: 1.9.0 PackageProjectUrl: https://opentelemetry.io/ -Authors: OpenTelemetry Authors +Authors: OpenTelemetry authors License: Apache-2.0 LicenseUrl: https://licenses.nuget.org/Apache-2.0 ############################################################### @@ -573,28 +573,28 @@ License: https://opensource.org/licenses/Zlib LicenseUrl: https://opensource.org/licenses/Zlib ############################################################### PackageId: Swashbuckle.AspNetCore -PackageVersion: 6.8.1 +PackageVersion: 6.9.0 PackageProjectUrl: https://github.com/domaindrivendev/Swashbuckle.AspNetCore Authors: domaindrivendev License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: 
Swashbuckle.AspNetCore.Swagger -PackageVersion: 6.8.1 +PackageVersion: 6.9.0 PackageProjectUrl: https://github.com/domaindrivendev/Swashbuckle.AspNetCore Authors: domaindrivendev License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Swashbuckle.AspNetCore.SwaggerGen -PackageVersion: 6.8.1 +PackageVersion: 6.9.0 PackageProjectUrl: https://github.com/domaindrivendev/Swashbuckle.AspNetCore Authors: domaindrivendev License: MIT LicenseUrl: https://licenses.nuget.org/MIT ############################################################### PackageId: Swashbuckle.AspNetCore.SwaggerUI -PackageVersion: 6.8.1 +PackageVersion: 6.9.0 PackageProjectUrl: https://github.com/domaindrivendev/Swashbuckle.AspNetCore Authors: domaindrivendev License: MIT @@ -643,7 +643,7 @@ License: http://go.microsoft.com/fwlink/?LinkId=329770 LicenseUrl: http://go.microsoft.com/fwlink/?LinkId=329770 ############################################################### PackageId: System.Diagnostics.DiagnosticSource -PackageVersion: 8.0.0 +PackageVersion: 9.0.0 PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT @@ -971,16 +971,9 @@ Authors: Microsoft License: http://go.microsoft.com/fwlink/?LinkId=329770 LicenseUrl: http://go.microsoft.com/fwlink/?LinkId=329770 ############################################################### -PackageId: System.Text.Encodings.Web -PackageVersion: 4.7.2 -PackageProjectUrl: https://github.com/dotnet/corefx -Authors: Microsoft -License: MIT -LicenseUrl: https://licenses.nuget.org/MIT -############################################################### PackageId: System.Text.Json -PackageVersion: 4.7.2 -PackageProjectUrl: https://github.com/dotnet/corefx +PackageVersion: 8.0.5 +PackageProjectUrl: https://dot.net/ Authors: Microsoft License: MIT LicenseUrl: https://licenses.nuget.org/MIT diff --git a/docs/user_guide/assets/licenses/frontend_licenses.txt b/docs/user_guide/assets/licenses/frontend_licenses.txt index 2782dbbba2..da7e4554fb 100644 --- a/docs/user_guide/assets/licenses/frontend_licenses.txt +++ b/docs/user_guide/assets/licenses/frontend_licenses.txt @@ -42514,7 +42514,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -is-core-module 2.14.0 +is-core-module 2.15.1 MIT The MIT License (MIT) @@ -42671,6 +42671,31 @@ This library is a fork of 'better-json-errors' by Kat Marchán, extended and distributed under the terms of the MIT license above. +levenshtein-search 0.1.2 +MIT +MIT License + +Copyright (c) 2018 Tal Einat + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + lines-and-columns 1.2.4 MIT The MIT License (MIT) diff --git a/docs/user_guide/docs/goals.es.md b/docs/user_guide/docs/goals.es.md index a417401426..fc2343e32f 100644 --- a/docs/user_guide/docs/goals.es.md +++ b/docs/user_guide/docs/goals.es.md @@ -27,7 +27,8 @@ There are icons at the top of each column to ![Review Entries column sort icon](../images/reviewEntriesColumnSort.png){width=20} sort the data. In a column with predominantly text content (Vernacular, Glosses, Note, or Flag), you can sort alphabetically or filter -with a text search. +with a text search. By default, the text search is a fuzzy match: it is not case sensitive and it allows for one or two +typos. If you want exact text matches, use quotes around your filter. In the Number of Senses column or Pronunciations column, you can sort or filter by the number of senses or recordings that entries have. In the Pronunciations column, you can also filter by speaker name. diff --git a/docs/user_guide/docs/goals.md b/docs/user_guide/docs/goals.md index fc08a36fe5..b7ff8e3051 100644 --- a/docs/user_guide/docs/goals.md +++ b/docs/user_guide/docs/goals.md @@ -26,7 +26,8 @@ There are icons at the top of each column to ![Review Entries column sort icon](images/reviewEntriesColumnSort.png){width=20} sort the data. In a column with predominantly text content (Vernacular, Glosses, Note, or Flag), you can sort alphabetically or filter -with a text search. +with a text search. By default, the text search is a fuzzy match: it is not case sensitive and it allows for one or two +typos. If you want exact text matches, use quotes around your filter. In the Number of Senses column or Pronunciations column, you can sort or filter by the number of senses or recordings that entries have. In the Pronunciations column, you can also filter by speaker name. diff --git a/docs/user_guide/docs/goals.zh.md b/docs/user_guide/docs/goals.zh.md index a9a17ddcd4..b5cb930de0 100644 --- a/docs/user_guide/docs/goals.zh.md +++ b/docs/user_guide/docs/goals.zh.md @@ -26,7 +26,8 @@ There are icons at the top of each column to ![Review Entries column sort icon](../images/reviewEntriesColumnSort.png){width=20} sort the data. In a column with predominantly text content (Vernacular, Glosses, Note, or Flag), you can sort alphabetically or filter -with a text search. +with a text search. By default, the text search is a fuzzy match: it is not case sensitive and it allows for one or two +typos. If you want exact text matches, use quotes around your filter. In the Number of Senses column or Pronunciations column, you can sort or filter by the number of senses or recordings that entries have. In the Pronunciations column, you can also filter by speaker name. 
diff --git a/maintenance/Dockerfile b/maintenance/Dockerfile index 9e86310ad8..c72f762ab2 100644 --- a/maintenance/Dockerfile +++ b/maintenance/Dockerfile @@ -16,7 +16,7 @@ # - ARM 64-bit ############################################################ -FROM sillsdev/aws-kubectl:0.3.0 +FROM public.ecr.aws/thecombine/aws-kubectl:0.4.0-$TARGETARCH USER root diff --git a/maintenance/requirements.txt b/maintenance/requirements.txt index 56a4f0d055..447197cf64 100644 --- a/maintenance/requirements.txt +++ b/maintenance/requirements.txt @@ -12,19 +12,19 @@ certifi==2024.8.30 # requests cffi==1.17.1 # via cryptography -charset-normalizer==3.3.2 +charset-normalizer==3.4.0 # via requests -cryptography==43.0.1 +cryptography==44.0.0 # via pyopenssl -dnspython==2.6.1 +dnspython==2.7.0 # via pymongo -durationpy==0.8 +durationpy==0.9 # via kubernetes -google-auth==2.34.0 +google-auth==2.36.0 # via kubernetes humanfriendly==10.0 # via -r requirements.in -idna==3.8 +idna==3.10 # via requests kubernetes==31.0.0 # via -r requirements.in @@ -32,18 +32,20 @@ oauthlib==3.2.2 # via # kubernetes # requests-oauthlib -pyasn1==0.6.0 +pyasn1==0.6.1 # via # pyasn1-modules # rsa -pyasn1-modules==0.4.0 +pyasn1-modules==0.4.1 # via google-auth pycparser==2.22 # via cffi -pymongo==4.10.0 +pymongo==4.10.1 # via -r requirements.in -pyopenssl==24.2.1 +pyopenssl==24.3.0 # via -r requirements.in +pyreadline3==3.5.4 + # via humanfriendly python-dateutil==2.9.0.post0 # via kubernetes pyyaml==6.0.2 @@ -56,11 +58,11 @@ requests-oauthlib==2.0.0 # via kubernetes rsa==4.9 # via google-auth -six==1.16.0 +six==1.17.0 # via # kubernetes # python-dateutil -urllib3==2.2.2 +urllib3==2.2.3 # via # kubernetes # requests diff --git a/package-lock.json b/package-lock.json index 467c489958..6e11eaad11 100644 --- a/package-lock.json +++ b/package-lock.json @@ -31,6 +31,7 @@ "i18next-browser-languagedetector": "^8.0.0", "i18next-http-backend": "^2.6.0", "js-base64": "^3.7.7", + "levenshtein-search": "^0.1.2", "make-dir": "^4.0.0", "material-react-table": "^2.9.2", "motion": "^10.16.2", @@ -63,9 +64,9 @@ "@testing-library/user-event": "^14.5.2", "@types/crypto-js": "^4.2.2", "@types/css-mediaquery": "^0.1.2", - "@types/jest": "^29.5.5", + "@types/jest": "^29.5.14", "@types/loadable__component": "^5.13.8", - "@types/node": "^20.14.0", + "@types/node": "^20.17.0", "@types/nspell": "^2.1.5", "@types/react": "^18.2.61", "@types/react-beautiful-dnd": "^13.1.8", @@ -82,7 +83,7 @@ "css-mediaquery": "^0.1.2", "eslint": "^8.51.0", "eslint-import-resolver-typescript": "^3.6.0", - "eslint-plugin-import": "^2.29.0", + "eslint-plugin-import": "^2.31.0", "eslint-plugin-react": "^7.33.2", "eslint-plugin-react-hooks": "^4.3.0", "eslint-plugin-unused-imports": "^3.1.0", @@ -4073,6 +4074,12 @@ "integrity": "sha512-EYNwp3bU+98cpU4lAWYYL7Zz+2gryWH1qbdDTidVd6hkiR6weksdbMadyXKXNPEkQFhXM+hVO9ZygomHXp+AIw==", "dev": true }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true + }, "node_modules/@rushstack/eslint-patch": { "version": "1.10.3", "resolved": "https://registry.npmjs.org/@rushstack/eslint-patch/-/eslint-patch-1.10.3.tgz", @@ -7806,9 +7813,9 @@ } }, "node_modules/@types/jest": { - "version": "29.5.12", - "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.12.tgz", - "integrity": 
"sha512-eDC8bTvT/QhYdxJAulQikueigY5AsdBRH2yDKW3yveW7svY3+DzN84/2NUgkw10RTiJbWqZrTtoGVdYlvFJdLw==", + "version": "29.5.14", + "resolved": "https://registry.npmjs.org/@types/jest/-/jest-29.5.14.tgz", + "integrity": "sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==", "dev": true, "dependencies": { "expect": "^29.0.0", @@ -7875,12 +7882,13 @@ "dev": true }, "node_modules/@types/node": { - "version": "20.14.0", - "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.0.tgz", - "integrity": "sha512-5cHBxFGJx6L4s56Bubp4fglrEpmyJypsqI6RgzMfBHWUJQGWAAi8cWcgetEbZXHYXo9C2Fa4EEds/uSyS4cxmA==", + "version": "20.17.6", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.17.6.tgz", + "integrity": "sha512-VEI7OdvK2wP7XHnsuXbAJnEpEkF6NjSN45QJlL4VGqZSXsnicpesdTWsg9RISeSdYd3yeRj/y3k5KGjUXYnFwQ==", "dev": true, + "license": "MIT", "dependencies": { - "undici-types": "~5.26.4" + "undici-types": "~6.19.2" } }, "node_modules/@types/node-forge": { @@ -10358,9 +10366,9 @@ "dev": true }, "node_modules/cookie": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.6.0.tgz", - "integrity": "sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw==", + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", "dev": true, "engines": { "node": ">= 0.6" @@ -12110,9 +12118,9 @@ } }, "node_modules/eslint-module-utils": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", - "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.0.tgz", + "integrity": "sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==", "dev": true, "dependencies": { "debug": "^3.2.7" @@ -12154,34 +12162,36 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", - "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", + "version": "2.31.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.31.0.tgz", + "integrity": "sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==", "dev": true, "dependencies": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.8", + "array.prototype.findlastindex": "^1.2.5", "array.prototype.flat": "^1.3.2", "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", "eslint-import-resolver-node": "^0.3.9", - "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", + "eslint-module-utils": "^2.12.0", + "hasown": "^2.0.2", + "is-core-module": "^2.15.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.0", "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.8", "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" }, 
"peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" } }, "node_modules/eslint-plugin-import/node_modules/brace-expansion": { @@ -12944,9 +12954,9 @@ } }, "node_modules/express": { - "version": "4.21.0", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.0.tgz", - "integrity": "sha512-VqcNGcj/Id5ZT1LZ/cfihi3ttTn+NJmkli2eZADigjq29qTlWi/hAQ43t/VLPq8+UX06FCEx3ByOYet6ZFblng==", + "version": "4.21.1", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.1.tgz", + "integrity": "sha512-YSFlK1Ee0/GC8QaO91tHcDxJiE/X4FbpAyQWkxAvG6AXCuR65YzK8ua6D9hvi/TzUfZMpc+BwuM1IPw8fmQBiQ==", "dev": true, "dependencies": { "accepts": "~1.3.8", @@ -12954,7 +12964,7 @@ "body-parser": "1.20.3", "content-disposition": "0.5.4", "content-type": "~1.0.4", - "cookie": "0.6.0", + "cookie": "0.7.1", "cookie-signature": "1.0.6", "debug": "2.6.9", "depd": "2.0.0", @@ -14707,9 +14717,9 @@ } }, "node_modules/is-core-module": { - "version": "2.14.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.14.0.tgz", - "integrity": "sha512-a5dFJih5ZLYlRtDc0dZWP7RiKr6xIKzmn/oAYCDvdLThadVgyJwlaoQPmRtMSpz+rk0OGAgIu+TcM9HUF0fk1A==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dependencies": { "hasown": "^2.0.2" }, @@ -17255,6 +17265,12 @@ "node": ">=6" } }, + "node_modules/levenshtein-search": { + "version": "0.1.2", + "resolved": "https://registry.npmjs.org/levenshtein-search/-/levenshtein-search-0.1.2.tgz", + "integrity": "sha512-MEwjuzHgVaibXrmH1Kh0maHxLPtkWy+tKaWXa4o3eASKE4rX+vKBnh+ektEIoDm8s4HwOk1cVxlNAdSABi+YlA==", + "license": "MIT" + }, "node_modules/levn": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/levn/-/levn-0.4.1.tgz", @@ -24922,10 +24938,11 @@ "dev": true }, "node_modules/undici-types": { - "version": "5.26.5", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", - "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", - "dev": true + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==", + "dev": true, + "license": "MIT" }, "node_modules/unfetch": { "version": "4.2.0", diff --git a/package.json b/package.json index 8936b2d427..755f10a4b9 100644 --- a/package.json +++ b/package.json @@ -59,6 +59,7 @@ "i18next-browser-languagedetector": "^8.0.0", "i18next-http-backend": "^2.6.0", "js-base64": "^3.7.7", + "levenshtein-search": "^0.1.2", "make-dir": "^4.0.0", "material-react-table": "^2.9.2", "motion": "^10.16.2", @@ -91,9 +92,9 @@ "@testing-library/user-event": "^14.5.2", "@types/crypto-js": "^4.2.2", "@types/css-mediaquery": "^0.1.2", - "@types/jest": "^29.5.5", + "@types/jest": "^29.5.14", "@types/loadable__component": "^5.13.8", - "@types/node": "^20.14.0", + "@types/node": "^20.17.0", "@types/nspell": "^2.1.5", "@types/react": "^18.2.61", "@types/react-beautiful-dnd": "^13.1.8", @@ -110,7 +111,7 @@ "css-mediaquery": "^0.1.2", "eslint": "^8.51.0", "eslint-import-resolver-typescript": "^3.6.0", - "eslint-plugin-import": "^2.29.0", + "eslint-plugin-import": "^2.31.0", "eslint-plugin-react": "^7.33.2", "eslint-plugin-react-hooks": "^4.3.0", 
"eslint-plugin-unused-imports": "^3.1.0", @@ -160,6 +161,6 @@ ] }, "volta": { - "node": "20.14.0" + "node": "20.17.0" } } diff --git a/public/locales/ar/translation.json b/public/locales/ar/translation.json index d6ffed2f0a..89f8c7fa9d 100644 --- a/public/locales/ar/translation.json +++ b/public/locales/ar/translation.json @@ -84,7 +84,7 @@ "upload?": "تحميل البيانات الموجودة؟", "uploadFormat": "يجب أن يكون الملف مجلد مضغوط يحتوي على <1>نموذج تبادل المعجم (LIFT).", "create": "إنشاء مشروع", - "fileSelected": "تم تحديد الملف", + "fileSelected": "تم تحديد الملف: {{ val }}", "language": "ستستخرج معلومات لغة المشروع تلقائيًا من البيانات التي تم تحميلها.", "languageOptionOther": "آخر", "languageSelect": "اختر من بين اللغات الموجودة في الملف:", diff --git a/public/locales/en/translation.json b/public/locales/en/translation.json index 7df6a4b104..2e6e9f5d78 100644 --- a/public/locales/en/translation.json +++ b/public/locales/en/translation.json @@ -103,7 +103,7 @@ "upload?": "Upload existing data?", "uploadFormat": "File must be a zipped folder containing a <1>Lexicon Interchange FormaT (LIFT) project.", "create": "Create Project", - "fileSelected": "File selected", + "fileSelected": "File selected: {{ val }}", "language": "Project language information will be automatically extracted from uploaded data.", "languageOptionOther": "other", "languageSelect": "Select from languages in file:", diff --git a/public/locales/es/translation.json b/public/locales/es/translation.json index cf596f89cc..dae2ef6480 100644 --- a/public/locales/es/translation.json +++ b/public/locales/es/translation.json @@ -92,7 +92,7 @@ "upload?": "¿Cargar los datos existentes?", "uploadFormat": "El archivo debe ser una carpeta comprimida que contenga un proyecto <1>Lexicon Interchange FormaT (LIFT).", "create": "Crear Proyecto", - "fileSelected": "Archivo seleccionado", + "fileSelected": "Archivo seleccionado: {{ val }}", "language": "La información lingüística del proyecto se extraerá automáticamente de los datos cargados.", "languageOptionOther": "otro", "languageSelect": "Seleccionar de idiomas en el archivo:", diff --git a/public/locales/fr/translation.json b/public/locales/fr/translation.json index 7b197f8adb..f52a48df47 100644 --- a/public/locales/fr/translation.json +++ b/public/locales/fr/translation.json @@ -83,7 +83,7 @@ "nameTaken": "Nom pris", "upload?": "Télécharger des données existantes ?", "create": "Créer un projet", - "fileSelected": "Fichier sélectionné", + "fileSelected": "Fichier sélectionné : {{ val }}", "success": "Projet créé !" }, "selectProject": { "title": "Sélectionner un projet" }, diff --git a/public/locales/pt/translation.json b/public/locales/pt/translation.json index 054e5ad865..35437ca353 100644 --- a/public/locales/pt/translation.json +++ b/public/locales/pt/translation.json @@ -84,7 +84,7 @@ "upload?": "Carregar dados existentes?", "uploadFormat": "O arquivo deve ser uma pasta zipada que contenha um <1>Lexicon Interchange FormaT (LIFT) projeto.", "create": "Criar Projeto", - "fileSelected": "Arquivo selecionado", + "fileSelected": "Arquivo selecionado: {{ val }}", "success": "Projeto Criado!" 
}, "selectProject": { "title": "Selecionar Projeto" }, diff --git a/public/locales/zh/translation.json b/public/locales/zh/translation.json index 024526153b..619f4a4033 100644 --- a/public/locales/zh/translation.json +++ b/public/locales/zh/translation.json @@ -87,7 +87,7 @@ "upload?": "上传现有数据?", "uploadFormat": "文件必须是一个包含 <1>Lexicon Interchange FormaT (LIFT) 项目的压缩文件夹。", "create": "创建项目", - "fileSelected": "文件已选定", + "fileSelected": "文件已选定: {{ val }}", "language": "项目语言资料将从上传的数据中提取。", "languageOptionOther": "其它", "languageSelect": "从文件中选择语言:", diff --git a/src/components/Dialogs/UploadImage.tsx b/src/components/Dialogs/UploadImage.tsx index 92c232f67f..d12cf914b9 100644 --- a/src/components/Dialogs/UploadImage.tsx +++ b/src/components/Dialogs/UploadImage.tsx @@ -50,7 +50,7 @@ export default function ImageUpload(props: ImageUploadProps): ReactElement { {/* Displays the name of the selected file */} {filename && ( - {t("createProject.fileSelected")}: {filename} + {t("createProject.fileSelected", { val: filename })} )} diff --git a/src/components/ProjectExport/ExportButton.tsx b/src/components/ProjectExport/ExportButton.tsx index 30e696c9a9..8c9585a439 100644 --- a/src/components/ProjectExport/ExportButton.tsx +++ b/src/components/ProjectExport/ExportButton.tsx @@ -1,6 +1,6 @@ +import { Tooltip } from "@mui/material"; import { ButtonProps } from "@mui/material/Button"; -import { enqueueSnackbar } from "notistack"; -import { ReactElement } from "react"; +import { ReactElement, useEffect, useState } from "react"; import { useTranslation } from "react-i18next"; import { isFrontierNonempty } from "backend"; @@ -18,16 +18,11 @@ interface ExportButtonProps { /** A button for exporting project to Lift file */ export default function ExportButton(props: ExportButtonProps): ReactElement { const dispatch = useAppDispatch(); + const [exports, setExports] = useState(false); const { t } = useTranslation(); async function exportProj(): Promise { - await isFrontierNonempty(props.projectId).then(async (isNonempty) => { - if (isNonempty) { - await dispatch(asyncExportProject(props.projectId)); - } else { - enqueueSnackbar(t("projectExport.cannotExportEmpty")); - } - }); + await dispatch(asyncExportProject(props.projectId)); } const exportResult = useAppSelector( @@ -38,17 +33,25 @@ export default function ExportButton(props: ExportButtonProps): ReactElement { exportResult.status === ExportStatus.Success || exportResult.status === ExportStatus.Downloading; + useEffect(() => { + isFrontierNonempty(props.projectId).then(setExports); + }, [props.projectId]); + return ( - - {t("buttons.export")} - + + + + {t("buttons.export")} + + + ); } diff --git a/src/components/ProjectScreen/CreateProject.tsx b/src/components/ProjectScreen/CreateProject.tsx index 38daa16d3f..3f2d2738d6 100644 --- a/src/components/ProjectScreen/CreateProject.tsx +++ b/src/components/ProjectScreen/CreateProject.tsx @@ -256,7 +256,7 @@ export default function CreateProject(): ReactElement { {/* Uploaded file name and remove button */} {languageData && ( - {`${t("createProject.fileSelected")}: ${languageData.name}`} + {t("createProject.fileSelected", { val: languageData.name })} updateLanguageData()}> diff --git a/src/components/ProjectSettings/ProjectImport.tsx b/src/components/ProjectSettings/ProjectImport.tsx index 1ad927132c..58fceb0829 100644 --- a/src/components/ProjectSettings/ProjectImport.tsx +++ b/src/components/ProjectSettings/ProjectImport.tsx @@ -78,9 +78,7 @@ export default function ProjectImport( {/* Displays the name of the 
selected file */} {liftFile && ( - {t("createProject.fileSelected")} - {": "} - {liftFile.name} + {t("createProject.fileSelected", { val: liftFile.name })} )} diff --git a/src/components/ProjectSettings/tests/index.test.tsx b/src/components/ProjectSettings/tests/index.test.tsx index ef1ee8e3a4..f999aa2bef 100644 --- a/src/components/ProjectSettings/tests/index.test.tsx +++ b/src/components/ProjectSettings/tests/index.test.tsx @@ -34,6 +34,7 @@ jest.mock("backend", () => ({ getAllUsers: () => Promise.resolve([]), getCurrentPermissions: () => mockGetCurrentPermissions(), getUserRoles: () => Promise.resolve([]), + isFrontierNonempty: () => Promise.resolve(false), })); jest.mock("components/Project/ProjectActions"); // Mock "i18n", else `thrown: "Error: Error: connect ECONNREFUSED ::1:80 [...]` diff --git a/src/components/Pronunciations/AudioRecorder.tsx b/src/components/Pronunciations/AudioRecorder.tsx index 981e949d27..2e8af49740 100644 --- a/src/components/Pronunciations/AudioRecorder.tsx +++ b/src/components/Pronunciations/AudioRecorder.tsx @@ -1,4 +1,4 @@ -import { ReactElement, useContext } from "react"; +import { ReactElement, useContext, useEffect, useState } from "react"; import { useTranslation } from "react-i18next"; import { toast } from "react-toastify"; @@ -22,15 +22,28 @@ export default function AudioRecorder(props: RecorderProps): ReactElement { (state: StoreState) => state.currentProjectState.speaker?.id ); const recorder = useContext(RecorderContext); + const [clicked, setClicked] = useState(false); const { t } = useTranslation(); - async function startRecording(): Promise<void> { + useEffect(() => { + // Re-enable clicking when the word id has changed + setClicked(false); + }, [props.id]); + + async function startRecording(): Promise<boolean> { + if (clicked) { + // Prevent recording again before this word has updated. + return false; + } + const recordingId = recorder.getRecordingId(); if (recordingId && recordingId !== props.id) { // Prevent interfering with an active recording on a different entry. - return; + return false; } + setClicked(true); + // Prevent starting a recording before a previous one is finished. await stopRecording(); @@ -40,10 +53,12 @@ errorMessage += ` ${t("pronunciations.recordingPermission")}`; } toast.error(errorMessage); + return false; } + return true; } - async function stopRecording(): Promise<void> { // Prevent triggering this function if no recording is active.
if (recorder.getRecordingId() === undefined) { return; @@ -53,8 +68,9 @@ ... props.onClick(); } const file = await recorder.stopRecording(); - if (!file) { + if (!file || !file.size) { toast.error(t("pronunciations.recordingError")); + setClicked(false); return; } if (!props.noSpeaker) { diff --git a/src/components/Pronunciations/RecorderIcon.tsx b/src/components/Pronunciations/RecorderIcon.tsx index 552c3e71d1..22899f1cd0 100644 --- a/src/components/Pronunciations/RecorderIcon.tsx +++ b/src/components/Pronunciations/RecorderIcon.tsx @@ -19,7 +19,7 @@ export const recordIconId = "recordingIcon"; interface RecorderIconProps { disabled?: boolean; id: string; - startRecording: () => void; + startRecording: () => Promise<boolean>; stopRecording: () => void; } @@ -41,11 +41,12 @@ export default function RecorderIcon(props: RecorderIconProps): ReactElement { checkMicPermission().then(setHasMic); }, []); - function toggleIsRecordingToTrue(): void { + async function toggleIsRecordingToTrue(): Promise<void> { if (!isRecording) { // Only start a recording if there's not another one in progress. - dispatch(recording(props.id)); - props.startRecording(); + if (await props.startRecording()) { + dispatch(recording(props.id)); + } } else { // This triggers if user clicks-and-holds on one entry's record icon, // drags the mouse outside that icon before releasing, diff --git a/src/components/Pronunciations/tests/RecorderIcon.test.tsx b/src/components/Pronunciations/tests/RecorderIcon.test.tsx index 6fda104dac..03834390a4 100644 --- a/src/components/Pronunciations/tests/RecorderIcon.test.tsx +++ b/src/components/Pronunciations/tests/RecorderIcon.test.tsx @@ -31,7 +31,7 @@ function mockRecordingState(wordId: string): Partial<StoreState> { const mockWordId = "1234567890"; -const mockStartRecording = jest.fn(); +const mockStartRecording = jest.fn(() => Promise.resolve(true)); const mockStopRecording = jest.fn(); const renderRecorderIcon = async (wordId = ""): Promise<void> => { diff --git a/src/goals/ReviewEntries/ReviewEntriesTable/filterFn.ts b/src/goals/ReviewEntries/ReviewEntriesTable/filterFn.ts index 42f7f7ae26..b400d51fb5 100644 --- a/src/goals/ReviewEntries/ReviewEntriesTable/filterFn.ts +++ b/src/goals/ReviewEntries/ReviewEntriesTable/filterFn.ts @@ -10,18 +10,58 @@ import { } from "api/models"; import { type Hash } from "types/hash"; +// eslint-disable-next-line @typescript-eslint/no-var-requires +const { fuzzySearch } = require("levenshtein-search"); + +/** Checks if string starts and ends with quote marks. + * For simplicity, allows mismatched quote types. */ +export function isQuoted(filter: string): boolean { + return /^["'\p{Pi}].*["'\p{Pf}]$/u.test(filter); +} + +/** Number of typos allowed, depending on filter-length. */ +function levDist(len: number): number { + return len < 3 ? 0 : len < 6 ? 1 : 2; +} + +/** Checks if value contains a substring that fuzzy-matches the filter. */ +export function fuzzyContains(value: string, filter: string): boolean { + filter = filter.toLowerCase(); + value = value.toLowerCase(); + // `fuzzySearch(...)` returns a generator; + // `.next()` on a generator always returns an object with boolean property `done` + return !fuzzySearch(filter, value, levDist(filter.length)).next().done; +} + +/** Check if string matches filter. + * If filter quoted, exact match. Otherwise, fuzzy match. */ +export function matchesFilter(value: string, filter: string): boolean { + filter = filter.trim(); + return isQuoted(filter) + ?
value.includes(filter.substring(1, filter.length - 1).trim()) : fuzzyContains(value, filter); } /* Custom `filterFn` functions for `MaterialReactTable` columns. * (Can always assume that `filterValue` will be truthy.) */ -/** Requires the accessor return type to be `Dictionary[]`. */ +/** Requires the accessor return type to be `string`. */ +export const filterFnString: MRT_FilterFn<Word> = ( + row, + id, + filterValue: string +) => { + return matchesFilter(row.getValue<string>(id), filterValue); +}; + +/** Requires the accessor return type to be `Definition[]`. */ export const filterFnDefinitions: MRT_FilterFn<Word> = ( row, id, filterValue: string ) => { const definitions = row.getValue<Definition[]>(id); - const filter = filterValue.trim().toLowerCase(); - return definitions.some((d) => d.text.toLowerCase().includes(filter)); + return definitions.some((d) => matchesFilter(d.text, filterValue)); }; /** Requires the accessor return type to be `Gloss[]`. */ @@ -31,8 +71,7 @@ export const filterFnGlosses: MRT_FilterFn<Word> = ( filterValue: string ) => { const glosses = row.getValue<Gloss[]>(id); - const filter = filterValue.trim().toLowerCase(); - return glosses.some((g) => g.def.toLowerCase().includes(filter)); + return glosses.some((g) => matchesFilter(g.def, filterValue)); }; /** Requires the accessor return type to be `SemanticDomain[]`. */ @@ -79,10 +118,15 @@ export const filterFnPronunciations = /* Match either number of pronunciations or a speaker name. * (Whitespace will match all audio, even without a speaker.) */ const audio = row.getValue<Pronunciation[]>(id); - const filter = filterValue.trim().toLocaleLowerCase(); + const filter = filterValue.trim(); return ( + (audio.length && !filter) || audio.length === parseInt(filter) || - audio.some((p) => !filter || speakers[p.speakerId]?.includes(filter)) + audio.some( + (p) => + p.speakerId in speakers && + matchesFilter(speakers[p.speakerId], filter) + ) ); }; @@ -97,6 +141,5 @@ export const filterFnFlag: MRT_FilterFn<Word> = ( row, id, filterValue: string ) => { // A filter has been typed and the word isn't flagged return false; } - const filter = filterValue.trim().toLowerCase(); - return flag.text.toLowerCase().includes(filter); + return matchesFilter(flag.text, filterValue); }; diff --git a/src/goals/ReviewEntries/ReviewEntriesTable/index.tsx b/src/goals/ReviewEntries/ReviewEntriesTable/index.tsx index 5bfd9b55a5..c4ffde3060 100644 --- a/src/goals/ReviewEntries/ReviewEntriesTable/index.tsx +++ b/src/goals/ReviewEntries/ReviewEntriesTable/index.tsx @@ -221,6 +221,7 @@ export default function ReviewEntriesTable(props: { Cell: ({ row }: CellProps) => , enableColumnOrdering: false, enableHiding: false, + filterFn: ff.filterFnString, header: t("reviewEntries.columns.vernacular"), id: ColumnId.Vernacular, size: BaselineColumnSize - 40, @@ -327,6 +328,7 @@ export default function ReviewEntriesTable(props: { // Note column columnHelper.accessor((w) => w.note.text || undefined, { Cell: ({ row }: CellProps) => , + filterFn: ff.filterFnString, header: t("reviewEntries.columns.note"), id: ColumnId.Note, size: BaselineColumnSize - 40, diff --git a/src/goals/ReviewEntries/ReviewEntriesTable/tests/filterFn.test.ts b/src/goals/ReviewEntries/ReviewEntriesTable/tests/filterFn.test.ts index 28731cc322..0e982664be 100644 --- a/src/goals/ReviewEntries/ReviewEntriesTable/tests/filterFn.test.ts +++ b/src/goals/ReviewEntries/ReviewEntriesTable/tests/filterFn.test.ts @@ -7,22 +7,136 @@ const mockId = "id"; const mockRow = { getValue: mockGetValue }; describe("filterFn", () => { + describe("isQuoted", () => { + const quotedStrings = [ + "'Single quotes'",
+ '"Double quotes"', + "“Angled quotes”", + "‹Single-bracket quotes›", + "«Double-bracket quotes»", + ]; + test("With quotes", () => { + quotedStrings.forEach((s) => expect(ff.isQuoted(s)).toBeTruthy()); + }); + + const unquotedStrings = [ + "", + "hi", + '"', + "'Single-quote start", + "“Angled-quote start", + "Angle-quote end”", + ]; + test("Without quotes", () => { + unquotedStrings.forEach((s) => expect(ff.isQuoted(s)).toBeFalsy()); + }); + }); + + describe("fuzzyContains", () => { + const testString = "I am a string with many possible substrings."; + + test("short: no typos allowed", () => { + ["i", "am", "a s"].forEach((s) => + expect(ff.fuzzyContains(testString, s)).toBeTruthy() + ); + ["@", "aq"].forEach((s) => + expect(ff.fuzzyContains(testString, s)).toBeFalsy() + ); + }); + + test("medium: 1 typo allowed", () => { + ["i b", "ama", "strim"].forEach((s) => + expect(ff.fuzzyContains(testString, s)).toBeTruthy() + ); + ["i'm", "astrr"].forEach((s) => + expect(ff.fuzzyContains(testString, s)).toBeFalsy() + ); + }); + + test("long: 2 typos allowed", () => { + ["i'm a string", "with man88"].forEach((s) => + expect(ff.fuzzyContains(testString, s)).toBeTruthy() + ); + ["i'm a ztring", "with man888"].forEach((s) => + expect(ff.fuzzyContains(testString, s)).toBeFalsy() + ); + }); + }); + + describe("matchesFilter", () => { + const value = "Hello world!"; + const filterWithTypo = "H3llo"; + const filterWrongCase = "HELLO"; + const filterExact = "Hello"; + + it("unquoted: trims whitespace, fuzzy match", () => { + expect(ff.matchesFilter(value, "goodbye")).toBeFalsy(); + expect(ff.matchesFilter(value, ` ${filterWithTypo}`)).toBeTruthy(); + expect(ff.matchesFilter(value, `${filterWrongCase}\t`)).toBeTruthy(); + expect(ff.matchesFilter(value, `\t${filterExact} `)).toBeTruthy(); + }); + + it("quoted: trims whitespace, exact match", () => { + expect(ff.matchesFilter(value, `"${filterWithTypo}"`)).toBeFalsy(); + expect(ff.matchesFilter(value, `"${filterWrongCase}"`)).toBeFalsy(); + expect(ff.matchesFilter(value, ` "\t${filterExact} "\n`)).toBeTruthy(); + }); + }); + + describe("filterFnString", () => { + const filterFn = ff.filterFnString as any; + beforeEach(() => { + mockGetValue.mockReturnValue("Hello world!"); + }); + + it("unquoted: trims whitespace, fuzzy match", () => { + expect(filterFn(mockRow, mockId, "goodbye")).toBeFalsy(); + expect(filterFn(mockRow, mockId, " H3LLO")).toBeTruthy(); + }); + + it("quoted: trims whitespace, exact match", () => { + expect(filterFn(mockRow, mockId, '"H3llo"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, '"HELLO"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, '" Hello"\n')).toBeTruthy(); + }); + }); + describe("filterFnDefinitions", () => { const filterFn = ff.filterFnDefinitions as any; - it("trims whitespace and isn't case sensitive", () => { + beforeEach(() => { mockGetValue.mockReturnValue([ newDefinition("hello"), newDefinition("WORLD"), ]); - expect(filterFn(mockRow, mockId, " WoRlD\t")).toBeTruthy(); + }); + + it("unquoted: trims whitespace, fuzzy match", () => { + expect(filterFn(mockRow, mockId, "earth")).toBeFalsy(); + expect(filterFn(mockRow, mockId, " wrld\t")).toBeTruthy(); + }); + + it("quoted: trims whitespace, exact match", () => { + expect(filterFn(mockRow, mockId, '"h3llo"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, '"HELLO"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, '" hello"\n')).toBeTruthy(); }); }); describe("filterFnGlosses", () => { const filterFn = ff.filterFnGlosses as any; - it("trims whitespace and isn't 
case sensitive", () => { + beforeEach(() => { mockGetValue.mockReturnValue([newGloss("hello"), newGloss("WORLD")]); - expect(filterFn(mockRow, mockId, " WoRlD\t")).toBeTruthy(); + }); + + it("unquoted: trims whitespace, fuzzy match", () => { + expect(filterFn(mockRow, mockId, "earth")).toBeFalsy(); + expect(filterFn(mockRow, mockId, " wrld\t")).toBeTruthy(); + }); + + it("quoted: trims whitespace, exact match", () => { + expect(filterFn(mockRow, mockId, '"h3llo"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, '"HELLO"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, '" hello"\n')).toBeTruthy(); }); }); @@ -73,31 +187,61 @@ describe("filterFn", () => { // filterFnPronunciations returns a filter function when given a speaker dictionary const filterFn = (ff.filterFnPronunciations as any)(speakers); - it("matches number of pronunciations", () => { + it("numeric: matches number of pronunciations", () => { mockGetValue.mockReturnValue([newPronunciation(), newPronunciation()]); expect(filterFn(mockRow, mockId, " 2")).toBeTruthy(); expect(filterFn(mockRow, mockId, "2.0")).toBeTruthy(); expect(filterFn(mockRow, mockId, "1")).toBeFalsy(); }); - it("matches speaker name", () => { + it("whitespace: matches any audio", () => { + mockGetValue.mockReturnValueOnce([]); + expect(filterFn(mockRow, mockId, " ")).toBeFalsy(); + mockGetValue.mockReturnValueOnce([newPronunciation()]); + expect(filterFn(mockRow, mockId, " ")).toBeTruthy(); + }); + + it("unquoted: fuzzy-matches speaker name", () => { mockGetValue.mockReturnValue([newPronunciation("filename", speakerId)]); expect(filterFn(mockRow, mockId, "2")).toBeTruthy(); expect(filterFn(mockRow, mockId, " NAME\t\t")).toBeTruthy(); + expect(filterFn(mockRow, mockId, "numb3r")).toBeTruthy(); expect(filterFn(mockRow, mockId, "other person")).toBeFalsy(); }); + + it("quoted: exact-matches speaker name", () => { + mockGetValue.mockReturnValue([newPronunciation("filename", speakerId)]); + expect(filterFn(mockRow, mockId, "'2'")).toBeTruthy(); + expect(filterFn(mockRow, mockId, "'NAME'")).toBeFalsy(); + expect(filterFn(mockRow, mockId, " '\tname ' \t")).toBeTruthy(); + }); }); describe("filterFnFlag", () => { const filterFn = ff.filterFnFlag as any; - it("trims whitespace and isn't case sensitive", () => { - mockGetValue.mockReturnValue(newFlag("hello, WORLD")); - expect(filterFn(mockRow, mockId, " WoRlD\t")).toBeTruthy(); + beforeEach(() => { + mockGetValue.mockReturnValue(newFlag("Hello world!")); + }); + + it("unquoted: trims whitespace, fuzzy match", () => { + expect(filterFn(mockRow, mockId, "goodbye")).toBeFalsy(); + expect(filterFn(mockRow, mockId, " H3LLO")).toBeTruthy(); + }); + + it("quoted: trims whitespace, exact match", () => { + expect(filterFn(mockRow, mockId, '"H3llo"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, '"HELLO"')).toBeFalsy(); + expect(filterFn(mockRow, mockId, ' "\tHello "\n')).toBeTruthy(); }); it("doesn't match if flag not active", () => { - mockGetValue.mockReturnValue({ active: false, text: "hi" }); - expect(filterFn(mockRow, mockId, " ")).toBeFalsy(); + const text = "hi"; + + mockGetValue.mockReturnValueOnce({ active: true, text }); + expect(filterFn(mockRow, mockId, text)).toBeTruthy(); + + mockGetValue.mockReturnValueOnce({ active: false, text }); + expect(filterFn(mockRow, mockId, text)).toBeFalsy(); }); }); });