diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml new file mode 100644 index 0000000000..9a35e688f9 --- /dev/null +++ b/.github/workflows/ci-tests.yml @@ -0,0 +1,327 @@ +name: Build and Run Tests + +on: + schedule: + - cron: '0 5 * * *' #Runs daily at 5 AM UTC + push: + branches: + - master + - develop + pull_request: + branches: + - develop + +env: + CC_TEST_REPORTER_ID: "${{ secrets.CC_TEST_REPORTER_ID }}" + +jobs: + functional-tests: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }} + aws-region: eu-west-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Set up Enviroments + run: | + export RECORD_RUNTIME=true + export LC_ALL=C.UTF-8 + export LANG=C.UTF-8 + export LANGUAGE=C.UTF-8 + export PATH=/root/.local/bin:$PATH + rm -rf tmp/cache tmp/cache1 tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 && mkdir -p tmp/cache tmp/cache1 tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 + + - name: Set up Configuration Files + run: | + cp config/config.yml.example config/config.yml + cp config/database.yml.example config/database.yml + cp config/sidekiq.yml.example config/sidekiq.yml + cp config/credentials.json.example config/credentials.json + cp config/sidekiq-test.yml.example config/sidekiq-test.yml + + - name: Build Docker Container + run: | + docker compose build + - name: Run syntax checks + run: | + docker compose run api bash -c 'touch /tmp/no-syntax-errors && find app lib config -name *.rb -exec bash -c "ruby -c {} >/dev/null || rm /tmp/no-syntax-errors" ";" && ls /tmp/no-syntax-errors' + - 
name: Run Container + run: docker compose -f docker-compose.yml -f docker-test.yml up -d + + - name: Wait for the server to be ready + run: | + tail -f log/test.log & + until curl --silent -I -f --fail http://localhost:3000 ; do printf .; sleep 1; done + - name: Set up parallel environment + run: docker compose exec -T api test/setup-parallel-env.sh + + - name: Precompile Assets + run: docker compose exec -T api bundle exec rake assets:precompile + + - name: Prepare Parallel Runtime Log + run: | + sleep 10 + touch tmp/parallel_runtime_test.log + chmod +w tmp/parallel_runtime_test.log + + - name: Run Functional Tests + id: functional-tests + env: + TEST_RETRY_COUNT: 3 + run: | + docker compose exec -e TEST_RETRY_COUNT=$TEST_RETRY_COUNT -T -e PATTERN='models mailers integration workers lib contract' api test/run-tests.sh + + - name: After Functional Test + env: + GITHUB_EVENT: ${{ github.event_name}} + GIT_BRANCH: ${{ github.head_ref || github.ref_name }} + GITHUB_REPO: ${{ github.repository }} + GITHUB_TEST_RESULT: ${{ steps.functional-tests.outcome}} + GITHUB_BUILD_NUMBER: ${{ github.run_number }} + GITHUB_COMMIT_SHA: ${{ github.sha }} + GITHUB_JOB_NAME: ${{ github.job }} + CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + ROLE_TO_ASSUME: ${{ secrets.AWS_ROLE_TO_ASSUME }} + AWS_REGION: eu-west-1 + run: | + docker compose exec -T -e GIT_BRANCH=$GIT_BRANCH \ + -e GITHUB_EVENT=$GITHUB_EVENT \ + -e GITHUB_TEST_RESULT=$GITHUB_TEST_RESULT \ + -e GITHUB_REPO=$GITHUB_REPO \ + -e GITHUB_BUILD_NUMBER=$GITHUB_BUILD_NUMBER \ + -e GIT_COMMIT_SHA=$GITHUB_COMMIT_SHA \ + -e CC_TEST_REPORTER_ID=$CC_TEST_REPORTER_ID \ + -e GITHUB_JOB_NAME=$GITHUB_JOB_NAME \ + -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \ + -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \ + -e ROLE_TO_ASSUME=$AWS_ROLE_TO_ASSUME \ + -e AWS_REGION=$AWS_REGION \ + -e AWS_CONFIG_FILE=/app/credentials 
api test/test-coverage.sh + + + unit-tests: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }} + aws-region: eu-west-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: aws-actions/amazon-ecr-login@v2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: set up enviromnts + run: | + export RECORD_RUNTIME=true + export LC_ALL=C.UTF-8 + export LANG=C.UTF-8 + export LANGUAGE=C.UTF-8 + export PATH=/root/.local/bin:$PATH + rm -rf tmp/cache tmp/cache1 tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 && mkdir -p tmp/cache tmp/cache1 tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 + + - name: Set up Configuration Files + run: | + cp config/config.yml.example config/config.yml + cp config/database.yml.example config/database.yml + cp config/sidekiq.yml.example config/sidekiq.yml + cp config/credentials.json.example config/credentials.json + cp config/sidekiq-test.yml.example config/sidekiq-test.yml + + - name: Build Docker Container + run: | + docker compose build + - name: Run syntax checks + run: | + docker compose run api bash -c 'touch /tmp/no-syntax-errors && find app lib config -name *.rb -exec bash -c "ruby -c {} >/dev/null || rm /tmp/no-syntax-errors" ";" && ls /tmp/no-syntax-errors' + - name: Run Container + run: docker compose -f docker-compose.yml -f docker-test.yml up -d + + - name: Wait for the server to be ready + run: | + tail -f log/test.log & + until curl --silent -I -f --fail http://localhost:3000 ; do printf .; sleep 1; done + - name: Set up parallel environment + run: docker compose exec -T api test/setup-parallel-env.sh + + - name: Precompile Assets + run: docker compose exec -T api bundle exec 
rake assets:precompile + + - name: Prepare Parallel Runtime Log + run: | + sleep 10 + touch tmp/parallel_runtime_test.log + chmod +w tmp/parallel_runtime_test.log + + - name: Run Unit Tests + id: unit-tests + env: + TEST_RETRY_COUNT: 3 + run: | + docker compose exec -e TEST_RETRY_COUNT=$TEST_RETRY_COUNT -T -e PATTERN='controllers contract' api test/run-tests.sh + + - name: After Unit Test + env: + GITHUB_EVENT: ${{ github.event_name}} + GIT_BRANCH: ${{ github.head_ref || github.ref_name }} + GITHUB_REPO: ${{ github.repository }} + GITHUB_TEST_RESULT: ${{ steps.unit-tests.outcome}} + GITHUB_BUILD_NUMBER: ${{ github.run_number }} + GITHUB_COMMIT_SHA: ${{ github.sha }} + GITHUB_JOB_NAME: ${{ github.job }} + CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + ROLE_TO_ASSUME: ${{ secrets.AWS_ROLE_TO_ASSUME }} + AWS_REGION: eu-west-1 + run: | + docker compose exec -T -e GIT_BRANCH=$GIT_BRANCH \ + -e GITHUB_EVENT=$GITHUB_EVENT \ + -e GITHUB_TEST_RESULT=$GITHUB_TEST_RESULT \ + -e GITHUB_REPO=$GITHUB_REPO \ + -e GITHUB_BUILD_NUMBER=$GITHUB_BUILD_NUMBER \ + -e GIT_COMMIT_SHA=$GITHUB_COMMIT_SHA \ + -e CC_TEST_REPORTER_ID=$CC_TEST_REPORTER_ID \ + -e GITHUB_JOB_NAME=$GITHUB_JOB_NAME \ + -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \ + -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \ + -e ROLE_TO_ASSUME=$AWS_ROLE_TO_ASSUME \ + -e AWS_REGION=$AWS_REGION \ + -e AWS_CONFIG_FILE=/app/credentials api test/test-coverage.sh + + + contract-tests: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Configure AWS Credentials + uses: aws-actions/configure-aws-credentials@v4 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME }} + aws-region: eu-west-1 + + - name: Login to Amazon ECR + id: login-ecr + uses: 
aws-actions/amazon-ecr-login@v2 + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Set up Enviroments + run: | + export RECORD_RUNTIME=true + export LC_ALL=C.UTF-8 + export LANG=C.UTF-8 + export LANGUAGE=C.UTF-8 + export PATH=/root/.local/bin:$PATH + rm -rf tmp/cache tmp/cache1 tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 && mkdir -p tmp/cache tmp/cache1 tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 + + - name: Set up Configuration Files + run: | + cp config/config.yml.example config/config.yml + cp config/database.yml.example config/database.yml + cp config/sidekiq.yml.example config/sidekiq.yml + cp config/credentials.json.example config/credentials.json + cp config/sidekiq-test.yml.example config/sidekiq-test.yml + + - name: Build Docker Container + run: | + docker compose build + + - name: Run syntax checks + run: | + docker compose run api bash -c 'touch /tmp/no-syntax-errors && find app lib config -name *.rb -exec bash -c "ruby -c {} >/dev/null || rm /tmp/no-syntax-errors" ";" && ls /tmp/no-syntax-errors' + + - name: Run Container + run: docker compose -f docker-compose.yml -f docker-test.yml up -d + + - name: Wait for the server to be ready + run: | + tail -f log/test.log & + until curl --silent -I -f --fail http://localhost:3000 ; do printf .; sleep 1; done + + - name: Set up Parallel Environment + run: docker compose exec -T api test/setup-parallel-env.sh + + - name: Precompile Assets + run: docker compose exec -T api bundle exec rake assets:precompile + + - name: Prepare Parallel Runtime Log + run: | + sleep 10 + touch tmp/parallel_runtime_test.log + chmod +w tmp/parallel_runtime_test.log + + - name: Run Contract Tests + id: run-tests + env: + TEST_RETRY_COUNT: 3 + run: | + docker compose exec -e TEST_RETRY_COUNT=$TEST_RETRY_COUNT -T -e PATTERN='controllers models mailers integration workers lib' api test/run-tests.sh + + - name: After Contract Test + env: + 
GITHUB_EVENT: ${{ github.event_name}} + GIT_BRANCH: ${{ github.head_ref || github.ref_name }} + GITHUB_REPO: ${{ github.repository }} + GITHUB_TEST_RESULT: ${{ steps.run-tests.outcome}} + GITHUB_BUILD_NUMBER: ${{ github.run_number }} + GITHUB_COMMIT_SHA: ${{ github.sha }} + GITHUB_JOB_NAME: ${{ github.job }} + CC_TEST_REPORTER_ID: ${{ secrets.CC_TEST_REPORTER_ID }} + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + ROLE_TO_ASSUME: ${{ secrets.AWS_ROLE_TO_ASSUME }} + AWS_REGION: eu-west-1 + run: | + docker compose exec -T -e GIT_BRANCH=$GIT_BRANCH \ + -e GITHUB_EVENT=$GITHUB_EVENT \ + -e GITHUB_TEST_RESULT=$GITHUB_TEST_RESULT \ + -e GITHUB_REPO=$GITHUB_REPO \ + -e GITHUB_BUILD_NUMBER=$GITHUB_BUILD_NUMBER \ + -e GIT_COMMIT_SHA=$GITHUB_COMMIT_SHA \ + -e CC_TEST_REPORTER_ID=$CC_TEST_REPORTER_ID \ + -e GITHUB_JOB_NAME=$GITHUB_JOB_NAME \ + -e AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID \ + -e AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY \ + -e ROLE_TO_ASSUME=$AWS_ROLE_TO_ASSUME \ + -e AWS_REGION=$AWS_REGION \ + -e AWS_CONFIG_FILE=/app/credentials api test/test-coverage.sh diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 1df883d904..0000000000 --- a/.travis.yml +++ /dev/null @@ -1,64 +0,0 @@ -language: minimal -dist: jammy -before_install: -- export RECORD_RUNTIME=true -- export LC_ALL=C.UTF-8 -- export LANG=C.UTF-8 -- export LANGUAGE=C.UTF-8 -- export PATH=/root/.local/bin:$PATH -- rm -rf tmp/cache tmp/cache1 tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 && mkdir -p tmp/cache tmp/cache1 - tmp/cache2 tmp/cache3 tmp/cache4 tmp/cache5 -before_script: -- echo "$DOCKER_PASSWORD" | docker login -u "$DOCKER_USERNAME" --password-stdin -- echo '[default]' > credentials && echo "aws_access_key_id = $AWS_ACCESS_KEY_ID" >> credentials && echo "aws_secret_access_key = $AWS_SECRET_ACCESS_KEY" >> credentials -- cp config/config.yml.example config/config.yml -- cp config/database.yml.example 
config/database.yml -- cp config/sidekiq.yml.example config/sidekiq.yml -- cp config/credentials.json.example config/credentials.json -- cp config/sidekiq-test.yml.example config/sidekiq-test.yml -- docker-compose build -- docker-compose run api bash -c 'touch /tmp/no-syntax-errors && find app lib config -name *.rb -exec bash -c "ruby -c {} >/dev/null || rm /tmp/no-syntax-errors" ";" && ls /tmp/no-syntax-errors' -- docker-compose -f docker-compose.yml -f docker-test.yml up -d -- tail -f log/test.log & -- until curl --silent -I -f --fail http://localhost:3000 ; do printf .; sleep 1; done -- docker-compose exec api test/setup-parallel-env.sh -- docker-compose exec api bundle exec rake assets:precompile -- sleep 10 -- touch tmp/parallel_runtime_test.log -- chmod +w tmp/parallel_runtime_test.log -after_script: -- docker-compose exec - -e TRAVIS_BRANCH=$TRAVIS_BRANCH - -e TRAVIS_PULL_REQUEST=$TRAVIS_PULL_REQUEST - -e TRAVIS_TEST_RESULT=$TRAVIS_TEST_RESULT - -e TRAVIS_REPO_SLUG=$TRAVIS_REPO_SLUG - -e TRAVIS_BUILD_NUMBER=$TRAVIS_BUILD_NUMBER - -e GIT_COMMIT_SHA=$GIT_COMMIT_SHA - -e GIT_COMMITTED_AT=$GIT_COMMITTED_AT - -e CC_TEST_REPORTER_ID=$CC_TEST_REPORTER_ID - -e PATH=$PATH - -e TRAVIS_JOB_NAME=$TRAVIS_JOB_NAME - -e AWS_CONFIG_FILE=/app/credentials - api test/test-coverage.sh -jobs: - include: - - stage: tests - name: functional-tests - script: docker-compose exec -e TEST_RETRY_COUNT=3 -e PATTERN='models mailers integration workers lib contract' api test/run-tests.sh - - stage: tests - name: unit-tests - script: docker-compose exec -e TEST_RETRY_COUNT=3 -e PATTERN='controllers contract' api test/run-tests.sh - - stage: tests - name: contract-tests - script: docker-compose exec -e TEST_RETRY_COUNT=3 -e PATTERN='controllers models mailers integration workers lib' api test/run-tests.sh -notifications: - slack: - secure: 
dhqNhrJ0FVPnjtxa7R6k0s+1h/gMFNeK8zYJLZw+mK/FJ41K1u82Y8E6IDFbgNcKyAJ27ielvzGgWGSkDVltEnPR+ph15OMcy05TM9Pr2tWNusbDECOaEQgn4vGOq0shmiahE9tTOQpgc1TzhzIF9o1xgocah2PCLKiiH06kiiRlNZkaeQSJRFrXsPDDK8jIUtkLLUvFGQA6fq/lOh4tN6/N+K6+fo86wSxarkjv3d6h2flqvQqvqkbNpkv/UBC2Y1QACP+EX6uA0ySer8K5X6Q0Trrkjjacwvo5j74UVa+UYrBD+vr7Mgnr5aNFNId6M2nHd92ZiPM+6VDllCWsDLvJ2qFFy8cOO693EjrU7puaPp29+sptriIC71dk1oHSIEpPWwuaEZKzpwP4Swe322ne2th2VrjMhIye2Ru0519Lr2Dl4iTVV+hxoeVta3Nng23rUjrGoAbcw2FO1jmhANp8JWatv/V4PmlrS1/kYbiCfltWOMbKAD9f1EOTFnPCJsp3hPL238Ic+kvNJ8LM+ItNNR5cly+JPto6nSnMO8uhig9i78Mp2hVpnvrwhDLYntWEP2vcaNhP8oRys7X2iq5PRmsRwr9SyJReczd6i5gnvskXqKat6mNfPeGMBKSYof/1ve3Um0wEtwaYxvU5y/ZezFc3Kzzi6vt4P86j+mg= -env: - global: - - secure: NwJYV3kghSW2hReSDOnY0sG1oe7IDcix/pZprWHkfkpc+520355n71uj7gofQ2tOO7x2rg2+j9q1GVx9hbP0EsOCIl2u0csnA7lrfe9COp9cGdM7gMzSN6CvuwW9OME9eark37ACSHW1LD1TGB6OcjOkoQxTCM4E00Nj1zAYdT4gbhnBLnxW0FPpEqRB+0XN6ev53HdE5+KrwOIG/chYL/FdaTUdjfyikoduJp5pPjHt1Nd+LSoEt3WbYs8IJeSJIdzCYrD/gXHdjGF1f8v4PFCFyl1ashFtLPMuS+4DBZjR4grSbhpTBwoJAr6oRbWJNAkmaxC26s574U/7QgP6MFlJzhgKim2gvyGUmTSKgxZViyn1y7oend+vWE0281lHDMPqtO3fNel6HteFm/GoJ3lSvZPd8ddveJNacPZB6+K7KzvJWEoRy5eYdUjaejA6KiaSkblpqXoMWwDH+RjCde/HANxMhrECa9yCZlLKBdI5dPccyQwxKnebyZkU37VeWFc9fw2EUy9gmAunV5SH/Osz7b+IX+h3Mf8WxSvQutCUrD+1kspzkEVEXX41SawKD30OfMX9ZBZB9ueisyjB4kW0GzvCGlANMAQ4VDS4/FOmc+5IhcDN8i8PvzE5IGooETOtcwMWVZMrbA0/vq2Z44WQpS2Lq1NYGYhqAN947lk= - - secure: 
lLyERoM5p4ggOme89E1b/LW8PRwZvMyn14KRU0qa5Ce4DWPQ5qNB96fpXPeshAaD5163rxWGGA2z0a1XbMa98QH7XO0u88Azg1Y/d5yMJiHsFvKaIR7oEJqtgIs1sSgnC2q0/B0Sk8Zyqx45IvND0VRuHE2pI2IkZR3H65ro78BsuXzwX3zwoqU0Es7n9MawBoiTbZMs4MZiaHXgnQj2AdbWEx9jdigBM3omMc/29WRTVyFtPCL/RHg5TVe00Z5vNU2+uLYqF1U+HB+Cru0tyRJsPcCay3ksAy8PpHqX0KrFF04vSaN4K0RVScVk/0cXvXGuuud2GRujDyqcoClkuJCwyqRN1wJKto7B3kr+MblEIdTW13ef+VABIFR71+2uVlJJduMrmX2pzdIoroi0dg1T32sxxL8jes6hE437YGKtx1UKcJr94+ZhOHWyCbNF9mRRqBluFet8YyNaBdQKp5V685izsmpVrn/xShmcUWL7+MlDNbekEi3GDKMqmVNs8TNZZROSLiA1wwyn8AuB2u2/3sPHZGwroiRVUIx73Rg765KYfoR+gyonNgbn+AhXT0+7mAuLwqu/6msHjuNk0RRdRq4h+G8bh3jf7hbzUaiQC4F7J0Il+F3/DeegdwWNGWrE2+8Aa5619mJD3o+brOlLdmMHv3hLixx8ogBUKbo= - - secure: lB1eqOWmQVA6seElLmHg9tRzJN8jjDQ4UT6pcROGubk2Ru94gIsJnkFAeCqbp3AyfFWJAHIdCQH1q34krOMfEc+bEQnC5hSjY8PeYbdRnpTx32EDd2bl3ErnUuYq7OyXbHUpono7rzLiUWCjnsGOlz40R7LvN+wIIdP8cUD3F2OezalVSyNwIQPKmwTwIcTQrMcUdeOvlTTZ0BtbNZbHWKd6UZ7RTUOfnPy1un2dS3DAtQIbLM9Q4f4FpsZOqNntFuC8XfC+7wc1u4BA3mjat6iX9J46q2XThlhYqbIG/X79CqIB1lfpnh6Uh9yn4drrmldfnJbhl9w7T8yAF7kt2QzmydH8pnj1X5VcvtaTdMyox2ivCysfpQWkO62i0PbCxqX2+UMy9LHj3Wmis3ydx1flaRxjstykgySOFO9gvx4JOBfS9FjeRyd04YKDn/z9dbVZ9eJ3HYusf8QyPv1ZcaURpVqUszxEjg1bH36gtBC5fMoKF9Mk/SXJY1P6yQ5tRMy0C5A343HQ8xI6rUFomxe2Pk089wP92anphtF0KlF8ZjSEDAPbEXN6qsh+GCszVZQ3YRlQoLGj2fpJI236y28GgbcMmEhlKT2P/AtC78sMQyjGIY2rOH0pVBtYQz1qch2h/6z9tDvSOwI/SmcLwhE7tawIOiCWI3d/oXRFJMM= - - secure: 
vwNGJmMth0yKn0Y2I9UzSlmTctcZ070P3iZsf7pVn0swEyGgSpNPugVQ66H8lAYU28F24ANlKQGutzg1+LoNvwV2gHhvppe229yUQBUTsdfCOXnL586agNNzgfP5eTveuXSMvQfvrHohhsFSXAR5PoDTw3lfnvwO1L9gguhc0DIU6iIE6drmN/89Cj3tA7hFkbkDmfAJHhwF8djrUw4qBuiMUapGDtSsGT1adINJgngbpV5magNIg8MSEsvJfyTSVVCBqQGZWzNF82KGPtxMk8JWq5afwf76wFzE246VjAEtKlnSfme8KDhhKzyl7aLMo3mPC6FZdfavlGVYe18ZF9XE/c7jqMFtDS6OgybFlN+0rVpuUR+DGDpOUt9x7EJKamfCkjCO4a8m+Nhc63YkwLOwGDUtTjSI/iDQY/HxMqrzZqNZLb4972ZdezY6wFwfLldE8eGL7rM98SlS4RIWL4voJ31PEW3PwQRsPp7KbemSU2E/F2Je3MgdU+meK6esEaPDZqNwe51kT8amNM44srHazl05EsIo50pofMxTvjp7ZLa8i+O2JySSABfuX2weZMLwMBgvYS5qaWz85KXOCyiVcO3xl01MXLqKwp9l9vBkseFTZ1Hi/My5tif0W+a3HN2Zk333gwt85wKbbaHvHpYwm2kK/vZgQz3Rrw+l7rw= - - secure: u8gX2CSDCD9q4g9LfUodDZOCd1YpfhP3F5LTbwBpXVV3Nz9Dp3sl6vZXN0IJJQQ/sC6DMWL5+38oL+OmaMCZ2M9PzFbNoW6Lez9ZEm1YVVBbWU5/VGaGX86VfJAh70AGP0R7SurgfcRQARgt1kIzckSl5hAnSf2Wxw+TU+anaFCzvpGHaHbpr4HtPts1MQU49u7/wFhlPe6c+1LH95k1xIHKVISjpBam5E6mbt5hsqV9lsKoXmFttdfSrub8Pg+Ntivzcv8yimQPMr5KjbnuWBm/8eJfiKosUdKqo83oalC47Lex4KXvf8WW4DXEHH7VD5i+PibMPPcerwk5y3mXwUIA8ZzC+yi8MvBoqQ+Bi1tpMtyRZiCcqDyFqyNi9/8vu6LJ/zthzc09kkJX10rQD9EDz6jBPWiGepAWPEMN5PBC8Zp+Gia/eilg+F+6ux1atkdH8ll5noWy016yYW+Ib6TlTciKN2lj3nZ30J1t7UGeisjqVG/wGMT0yU0Pfh0dUppGkXM6AKpQz7HgftWGiuHXe2cD3E9Ag4D2ZlIoelc3SMYbtuR2cLMtHfUZA1QIbGfVcDjJ1X5OqQ3i0Ax9/KORMYERnk7UxB4W4rNieTO1vJPdsTcPijQm6EjyGvuceMsZfBxbxEeUtHarC85rKWm5c3bpLINYkka26XvEa2w= - - secure: 
CFEUDISuHfcQB6Bn6re0CT/dnQYlXd3PvVi1MVrhjSrvmJgdVbG5kqg+QQiz+6xkUnsIAIklZjfHAEV6uOOrTbrN9enJCcFRhZNPZzhbYN8NOnyRiOiuH2akEXSgNyEow0yzaOAXcIwgQqkBI4X1LtOJDA5QY6guVtiQlAfn1PK4uiwc7kLYZA+c7tfIGdOtS60Dk+gFxrYP3xMt6ycnCXswBDjpi3MPCjMtFEe44pxCLMYFX+8QWFo+Vi2nBINvcGurSBpmKqPGNXmVkmjwcg5L1USdqORW2smOTrGXSRC8rIiFZZqW0awD3Fnn06gWymwHYcIQzTin8NRcP9qspWHvVOiZ3kLKoljpHwUV0ol5qwTN6xDojwDg00wJx/wUvH7kJwxSivMfdl66ssI9wBSajNJkShvZekiofj//6pAB9qQNlBp87yWhKGX+ixDWKAUYfDzUwnbP741JFvY9JejMO2Tht1p1uBdKnKyrr5tGRerQFlciUFUZXtRVHxghtO01N4xekMtv7DfZ2F6Zwfv3BCExNq03jSUcBuPTR9J3zgvyi+vv+2mO6FmOsp/jWzro42RBLVyq2hNHrJcwzseA/zy49mW1oax84ylz5reacCs7fQBpUB74JuV/PZPCdsZKnn84IP26mC1HaCk+2ybxlcoONi7ze2i5SkXP3Ko= diff --git a/Gemfile.lock b/Gemfile.lock index 65e7439737..160a12b73b 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -243,7 +243,7 @@ GEM encryptor (3.0.0) equalizer (0.0.11) erubi (1.12.0) - et-orbi (1.2.7) + et-orbi (1.2.11) tzinfo ethon (0.16.0) ffi (>= 1.15.0) @@ -302,8 +302,8 @@ GEM fog-core nokogiri (>= 1.5.11, < 2.0.0) formatador (1.1.0) - fugit (1.5.2) - et-orbi (~> 1.1, >= 1.1.8) + fugit (1.11.1) + et-orbi (~> 1, >= 1.2.11) raabro (~> 1.4) fx (0.8.0) activerecord (>= 6.0.0) @@ -726,8 +726,8 @@ GEM mime-types (>= 1.16, < 4.0) netrc (~> 0.8) retriable (3.1.2) - rexml (3.2.8) - strscan (>= 3.0.9) + rexml (3.3.3) + strscan rotp (6.3.0) rqrcode (2.1.1) chunky_png (~> 1.0) diff --git a/README.md b/README.md index ec52a7f36e..bb36956944 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,6 @@ # Check API -[![Test Coverage](https://api.codeclimate.com/v1/badges/583c7f562a78e7039e13/test_coverage)](https://codeclimate.com/github/meedan/check-api/test_coverage) -[![Maintainability](https://api.codeclimate.com/v1/badges/583c7f562a78e7039e13/maintainability)](https://codeclimate.com/github/meedan/check-api/maintainability) -[![Travis](https://travis-ci.org/meedan/check-api.svg?branch=develop)](https://travis-ci.org/meedan/check-api/) +[![Build and Run 
Tests](https://github.com/meedan/check-api/actions/workflows/ci-tests.yml/badge.svg)](https://github.com/meedan/check-api/actions/workflows/ci-tests.yml) Part of the [Check platform](https://meedan.com/check). Refer to the [main repository](https://github.com/meedan/check) for instructions. diff --git a/app/graph/mutations/claim_description_mutations.rb b/app/graph/mutations/claim_description_mutations.rb index 06eeba4525..dc8a9ac977 100644 --- a/app/graph/mutations/claim_description_mutations.rb +++ b/app/graph/mutations/claim_description_mutations.rb @@ -8,13 +8,12 @@ module SharedCreateAndUpdateFields included do argument :description, GraphQL::Types::String, required: false argument :context, GraphQL::Types::String, required: false, as: :claim_context + argument :project_media_id, GraphQL::Types::Int, required: false, camelize: false end end class Create < Mutations::CreateMutation include SharedCreateAndUpdateFields - - argument :project_media_id, GraphQL::Types::Int, required: true, camelize: false end class Update < Mutations::UpdateMutation diff --git a/app/graph/mutations/explainer_item_mutations.rb b/app/graph/mutations/explainer_item_mutations.rb new file mode 100644 index 0000000000..eb07e5ca9d --- /dev/null +++ b/app/graph/mutations/explainer_item_mutations.rb @@ -0,0 +1,11 @@ +module ExplainerItemMutations + MUTATION_TARGET = 'explainer_item'.freeze + PARENTS = ['explainer', 'project_media'].freeze + + class Create < Mutations::CreateMutation + argument :explainer_id, GraphQL::Types::Int, required: true + argument :project_media_id, GraphQL::Types::Int, required: true + end + + class Destroy < Mutations::DestroyMutation; end +end diff --git a/app/graph/mutations/explainer_mutations.rb b/app/graph/mutations/explainer_mutations.rb index 7856561994..03c217c2e2 100644 --- a/app/graph/mutations/explainer_mutations.rb +++ b/app/graph/mutations/explainer_mutations.rb @@ -6,10 +6,11 @@ module SharedCreateAndUpdateFields extend ActiveSupport::Concern included 
do - argument :title, GraphQL::Types::String, required: true + argument :title, GraphQL::Types::String, required: false argument :description, GraphQL::Types::String, required: false argument :url, GraphQL::Types::String, required: false argument :language, GraphQL::Types::String, required: false + argument :tags, [GraphQL::Types::String, null: true], required: false end end diff --git a/app/graph/mutations/fact_check_mutations.rb b/app/graph/mutations/fact_check_mutations.rb index 8d1f5f4509..b6378feabd 100644 --- a/app/graph/mutations/fact_check_mutations.rb +++ b/app/graph/mutations/fact_check_mutations.rb @@ -1,6 +1,6 @@ module FactCheckMutations MUTATION_TARGET = 'fact_check'.freeze - PARENTS = ['claim_description'].freeze + PARENTS = ['claim_description', 'team'].freeze module SharedCreateAndUpdateFields extend ActiveSupport::Concern @@ -8,6 +8,8 @@ module SharedCreateAndUpdateFields included do argument :url, GraphQL::Types::String, required: false argument :language, GraphQL::Types::String, required: false + argument :tags, [GraphQL::Types::String, null: true], required: false + argument :rating, GraphQL::Types::String, required: false end end diff --git a/app/graph/types/article_union.rb b/app/graph/types/article_union.rb index 58a7e7479a..b1857b12ec 100644 --- a/app/graph/types/article_union.rb +++ b/app/graph/types/article_union.rb @@ -2,5 +2,6 @@ class ArticleUnion < BaseUnion description 'A union type of all article types we can handle' possible_types( ExplainerType, + FactCheckType, ) end diff --git a/app/graph/types/claim_description_type.rb b/app/graph/types/claim_description_type.rb index 7871e02e2e..df35cdbf61 100644 --- a/app/graph/types/claim_description_type.rb +++ b/app/graph/types/claim_description_type.rb @@ -8,13 +8,14 @@ class ClaimDescriptionType < DefaultObject field :context, GraphQL::Types::String, null: true, resolver_method: :claim_context field :user, UserType, null: true field :project_media, ProjectMediaType, null: true + field 
:project_media_was, ProjectMediaType, null: true field :fact_check, FactCheckType, null: true do argument :report_status, GraphQL::Types::String, required: false, camelize: false end def fact_check(report_status: nil) ability = context[:ability] || Ability.new - status = object.project_media.report_status + status = object.project_media&.report_status can_read = ability.can?(:read, object) || status == 'published' (can_read && (!report_status || status == report_status)) ? object.fact_check : nil end diff --git a/app/graph/types/explainer_item_type.rb b/app/graph/types/explainer_item_type.rb new file mode 100644 index 0000000000..81e3d194bf --- /dev/null +++ b/app/graph/types/explainer_item_type.rb @@ -0,0 +1,10 @@ +class ExplainerItemType < DefaultObject + description 'Explainer item type' + + implements GraphQL::Types::Relay::Node + + field :explainer_id, GraphQL::Types::Int, null: false + field :project_media_id, GraphQL::Types::Int, null: false + field :explainer, ExplainerType, null: false + field :project_media, ProjectMediaType, null: false +end diff --git a/app/graph/types/explainer_type.rb b/app/graph/types/explainer_type.rb index dea3441e0c..c27bcf75c9 100644 --- a/app/graph/types/explainer_type.rb +++ b/app/graph/types/explainer_type.rb @@ -1,5 +1,5 @@ class ExplainerType < DefaultObject - description "Explainer type" + description 'Explainer type' implements GraphQL::Types::Relay::Node @@ -12,10 +12,5 @@ class ExplainerType < DefaultObject field :team_id, GraphQL::Types::Int, null: true field :user, UserType, null: true field :team, PublicTeamType, null: true - - field :tags, TagType.connection_type, null: true - - def tags - Tag.where(annotation_type: 'tag', annotated_type: object.class.name, annotated_id: object.id) - end + field :tags, [GraphQL::Types::String, null: true], null: true end diff --git a/app/graph/types/fact_check_type.rb b/app/graph/types/fact_check_type.rb index 3cd177d458..0befd82e5b 100644 --- a/app/graph/types/fact_check_type.rb +++ 
b/app/graph/types/fact_check_type.rb @@ -10,4 +10,8 @@ class FactCheckType < DefaultObject field :language, GraphQL::Types::String, null: true field :user, UserType, null: true field :claim_description, ClaimDescriptionType, null: true + field :tags, [GraphQL::Types::String, null: true], null: true + field :rating, GraphQL::Types::String, null: true + field :imported, GraphQL::Types::Boolean, null: true + field :report_status, GraphQL::Types::String, null: true end diff --git a/app/graph/types/me_type.rb b/app/graph/types/me_type.rb index 71d1e950e1..0033ca3d31 100644 --- a/app/graph/types/me_type.rb +++ b/app/graph/types/me_type.rb @@ -122,7 +122,29 @@ def team_users(status: nil) return TeamUser.none unless object == User.current team_users = object.team_users team_users = team_users.where(status: status) if status - team_users + team_users.joins(:team).order('name ASC') + end + + field :team_users_count, GraphQL::Types::Int, null: true do + argument :status, GraphQL::Types::String, required: false + end + + def team_users_count(status: nil) + team_users(status: status).count + end + + field :accessible_teams, TeamType.connection_type, null: true + + def accessible_teams + return Team.none unless object == User.current + teams = User.current.is_admin? ? 
Team.all : User.current.teams.where('team_users.status' => 'member') + teams.order('name ASC') + end + + field :accessible_teams_count, GraphQL::Types::Int, null: true + + def accessible_teams_count + accessible_teams.count end field :annotations, AnnotationType.connection_type, null: true do diff --git a/app/graph/types/mutation_type.rb b/app/graph/types/mutation_type.rb index 63be2053c6..a87ea5528c 100644 --- a/app/graph/types/mutation_type.rb +++ b/app/graph/types/mutation_type.rb @@ -149,4 +149,7 @@ class MutationType < BaseObject field :createApiKey, mutation: ApiKeyMutations::Create field :destroyApiKey, mutation: ApiKeyMutations::Destroy + + field :createExplainerItem, mutation: ExplainerItemMutations::Create + field :destroyExplainerItem, mutation: ExplainerItemMutations::Destroy end diff --git a/app/graph/types/project_media_type.rb b/app/graph/types/project_media_type.rb index eed899f195..398a083944 100644 --- a/app/graph/types/project_media_type.rb +++ b/app/graph/types/project_media_type.rb @@ -7,6 +7,7 @@ class ProjectMediaType < DefaultObject field :media_id, GraphQL::Types::Int, null: true field :user_id, GraphQL::Types::Int, null: true + field :fact_check_id, GraphQL::Types::Int, null: true field :url, GraphQL::Types::String, null: true field :full_url, GraphQL::Types::String, null: true field :quote, GraphQL::Types::String, null: true @@ -319,6 +320,15 @@ def suggested_main_item &.source end + field :suggested_main_relationship, RelationshipType, null: true + + def suggested_main_relationship + Relationship + .where("relationship_type = ?", Relationship.suggested_type.to_yaml) + .where(target_id: object.id) + .first + end + field :confirmed_similar_relationships, RelationshipType.connection_type, null: true def confirmed_similar_relationships @@ -370,4 +380,18 @@ def is_secondary field :similar_items, ProjectMediaType.connection_type, null: true field :media_slug, GraphQL::Types::String, null: true + + field :fact_check, FactCheckType, null: true + 
+ field :explainers, ExplainerType.connection_type, null: true + + field :explainer_items, ExplainerItemType.connection_type, null: true + + field :articles_count, GraphQL::Types::Int, null: true + + def articles_count + count = object.explainers.count + count += 1 if object.fact_check + count + end end diff --git a/app/graph/types/query_type.rb b/app/graph/types/query_type.rb index 9725ce21b1..8ff93f7a16 100644 --- a/app/graph/types/query_type.rb +++ b/app/graph/types/query_type.rb @@ -235,6 +235,8 @@ def feed_team(id: nil, feed_id: nil, team_slug: nil) feed request tipline_message + fact_check + explainer ].each do |type| field type, "#{type.to_s.camelize}Type", diff --git a/app/graph/types/source_type.rb b/app/graph/types/source_type.rb index c9932e24d1..f4031fdef0 100644 --- a/app/graph/types/source_type.rb +++ b/app/graph/types/source_type.rb @@ -34,4 +34,14 @@ def medias field :medias_count, GraphQL::Types::Int, null: true field :collaborators, UserType.connection_type, null: true + + def image + super_admin? ? "#{CheckConfig.get('checkdesk_base_url')}/images/user.png" : object.image + end + + private + + def super_admin? 
+ object.user&.is_admin && !object.user&.is_member_of?(Team.current) + end end diff --git a/app/graph/types/team_type.rb b/app/graph/types/team_type.rb index 329b272c6c..2ad5021e9b 100644 --- a/app/graph/types/team_type.rb +++ b/app/graph/types/team_type.rb @@ -288,10 +288,66 @@ def tipline_messages(uid:) field :articles, ::ArticleUnion.connection_type, null: true do argument :article_type, GraphQL::Types::String, required: true, camelize: false - end - def articles(article_type:) - object.explainers if article_type == 'explainer' + # Sort and pagination + argument :offset, GraphQL::Types::Int, required: false, default_value: 0 + argument :sort, GraphQL::Types::String, required: false, default_value: 'title' + argument :sort_type, GraphQL::Types::String, required: false, camelize: false, default_value: 'ASC' + + # Filters + argument :user_ids, [GraphQL::Types::Int, null: true], required: false, camelize: false + argument :tags, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :language, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :updated_at, GraphQL::Types::String, required: false, camelize: false # JSON + argument :text, GraphQL::Types::String, required: false, camelize: false # Search by text + argument :standalone, GraphQL::Types::Boolean, required: false, camelize: false # Not applied to any item (fact-checks only) + argument :publisher_ids, [GraphQL::Types::Int, null: true], required: false, camelize: false + argument :report_status, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :rating, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :imported, GraphQL::Types::Boolean, required: false, camelize: false # Only for fact-checks + argument :target_id, GraphQL::Types::Int, required: false, camelize: false # Exclude articles already applied to the `ProjectMedia` with this ID + end + + def articles(**args) + sort = 
args[:sort].to_s + order = [:title, :language, :updated_at, :id].include?(sort.downcase.to_sym) ? sort.downcase.to_sym : :title + order_type = args[:sort_type].to_s.downcase.to_sym == :desc ? :desc : :asc + articles = Explainer.none + if args[:article_type] == 'explainer' + articles = object.filtered_explainers(args) + elsif args[:article_type] == 'fact-check' + articles = object.filtered_fact_checks(args) + end + articles.offset(args[:offset].to_i).order(order => order_type) + end + + field :articles_count, GraphQL::Types::Int, null: true do + argument :article_type, GraphQL::Types::String, required: false, camelize: false + + # Filters + argument :user_ids, [GraphQL::Types::Int, null: true], required: false, camelize: false + argument :tags, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :language, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :updated_at, GraphQL::Types::String, required: false, camelize: false # JSON + argument :text, GraphQL::Types::String, required: false, camelize: false # Search by text + argument :standalone, GraphQL::Types::Boolean, required: false, camelize: false # Not applied to any item (fact-checks only) + argument :publisher_ids, [GraphQL::Types::Int, null: true], required: false, camelize: false + argument :report_status, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :rating, [GraphQL::Types::String, null: true], required: false, camelize: false + argument :imported, GraphQL::Types::Boolean, required: false, camelize: false # Only for fact-checks + argument :target_id, GraphQL::Types::Int, required: false, camelize: false # Exclude articles already applied to the `ProjectMedia` with this ID + end + + def articles_count(**args) + count = nil + if args[:article_type] == 'explainer' + count = object.filtered_explainers(args).count + elsif args[:article_type] == 'fact-check' + count = object.filtered_fact_checks(args).count + 
elsif args[:article_type].blank? + count = object.filtered_explainers(args).count + object.filtered_fact_checks(args).count + end + count end field :api_key, ApiKeyType, null: true do diff --git a/app/graph/types/user_type.rb b/app/graph/types/user_type.rb index 04f1f914f0..d14b767670 100644 --- a/app/graph/types/user_type.rb +++ b/app/graph/types/user_type.rb @@ -17,4 +17,18 @@ class UserType < DefaultObject def source Source.find(object.source_id) end + + def name + super_admin? ? CheckConfig.get('super_admin_name') : object.name + end + + def profile_image + super_admin? ? "#{CheckConfig.get('checkdesk_base_url')}/images/user.png" : object.profile_image + end + + private + + def super_admin? + object&.is_admin && !object&.is_member_of?(Team.current) + end end diff --git a/app/lib/tipline_search_result.rb b/app/lib/tipline_search_result.rb new file mode 100644 index 0000000000..7482872661 --- /dev/null +++ b/app/lib/tipline_search_result.rb @@ -0,0 +1,58 @@ +class TiplineSearchResult + attr_accessor :team, :title, :body, :image_url, :language, :url, :type, :format + + def initialize(team:, title:, body:, image_url:, language:, url:, type:, format:) + self.team = team + self.title = title + self.body = body + self.image_url = image_url + self.language = language + self.url = url + self.type = type # :explainer or :fact_check + self.format = format # :text or :image + end + + def should_send_in_language?(language) + return true if self.team.get_languages.to_a.size < 2 + tbi = TeamBotInstallation.where(team_id: self.team.id, user: BotUser.alegre_user).last + should_send_report_in_different_language = !tbi&.alegre_settings&.dig('single_language_fact_checks_enabled') + self.language == language || should_send_report_in_different_language + end + + def team_report_setting_value(key, language) + self.team.get_report.to_h.with_indifferent_access.dig(language, key) + end + + def footer(language) + footer = [] + prefixes = { + whatsapp: 'WhatsApp: ', + facebook: 'FB 
Messenger: m.me/', + twitter: 'Twitter: twitter.com/', + telegram: 'Telegram: t.me/', + viber: 'Viber: ', + line: 'LINE: ', + instagram: 'Instagram: instagram.com/' + } + [:signature, :whatsapp, :facebook, :twitter, :telegram, :viber, :line, :instagram].each do |field| + value = self.team_report_setting_value(field.to_s, language) + footer << "#{prefixes[field]}#{value}" unless value.blank? + end + footer.join("\n") + end + + def text(language = nil, hide_body = false) + text = [] + text << "*#{self.title.strip}*" unless self.title.blank? + text << self.body.to_s unless hide_body + text << self.url unless self.url.blank? + text = text.collect do |part| + self.team.get_shorten_outgoing_urls ? UrlRewriter.shorten_and_utmize_urls(part, self.team.get_outgoing_urls_utm_code) : part + end + unless language.nil? + footer = self.footer(language) + text << footer if !footer.blank? && self.team_report_setting_value('use_signature', language) + end + text.join("\n\n") + end +end diff --git a/app/models/ability.rb b/app/models/ability.rb index 825d05d775..42d45f7fa6 100644 --- a/app/models/ability.rb +++ b/app/models/ability.rb @@ -170,9 +170,10 @@ def collaborator_perms v_obj = obj.item_type.constantize.find(obj.item_id) if obj.item_type == 'ProjectMedia' !v_obj.nil? 
and v_obj.team_id == @context_team.id and v_obj.media.user_id = @user.id end - can [:create, :update, :read, :destroy], FactCheck, { claim_description: { project_media: { team_id: @context_team.id } } } + can [:create, :update, :read, :destroy], FactCheck, { claim_description: { team_id: @context_team.id } } can [:create, :update, :read, :destroy], Explainer, team_id: @context_team.id - can [:create, :update, :read], ClaimDescription, { project_media: { team_id: @context_team.id } } + can [:create, :update, :read], ClaimDescription, { team_id: @context_team.id } + can [:create, :update, :read, :destroy], ExplainerItem, { project_media: { team_id: @context_team.id } } end def bot_permissions diff --git a/app/models/annotations/tag.rb b/app/models/annotations/tag.rb index 4d8d15355d..d365976f03 100644 --- a/app/models/annotations/tag.rb +++ b/app/models/annotations/tag.rb @@ -89,7 +89,7 @@ def self.run_bulk_create_callbacks(ids_json, pmids_json) def get_tag_text_reference if self.tag.is_a?(String) team_id = self.team&.id - tag_text = TagText.where(text: self.tag, team_id: team_id).last + tag_text = TagText.where(text: self.tag.strip, team_id: team_id).last if tag_text.nil? && team_id.present? tag_text = TagText.new tag_text.text = self.tag diff --git a/app/models/bot/smooch.rb b/app/models/bot/smooch.rb index 40e8b5bf77..9177a69039 100644 --- a/app/models/bot/smooch.rb +++ b/app/models/bot/smooch.rb @@ -833,14 +833,11 @@ def self.save_text_message(message) extra = { url: link.url } pm = ProjectMedia.joins(:media).where('medias.url' => link.url, 'project_medias.team_id' => team.id).last end - if pm.nil? type = link.nil? ? 
'Claim' : 'Link' pm = self.create_project_media(message, type, extra) end - self.add_hashtags(text, pm) - pm rescue SecurityError self.ban_user(message) @@ -924,9 +921,7 @@ def self.save_media_message(message) end end FileUtils.rm_f filepath - self.add_hashtags(text, pm) - pm end end diff --git a/app/models/claim_description.rb b/app/models/claim_description.rb index 28e73e3cc9..24bed73609 100644 --- a/app/models/claim_description.rb +++ b/app/models/claim_description.rb @@ -1,13 +1,21 @@ class ClaimDescription < ApplicationRecord include Article - belongs_to :project_media + has_paper_trail on: [:create, :update], ignore: [:updated_at, :created_at], if: proc { |_x| User.current.present? }, versions: { class_name: 'Version' } + + before_validation :set_team, on: :create + belongs_to :project_media, optional: true + belongs_to :team has_one :fact_check, dependent: :destroy accepts_nested_attributes_for :fact_check, reject_if: proc { |attributes| attributes['summary'].blank? } - validates_presence_of :project_media - validates_uniqueness_of :project_media_id + validates_presence_of :team + validates_uniqueness_of :project_media_id, allow_nil: true + after_commit :update_fact_check, on: [:update] + after_update :update_report_status + after_update :replace_media + after_update :migrate_claim_and_fact_check_logs, if: proc { |cd| cd.saved_change_to_project_media_id? && !cd.project_media_id.nil? } # To avoid GraphQL conflict with name `context` alias_attribute :claim_context, :context @@ -32,4 +40,77 @@ def article_elasticsearch_data(action = 'create_or_update') } self.index_in_elasticsearch(data) end + + def project_media_was + ProjectMedia.find_by_id(self.project_media_id_before_last_save) + end + + def version_metadata(_changes) + { fact_check: self.fact_check&.title }.to_json + end + + private + + def set_team + team = (self.project_media&.team || Team.current) + self.team = team unless team.nil? 
+ end + + def update_fact_check + fact_check = self.fact_check + if fact_check && self.project_media_id + fact_check.updated_at = Time.now + fact_check.save! + fact_check.update_item_status + end + end + + # Pause report when claim/fact-check is removed + def update_report_status + if self.project_media_id.nil? && !self.project_media_id_before_last_save.nil? + # Update report status + pm = ProjectMedia.find(self.project_media_id_before_last_save) + report = Annotation.where(annotation_type: 'report_design', annotated_type: 'ProjectMedia', annotated_id: pm.id).last + unless report.nil? + report = report.load + data = report.data.clone.with_indifferent_access + data[:state] = 'paused' + report.data = data + report.save! + end + + # Update fact-check report status + fact_check = self.fact_check + if fact_check + fact_check.report_status = 'paused' + fact_check.save! + end + end + end + + # Replace item if fact-check is from a blank media + def replace_media + if !self.project_media_id_before_last_save.nil? && ProjectMedia.find_by_id(self.project_media_id_before_last_save)&.type_of_media == 'Blank' + old_pm = ProjectMedia.find(self.project_media_id_before_last_save) + new_pm = self.project_media + old_pm.replace_by(new_pm) + end + end + + def migrate_claim_and_fact_check_logs + # Migrate ClaimDescription logs + cd_versions = Version.from_partition(self.team_id).where(item_type: 'ClaimDescription', item_id: self.id) + # Exclude the one related to add/remove based on object_changes field. + cd_versions = cd_versions.reject do |v| + oc = begin JSON.parse(v.object_changes) rescue {} end + oc.length == 1 && oc.keys.include?('project_media_id') + end + Version.from_partition(self.team_id).where(id: cd_versions.map(&:id)).update_all(associated_id: self.project_media_id) + fc_id = self.fact_check&.id + unless fc_id.nil? 
+ # Migrate FactCheck logs and exclude create event + Version.from_partition(self.team_id).where(item_type: 'FactCheck', item_id: fc_id) + .where.not(event: 'create').update_all(associated_id: self.project_media_id) + end + end end diff --git a/app/models/concerns/alegre_v2.rb b/app/models/concerns/alegre_v2.rb index de219a6db4..2dde5fc159 100644 --- a/app/models/concerns/alegre_v2.rb +++ b/app/models/concerns/alegre_v2.rb @@ -158,6 +158,8 @@ def content_hash(project_media, field) return Rails.cache.read("url_sha:#{project_media.url}") elsif !project_media.is_text? return project_media.media.file.filename.split(".").first + else + return Digest::MD5.hexdigest(project_media.send(field).to_s) end end end @@ -178,6 +180,26 @@ def delete_package(project_media, field, params={}, quiet=false) ).merge(params) end + def generic_package_text(project_media, field, params, fuzzy=false, match_across_content_types=true) + package = generic_package(project_media, field).merge( + params + ).merge( + models: self.indexing_models_to_use(project_media), + text: project_media.send(field), + fuzzy: fuzzy == 'true' || fuzzy.to_i == 1, + match_across_content_types: match_across_content_types, + ) + team_id = project_media.team_id + language = self.language_for_similarity(team_id) + package[:language] = language if !language.nil? 
+ package[:min_es_score] = self.get_min_es_score(team_id) + package + end + + def delete_package_text(project_media, field, params) + generic_package_text(project_media, field, params) + end + def generic_package_media(project_media, params) generic_package(project_media, nil).merge( url: media_file_url(project_media), @@ -241,6 +263,10 @@ def store_package_audio(project_media, _field, params) generic_package_audio(project_media, params) end + def store_package_text(project_media, field, params) + generic_package_text(project_media, field, params) + end + def get_sync(project_media, field=nil, params={}) request_sync( store_package(project_media, field, params), @@ -283,6 +309,7 @@ def get_target_field(project_media, field) end def parse_similarity_results(project_media, field, results, relationship_type) + results ||= [] Hash[results.collect{|result| result["context"] = isolate_relevant_context(project_media, result) [ diff --git a/app/models/concerns/article.rb b/app/models/concerns/article.rb index 854fb6ca81..e4b1ed48be 100644 --- a/app/models/concerns/article.rb +++ b/app/models/concerns/article.rb @@ -6,8 +6,6 @@ module Article included do include CheckElasticSearch - has_paper_trail on: [:create, :update], ignore: [:updated_at, :created_at], if: proc { |_x| User.current.present? }, versions: { class_name: 'Version' } - belongs_to :user before_validation :set_user @@ -15,6 +13,7 @@ module Article after_commit :update_elasticsearch_data, :send_to_alegre, :notify_bots, on: [:create, :update] after_commit :destroy_elasticsearch_data, on: :destroy + after_save :create_tag_texts_if_needed end def text_fields @@ -46,7 +45,7 @@ def notify_bots 'ClaimDescription' => 'save_claim_description', 'FactCheck' => 'save_fact_check' }[self.class.name] - BotUser.enqueue_event(event, self.project_media.team_id, self) + BotUser.enqueue_event(event, self.project_media.team_id, self) unless self.project_media.nil? 
end protected @@ -54,6 +53,7 @@ def notify_bots def index_in_elasticsearch(data) # touch project media to update `updated_at` date pm = self.project_media + return if pm.nil? pm = ProjectMedia.find_by_id(pm.id) unless pm.nil? updated_at = Time.now @@ -64,9 +64,25 @@ def index_in_elasticsearch(data) end end + def create_tag_texts_if_needed + self.class.delay.create_tag_texts_if_needed(self.team_id, self.tags) if self.respond_to?(:tags) && !self.tags.blank? + end + module ClassMethods + def create_tag_texts_if_needed(team_id, tags) + tags.to_a.map(&:strip).each do |tag| + next if TagText.where(text: tag, team_id: team_id).exists? + tag_text = TagText.new + tag_text.text = tag + tag_text.team_id = team_id + tag_text.skip_check_ability = true + tag_text.save + end + end + def send_to_alegre(id) obj = self.find_by_id(id) + return if obj.project_media.nil? obj.text_fields.each do |field| ::Bot::Alegre.send_field_to_similarity_index(obj.project_media, field) end unless obj.nil? diff --git a/app/models/concerns/project_media_associations.rb b/app/models/concerns/project_media_associations.rb index 5f9ca5c1b1..40378101ab 100644 --- a/app/models/concerns/project_media_associations.rb +++ b/app/models/concerns/project_media_associations.rb @@ -21,6 +21,8 @@ module ProjectMediaAssociations has_one :claim_description, dependent: :destroy belongs_to :source, optional: true has_many :tipline_requests, as: :associated + has_many :explainer_items, dependent: :destroy + has_many :explainers, through: :explainer_items has_annotations end end diff --git a/app/models/concerns/project_media_cached_fields.rb b/app/models/concerns/project_media_cached_fields.rb index 6f5c966c42..87ccf7dd17 100644 --- a/app/models/concerns/project_media_cached_fields.rb +++ b/app/models/concerns/project_media_cached_fields.rb @@ -13,16 +13,17 @@ def title_or_description_update [ { model: ClaimDescription, - affected_ids: proc { |cd| [cd.project_media] }, + affected_ids: proc { |cd| [cd.project_media_id, 
cd.project_media_id_before_last_save] }, events: { save: :recalculate } }, { model: FactCheck, - affected_ids: proc { |fc| [fc.claim_description.project_media] }, + affected_ids: proc { |fc| [fc.claim_description.project_media_id] }, events: { - save: :recalculate + save: :recalculate, + destroy: :recalculate } }, { @@ -71,9 +72,10 @@ def title_or_description_update FACT_CHECK_EVENT = { model: FactCheck, - affected_ids: proc { |fc| [fc.claim_description.project_media] }, + affected_ids: proc { |fc| [fc.claim_description.project_media_id] }, events: { - save: :recalculate + save: :recalculate, + destroy: :recalculate } } @@ -176,6 +178,11 @@ def title_or_description_update } ] + cached_field :fact_check_id, + start_as: nil, + recalculate: :recalculate_fact_check_id, + update_on: [FACT_CHECK_EVENT] + cached_field :fact_check_title, start_as: nil, recalculate: :recalculate_fact_check_title, @@ -540,6 +547,10 @@ def recalculate_last_seen [v1, v2].max.to_i end + def recalculate_fact_check_id + self.claim_description&.fact_check&.id + end + def recalculate_fact_check_title self.claim_description&.fact_check&.title end diff --git a/app/models/concerns/project_media_creators.rb b/app/models/concerns/project_media_creators.rb index 1c8e1dbcaf..6b4e405b7a 100644 --- a/app/models/concerns/project_media_creators.rb +++ b/app/models/concerns/project_media_creators.rb @@ -253,6 +253,9 @@ def create_claim_description_and_fact_check publish_report: !!fact_check['publish_report'], signature: Digest::MD5.hexdigest([self.set_fact_check.to_json, self.team_id].join(':')), claim_description: cd, + report_status: (fact_check['publish_report'] ? 
'published' : 'unpublished'), + rating: self.set_status, + tags: self.set_tags.to_a.map(&:strip), skip_check_ability: true }) end @@ -260,6 +263,6 @@ def create_claim_description_and_fact_check end def create_tags - self.set_tags.each { |tag| Tag.create!(annotated: self, tag: tag, skip_check_ability: true) } if self.set_tags.is_a?(Array) + self.set_tags.each { |tag| Tag.create!(annotated: self, tag: tag.strip, skip_check_ability: true) } if self.set_tags.is_a?(Array) end end diff --git a/app/models/concerns/project_media_getters.rb b/app/models/concerns/project_media_getters.rb index ba7af18797..b8662224e7 100644 --- a/app/models/concerns/project_media_getters.rb +++ b/app/models/concerns/project_media_getters.rb @@ -153,7 +153,7 @@ def get_title return self.send(title_mapping[title_field]).to_s end title = self.original_title - [self.analysis['file_title'], self.analysis['title'], self.fact_check_title, self.claim_description_content].each do |value| + [self.analysis['file_title'], self.analysis['title'], self.fact_check_title(true), self.claim_description_content].each do |value| title = value if !value.blank? && value != '-' && value != '​' end title.to_s @@ -174,7 +174,7 @@ def media_slug end def get_description - return self.fact_check_summary if self.get_main_channel == CheckChannels::ChannelCodes::FETCH + return self.fact_check_summary(true) if self.get_main_channel == CheckChannels::ChannelCodes::FETCH analysis_description = self.has_analysis_description? ? 
self.analysis_description : nil self.claim_description_content || analysis_description || self.original_description end @@ -209,4 +209,8 @@ def get_creator_name def team_avatar self.team.avatar end + + def fact_check + self.claim_description&.fact_check + end end diff --git a/app/models/concerns/smooch_messages.rb b/app/models/concerns/smooch_messages.rb index 27311dc759..2e8a2d590e 100644 --- a/app/models/concerns/smooch_messages.rb +++ b/app/models/concerns/smooch_messages.rb @@ -291,11 +291,15 @@ def bundle_list_of_messages_to_items(list, last) if message['type'] == 'text' # Get an item for long text (message that match number of words condition) if message['payload'].nil? - messages << message if ::Bot::Alegre.get_number_of_words(message['text'].to_s) > CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer) + contains_link = Twitter::TwitterText::Extractor.extract_urls(message['text']) + messages << message if !contains_link.blank? || ::Bot::Alegre.get_number_of_words(message['text'].to_s) > CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer) text << message['text'] end elsif !message['mediaUrl'].blank? # Get an item for each media file + if !message['text'].blank? && ::Bot::Alegre.get_number_of_words(message['text'].to_s) > CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer) + message['caption'] = message['text'] + end message['text'] = [message['text'], message['mediaUrl'].to_s].compact.join("\n#{Bot::Smooch::MESSAGE_BOUNDARY}") text << message['text'] messages << self.adjust_media_type(message) @@ -394,19 +398,43 @@ def save_message(message_json, app_id, author = nil, request_type = 'default_req message['archived'] = (request_type == 'relevant_search_result_requests' ? self.default_archived_flag : CheckArchivedFlags::FlagCodes::UNCONFIRMED) associated = self.create_project_media_from_message(message) end - unless associated.nil? - # Remember that we received this message. 
- hash = self.message_hash(message) - Rails.cache.write("smooch:message:#{hash}", associated.id) - self.smooch_save_tipline_request(message, associated, app_id, author, request_type, associated_obj) - # If item is published (or parent item), send a report right away - self.get_platform_from_message(message) - self.send_report_to_user(message['authorId'], message, associated, message['language'], 'fact_check_report') if self.should_try_to_send_report?(request_type, associated) + self.smoooch_post_save_message_actions(message, associated, app_id, author, request_type, associated_obj) + # Check if message contains caption then create an item and force relationship + self.relate_item_and_caption(message, associated, app_id, author, request_type, associated_obj) unless message['caption'].blank? end end end + def smoooch_post_save_message_actions(message, associated, app_id, author, request_type, associated_obj) + # Remember that we received this message. + hash = self.message_hash(message) + Rails.cache.write("smooch:message:#{hash}", associated.id) + self.smooch_save_tipline_request(message, associated, app_id, author, request_type, associated_obj) + # If item is published (or parent item), send a report right away + self.get_platform_from_message(message) + self.send_report_to_user(message['authorId'], message, associated, message['language'], 'fact_check_report') if self.should_try_to_send_report?(request_type, associated) + end + + def relate_item_and_caption(message, associated, app_id, author, request_type, associated_obj) + message['_id'] = SecureRandom.hex + message['type'] = 'text' + message['request_body'] = message['text'] + message['text'] = message['caption'] + message.delete('caption') + message.delete('mediaUrl') + target = self.create_project_media_from_message(message) + unless target.nil? 
+ smoooch_post_save_message_actions(message, target, app_id, author, request_type, associated_obj) + r = Relationship.new + r.skip_check_ability = true + r.relationship_type = Relationship.suggested_type + r.source_id = associated.id + r.target_id = target.id + r.save! + end + end + def smooch_save_tipline_request(message, associated, app_id, author, request_type, associated_obj) message['text'] = message['request_body'] unless message['request_body'].blank? message.delete('request_body') diff --git a/app/models/concerns/smooch_search.rb b/app/models/concerns/smooch_search.rb index 8dd4729892..8c66f2c1c0 100644 --- a/app/models/concerns/smooch_search.rb +++ b/app/models/concerns/smooch_search.rb @@ -4,6 +4,7 @@ module SmoochSearch extend ActiveSupport::Concern module ClassMethods + # This method runs in background def search(app_id, uid, language, message, team_id, workflow, provider = nil) platform = self.get_platform_from_message(message) @@ -11,16 +12,26 @@ def search(app_id, uid, language, message, team_id, workflow, provider = nil) sm = CheckStateMachine.new(uid) self.get_installation(self.installation_setting_id_keys, app_id) if self.config.blank? RequestStore.store[:smooch_bot_provider] = provider unless provider.blank? - results = self.get_search_results(uid, message, team_id, language).select do |pm| - pm = Relationship.confirmed_parent(pm) - report = pm.get_dynamic_annotation('report_design') - !report.nil? && !!report.should_send_report_in_this_language?(language) - end.collect{ |pm| Relationship.confirmed_parent(pm) }.uniq - if results.empty? + query = self.get_search_query(uid, message) + results = self.get_search_results(uid, query, team_id, language).collect{ |pm| Relationship.confirmed_parent(pm) }.uniq + reports = results.select{ |pm| pm.report_status == 'published' }.collect{ |pm| pm.get_dynamic_annotation('report_design') }.reject{ |r| r.nil? 
}.collect{ |r| r.report_design_to_tipline_search_result }.select{ |r| r.should_send_in_language?(language) } + + # Extract explainers from matched media if they don't have published fact-checks but they have explainers + reports = results.collect{ |pm| pm.explainers.to_a }.flatten.uniq.first(3).map(&:as_tipline_search_result) if !results.empty? && reports.empty? + + # Search for explainers if fact-checks were not found + if reports.empty? && query['type'] == 'text' + explainers = self.search_for_explainers(uid, query['text'], team_id, language).first(3).select{ |explainer| explainer.as_tipline_search_result.should_send_in_language?(language) } + Rails.logger.info "[Smooch Bot] Text similarity search got #{explainers.count} explainers while looking for '#{query['text']}' for team #{team_id}" + results = explainers.collect{ |explainer| explainer.project_medias.to_a }.flatten.uniq.reject{ |pm| pm.blank? }.first(3) + reports = explainers.map(&:as_tipline_search_result) + end + + if reports.empty? 
self.bundle_messages(uid, '', app_id, 'default_requests', nil, true) self.send_final_message_to_user(uid, self.get_custom_string('search_no_results', language), workflow, language, 'no_results') else - self.send_search_results_to_user(uid, results, team_id, platform) + self.send_search_results_to_user(uid, reports, team_id, platform) sm.go_to_search_result self.save_search_results_for_user(uid, results.map(&:id)) self.delay_for(1.second, { queue: 'smooch_priority' }).ask_for_feedback_when_all_search_results_are_received(app_id, language, workflow, uid, platform, provider, 1) @@ -80,7 +91,7 @@ def filter_search_results(pms, after, feed_id, team_ids) end def is_a_valid_search_result(pm) - pm.report_status == 'published' && [CheckArchivedFlags::FlagCodes::NONE, CheckArchivedFlags::FlagCodes::UNCONFIRMED].include?(pm.archived) + (pm.report_status == 'published' || pm.explainers.count > 0) && [CheckArchivedFlags::FlagCodes::NONE, CheckArchivedFlags::FlagCodes::UNCONFIRMED].include?(pm.archived) end def reject_temporary_results(results) @@ -91,7 +102,7 @@ def reject_temporary_results(results) def parse_search_results_from_alegre(results, after = nil, feed_id = nil, team_ids = nil) pms = reject_temporary_results(results).sort_by{ |a| [a[1][:model] != Bot::Alegre::ELASTICSEARCH_MODEL ? 1 : 0, a[1][:score]] }.to_h.keys.reverse.collect{ |id| Relationship.confirmed_parent(ProjectMedia.find_by_id(id)) } - filter_search_results(pms, after, feed_id, team_ids).uniq(&:id).first(3) + filter_search_results(pms, after, feed_id, team_ids).uniq(&:id).sort_by{ |pm| pm.report_status == 'published' ? 0 : 1 }.first(3) end def date_filter(team_id) @@ -111,11 +122,14 @@ def get_text_similarity_threshold value == 0.0 ? 
0.85 : value end - def get_search_results(uid, last_message, team_id, language) + def get_search_query(uid, last_message) + list = self.list_of_bundled_messages_from_user(uid) + self.bundle_list_of_messages(list, last_message, true) + end + + def get_search_results(uid, message, team_id, language) results = [] begin - list = self.list_of_bundled_messages_from_user(uid) - message = self.bundle_list_of_messages(list, last_message, true) type = message['type'] after = self.date_filter(team_id) query = message['text'] @@ -243,22 +257,22 @@ def search_by_keywords_for_similar_published_fact_checks(words, after, team_ids, results end - def send_search_results_to_user(uid, results, team_id, platform) + def send_search_results_to_user(uid, reports, team_id, platform) team = Team.find(team_id) redis = Redis.new(REDIS_CONFIG) language = self.get_user_language(uid) - reports = results.collect{ |r| r.get_dynamic_annotation('report_design') } - # Get reports languages - reports_language = reports.map { |r| r&.report_design_field_value('language') }.uniq - if team.get_languages.to_a.size > 1 && !reports_language.include?(language) + reports_languages = reports.map(&:language).uniq + + if team.get_languages.to_a.size > 1 && !reports_languages.include?(language) self.send_message_to_user(uid, self.get_string(:no_results_in_language, language).gsub('%{language}', CheckCldr.language_code_to_name(language, language)), {}, false, true, 'no_results') sleep 1 end - reports.reject{ |r| r.blank? }.each do |report| + + reports.each do |report| response = nil - no_body = (platform == 'Facebook Messenger' && !report.report_design_field_value('published_article_url').blank?) 
- response = self.send_message_to_user(uid, report.report_design_text(nil, no_body), {}, false, true, 'search_result') if report.report_design_field_value('use_text_message') - response = self.send_message_to_user(uid, '', { 'type' => 'image', 'mediaUrl' => report.report_design_image_url }, false, true, 'search_result') if !report.report_design_field_value('use_text_message') && report.report_design_field_value('use_visual_card') + no_body = (platform == 'Facebook Messenger' && !report.url.blank?) + response = self.send_message_to_user(uid, report.text(nil, no_body), {}, false, true, 'search_result') if report.format == :text + response = self.send_message_to_user(uid, '', { 'type' => 'image', 'mediaUrl' => report.image_url }, false, true, 'search_result') if report.format == :image id = self.get_id_from_send_response(response) redis.rpush("smooch:search:#{uid}", id) unless id.blank? end @@ -284,5 +298,22 @@ def ask_for_feedback_when_all_search_results_are_received(app_id, language, work self.delay_for(1.second, { queue: 'smooch_priority' }).ask_for_feedback_when_all_search_results_are_received(app_id, language, workflow, uid, platform, provider, attempts + 1) if attempts < max # Try for 20 seconds end end + + def search_for_explainers(uid, query, team_id, language) + results = nil + begin + text = ::Bot::Smooch.extract_claim(query) + if Bot::Alegre.get_number_of_words(text) == 1 + results = Explainer.where(team_id: team_id).where('description ILIKE ? 
OR title ILIKE ?', "%#{text}%", "%#{text}%") + results = results.where(language: language) if should_restrict_by_language?([team_id]) + results = results.order('updated_at DESC') + else + results = Explainer.search_by_similarity(text, language, team_id) + end + rescue StandardError => e + self.handle_search_error(uid, e, language) + end + results.joins(:project_medias) + end end end diff --git a/app/models/concerns/team_associations.rb b/app/models/concerns/team_associations.rb index 5d0cbb5adc..d529cdb243 100644 --- a/app/models/concerns/team_associations.rb +++ b/app/models/concerns/team_associations.rb @@ -93,6 +93,7 @@ def medias_count(obj = nil) conditions[key] = obj.id relationship_type = Team.sanitize_sql(Relationship.confirmed_type.to_yaml) ProjectMedia.where(conditions) + .joins(:media).where('medias.type != ?', 'Blank') .joins("LEFT JOIN relationships r ON r.target_id = project_medias.id AND r.relationship_type = '#{relationship_type}'") .where('r.id IS NULL').count end @@ -116,4 +117,8 @@ def check_search_unconfirmed def check_search_spam check_search_filter({ 'archived' => CheckArchivedFlags::FlagCodes::SPAM }) end + + def fact_checks + FactCheck.joins(:claim_description).where('claim_descriptions.team_id' => self.id) + end end diff --git a/app/models/explainer.rb b/app/models/explainer.rb index 52a01f214f..a4319e718a 100644 --- a/app/models/explainer.rb +++ b/app/models/explainer.rb @@ -1,21 +1,115 @@ class Explainer < ApplicationRecord include Article + # FIXME: Read from workspace settings + ALEGRE_MODELS_AND_THRESHOLDS = { + # Bot::Alegre::ELASTICSEARCH_MODEL => 0.8 # Sometimes this is easier for local development + Bot::Alegre::PARAPHRASE_MULTILINGUAL_MODEL => 0.7 + } + belongs_to :team has_annotations + has_many :explainer_items, dependent: :destroy + has_many :project_medias, through: :explainer_items before_validation :set_team validates_format_of :url, with: URI.regexp, allow_blank: true, allow_nil: true - validates_presence_of :team + 
validates_presence_of :team, :title, :description validate :language_in_allowed_values, unless: proc { |e| e.language.blank? } + after_save :update_paragraphs_in_alegre + def notify_bots # Nothing to do for Explainer end def send_to_alegre - # Nothing to do for Explainer + # Let's not use the same callbacks from article.rb + end + + def as_tipline_search_result + TiplineSearchResult.new( + team: self.team, + title: self.title, + body: self.description, + image_url: nil, + language: self.language, + url: self.url, + type: :explainer, + format: :text + ) + end + + def update_paragraphs_in_alegre + previous_paragraphs_count = self.description_before_last_save.to_s.gsub(/\r\n?/, "\n").split(/\n+/).reject{ |paragraph| paragraph.strip.blank? }.size + + # Schedule to run 5 seconds later - it's a way to be sure there won't be more updates coming + self.class.delay_for(5.seconds).update_paragraphs_in_alegre(self.id, previous_paragraphs_count, Time.now.to_f) + end + + def self.update_paragraphs_in_alegre(id, previous_paragraphs_count, timestamp) + explainer = Explainer.find(id) + + # Skip if the explainer was saved since this job was created (it means that there is a more recent job) + return if explainer.updated_at.to_f > timestamp + + base_context = { + type: 'explainer', + team: explainer.team.slug, + language: explainer.language, + explainer_id: explainer.id + } + + # Index title + params = { + doc_id: Digest::MD5.hexdigest(['explainer', explainer.id, 'title'].join(':')), + text: explainer.title, + models: ALEGRE_MODELS_AND_THRESHOLDS.keys, + context: base_context.merge({ field: 'title' }) + } + Bot::Alegre.request('post', '/text/similarity/', params) + + # Index paragraphs + count = 0 + explainer.description.to_s.gsub(/\r\n?/, "\n").split(/\n+/).reject{ |paragraph| paragraph.strip.blank? 
}.each do |paragraph| + count += 1 + params = { + doc_id: Digest::MD5.hexdigest(['explainer', explainer.id, 'paragraph', count].join(':')), + text: paragraph.strip, + models: ALEGRE_MODELS_AND_THRESHOLDS.keys, + context: base_context.merge({ paragraph: count }) + } + Bot::Alegre.request('post', '/text/similarity/', params) + end + + # Remove paragraphs that don't exist anymore (we delete after updating in order to avoid race conditions) + previous_paragraphs_count.times do |index| + next if index < count + params = { + doc_id: Digest::MD5.hexdigest(['explainer', explainer.id, 'paragraph', index + 1].join(':')), + quiet: true, + context: base_context.merge({ paragraph: index + 1 }) + } + Bot::Alegre.request('delete', '/text/similarity/', params) + end + end + + def self.search_by_similarity(text, language, team_id) + params = { + text: text, + models: ALEGRE_MODELS_AND_THRESHOLDS.keys, + per_model_threshold: ALEGRE_MODELS_AND_THRESHOLDS, + context: { + type: 'explainer', + team: Team.find(team_id).slug, + language: language + } + } + response = Bot::Alegre.request('post', '/text/similarity/search/', params) + results = response['result'].to_a.sort_by{ |result| -result['_score'].to_f } + explainer_ids = results.collect{ |result| result.dig('_source', 'context', 'explainer_id').to_i }.uniq.first(3) + explainer_ids.empty? ? Explainer.none : Explainer.where(team_id: team_id, id: explainer_ids) end private diff --git a/app/models/explainer_item.rb b/app/models/explainer_item.rb new file mode 100644 index 0000000000..7fc5be1f49 --- /dev/null +++ b/app/models/explainer_item.rb @@ -0,0 +1,20 @@ +# Join model +class ExplainerItem < ApplicationRecord + has_paper_trail on: [:create, :destroy], ignore: [:updated_at, :created_at], if: proc { |_x| User.current.present? 
}, versions: { class_name: 'Version' } + + belongs_to :explainer + belongs_to :project_media + + validates_presence_of :explainer, :project_media + validate :same_team + + def version_metadata(_changes) + { explainer_title: self.explainer.title }.to_json + end + + private + + def same_team + errors.add(:base, I18n.t(:explainer_and_item_must_be_from_the_same_team)) unless self.explainer&.team_id == self.project_media&.team_id + end +end diff --git a/app/models/fact_check.rb b/app/models/fact_check.rb index 2a3e7ac2b8..5d42782496 100644 --- a/app/models/fact_check.rb +++ b/app/models/fact_check.rb @@ -1,18 +1,25 @@ class FactCheck < ApplicationRecord include Article + has_paper_trail on: [:create, :update], ignore: [:updated_at, :created_at, :rating, :report_status], if: proc { |_x| User.current.present? }, versions: { class_name: 'Version' } + + enum report_status: { unpublished: 0, published: 1, paused: 2 } + attr_accessor :skip_report_update, :publish_report belongs_to :claim_description + before_validation :set_initial_rating, on: :create, if: proc { |fc| fc.rating.blank? && fc.claim_description.present? } before_validation :set_language, on: :create, if: proc { |fc| fc.language.blank? } + before_validation :set_imported, on: :create validates_presence_of :claim_description validates_uniqueness_of :claim_description_id validates_format_of :url, with: URI.regexp, allow_blank: true, allow_nil: true - validate :language_in_allowed_values, :title_or_summary_exists + validate :language_in_allowed_values, :title_or_summary_exists, :rating_in_allowed_values - after_save :update_report + after_save :update_report, unless: proc { |fc| fc.skip_report_update || !DynamicAnnotation::AnnotationType.where(annotation_type: 'report_design').exists? || fc.project_media.blank? } + after_save :update_item_status, if: proc { |fc| fc.saved_change_to_rating? 
} def text_fields ['fact_check_title', 'fact_check_summary'] @@ -22,25 +29,54 @@ def project_media self.claim_description&.project_media end + def team_id + self.claim_description&.team_id + end + + def team + self.claim_description&.team + end + + def update_item_status + pm = self.project_media + s = pm&.last_status_obj + if !s.nil? && s.status != self.rating + s.skip_check_ability = true + s.status = self.rating + s.save! + end + end + private def set_language - languages = self.project_media&.team&.get_languages || ['en'] + languages = self.claim_description&.team&.get_languages || ['en'] self.language = languages.length == 1 ? languages.first : 'und' end + def set_imported + self.imported = true if self.user&.type == 'BotUser' # We consider "imported" the fact-checks that are not created by humans inside Check + end + def language_in_allowed_values - allowed_languages = self.project_media&.team&.get_languages || ['en'] + allowed_languages = self.claim_description&.team&.get_languages || ['en'] allowed_languages << 'und' errors.add(:language, I18n.t(:"errors.messages.invalid_article_language_value")) unless allowed_languages.include?(self.language) end + def rating_in_allowed_values + unless self.rating.blank? + team = self.claim_description.team + allowed_statuses = team.verification_statuses('media', nil)['statuses'].collect{ |s| s[:id] } + errors.add(:rating, I18n.t(:workflow_status_is_not_valid, status: self.rating, valid: allowed_statuses.join(', '))) unless allowed_statuses.include?(self.rating) + end + end + def title_or_summary_exists errors.add(:base, I18n.t(:"errors.messages.fact_check_empty_title_and_summary")) if self.title.blank? && self.summary.blank? end def update_report - return if self.skip_report_update || !DynamicAnnotation::AnnotationType.where(annotation_type: 'report_design').exists? 
pm = self.project_media reports = pm.get_dynamic_annotation('report_design') || Dynamic.new(annotation_type: 'report_design', annotated: pm) data = reports.data.to_h.with_indifferent_access @@ -93,4 +129,10 @@ def article_elasticsearch_data(action = 'create_or_update') } self.index_in_elasticsearch(data) end + + def set_initial_rating + pm_rating = self.project_media&.last_status + default_rating = self.claim_description.team.verification_statuses('media', nil)['default'] + self.rating = pm_rating || default_rating + end end diff --git a/app/models/project_media.rb b/app/models/project_media.rb index ea59146547..534b506698 100644 --- a/app/models/project_media.rb +++ b/app/models/project_media.rb @@ -271,7 +271,7 @@ def replace_by(new_pm, skip_send_report = false) new_pm.skip_check_ability = true new_pm.channel = { main: CheckChannels::ChannelCodes::FETCH } # Point the claim and consequently the fact-check - new_pm.claim_description = self.claim_description + new_pm.claim_description = self.claim_description if self.claim_description new_pm.save(validate: false) # To skip channel validation RequestStore.store[:skip_check_ability] = false diff --git a/app/models/team.rb b/app/models/team.rb index 349cb569f8..6ba123bb6b 100644 --- a/app/models/team.rb +++ b/app/models/team.rb @@ -3,6 +3,7 @@ class Team < ApplicationRecord after_create :create_team_partition before_destroy :delete_created_bots, :remove_is_default_project_flag + include SearchHelper include ValidationsHelper include DestroyLater include TeamValidations @@ -481,6 +482,68 @@ def available_newsletter_header_types available end + def filtered_explainers(filters = {}) + query = self.explainers + + # Filter by tags + query = query.where('ARRAY[?]::varchar[] && tags', filters[:tags].to_a.map(&:to_s)) unless filters[:tags].blank? + + # Filter by user + query = query.where(user_id: filters[:user_ids].to_a.map(&:to_i)) unless filters[:user_ids].blank? 
+ + # Filter by date + query = query.where(updated_at: Range.new(*format_times_search_range_filter(JSON.parse(filters[:updated_at]), nil))) unless filters[:updated_at].blank? + + # Filter by text + query = query.where('(title ILIKE ? OR url ILIKE ? OR description ILIKE ?)', *["%#{filters[:text]}%"]*3) if filters[:text].to_s.size > 2 + + # Exclude the ones already applied to a target item + target = ProjectMedia.find_by_id(filters[:target_id].to_i) + query = query.where.not(id: target.explainer_ids) unless target.nil? + + query + end + + def filtered_fact_checks(filters = {}) + query = FactCheck.includes(:claim_description).where('claim_descriptions.team_id' => self.id) + + # Filter by standalone + query = query.left_joins(claim_description: { project_media: :media }).where('claim_descriptions.project_media_id IS NULL OR medias.type = ?', 'Blank') if filters[:standalone] + + # Filter by language + query = query.where('fact_checks.language' => filters[:language].to_a) unless filters[:language].blank? + + # Filter by tags + query = query.where('ARRAY[?]::varchar[] && fact_checks.tags', filters[:tags].to_a.map(&:to_s)) unless filters[:tags].blank? + + # Filter by user + query = query.where('fact_checks.user_id' => filters[:user_ids].to_a.map(&:to_i)) unless filters[:user_ids].blank? + + # Filter by date + query = query.where('fact_checks.updated_at' => Range.new(*format_times_search_range_filter(JSON.parse(filters[:updated_at]), nil))) unless filters[:updated_at].blank? + + # Filter by publisher + query = query.where('fact_checks.publisher_id' => filters[:publisher_ids].to_a.map(&:to_i)) unless filters[:publisher_ids].blank? + + # Filter by rating + query = query.where('fact_checks.rating' => filters[:rating].to_a.map(&:to_s)) unless filters[:rating].blank? + + # Filter by imported + query = query.where('fact_checks.imported' => !!filters[:imported]) unless filters[:imported].nil? 
+ + # Filter by report status + query = query.where('fact_checks.report_status' => filters[:report_status].to_a.map(&:to_s)) unless filters[:report_status].blank? + + # Filter by text + query = query.where('(fact_checks.title ILIKE ? OR fact_checks.url ILIKE ? OR fact_checks.summary ILIKE ?)', *["%#{filters[:text]}%"]*3) if filters[:text].to_s.size > 2 + + # Exclude the ones already applied to a target item + target = ProjectMedia.find_by_id(filters[:target_id].to_i) + query = query.where.not('fact_checks.id' => target.fact_check_id) unless target.nil? + + query + end + # private # # Please add private methods to app/models/concerns/team_private.rb diff --git a/app/models/user.rb b/app/models/user.rb index d0dcfde3f0..9c226d378d 100644 --- a/app/models/user.rb +++ b/app/models/user.rb @@ -245,7 +245,7 @@ def send_failed_login_notifications=(enabled) end def profile_image - self.source.nil? ? nil : self.source.avatar + self.source.nil? ? nil : self.source.image end def bot_events diff --git a/app/models/version.rb b/app/models/version.rb index cc53ab1f20..7c13f00601 100644 --- a/app/models/version.rb +++ b/app/models/version.rb @@ -145,9 +145,16 @@ def get_associated when 'create_assignment', 'destroy_assignment' self.get_associated_from_assignment when 'create_claimdescription', 'update_claimdescription' - ['ProjectMedia', self.item.project_media_id] - when 'create_factcheck' + pm_id = self.item.project_media_id + if pm_id.nil? + changes = self.get_object_changes + pm_id = changes['project_media_id'][0] if changes.has_key?('project_media_id') && changes['project_media_id'][1].nil? 
+ end + ['ProjectMedia', pm_id] + when 'create_factcheck', 'update_factcheck' ['ProjectMedia', self.item.claim_description.project_media_id] + when 'create_explaineritem', 'destroy_explaineritem' + ['ProjectMedia', self.item.project_media_id] end end diff --git a/app/models/workflow/verification_status.rb b/app/models/workflow/verification_status.rb index 7ff6a7ae08..3471014481 100644 --- a/app/models/workflow/verification_status.rb +++ b/app/models/workflow/verification_status.rb @@ -6,7 +6,6 @@ class Workflow::VerificationStatus < Workflow::Base check_workflow on: :create, actions: :index_on_es_background check_workflow on: :update, actions: :index_on_es_foreground - def self.core_default_value 'undetermined' end @@ -109,6 +108,14 @@ def update_report_design_if_needed }) report.data = data report.save! + # update FactCheck rating + fc = pm&.claim_description&.fact_check + if !fc.nil? && fc.rating != self.value + fc.skip_report_update = true + fc.skip_check_ability = true + fc.rating = self.value + fc.save! + end end end end diff --git a/config/config.yml.example b/config/config.yml.example index 691cd71933..e14ec0ee77 100644 --- a/config/config.yml.example +++ b/config/config.yml.example @@ -11,6 +11,8 @@ development: &default elasticsearch_index: elasticsearch_log: true elasticsearch_sync: false + super_admin_name: 'Meedan' + # WARNING For production, don't use a wildcard: set the allowed domains explicitly as a regular expression, e.g. 
# '(https?://.*\.?(meedan.com|meedan.org))' allowed_origins: '.*' @@ -273,6 +275,9 @@ development: &default devise_unlock_accounts_after: 1 login_rate_limit: 10 api_rate_limit: 100 + + session_store_key: '_checkdesk_session_dev' + session_store_domain: 'localhost' test: <<: *default checkdesk_base_url_private: http://api:3000 diff --git a/config/initializers/devise.rb b/config/initializers/devise.rb index 0c43abae93..795bf0eb2e 100644 --- a/config/initializers/devise.rb +++ b/config/initializers/devise.rb @@ -1,4 +1,5 @@ require 'error_codes' +require 'redis' class CustomFailure < Devise::FailureApp def respond @@ -49,6 +50,12 @@ def http_auth_body end config.mailer = 'DeviseMailer' config.invite_for = 1.month + + Warden::Manager.after_authentication do |user, auth, opts| + @redis = Redis.new(REDIS_CONFIG) + ip = auth.request.ip + @redis.decr("track:#{ip}") + end end AuthTrail.geocode = false diff --git a/config/initializers/rack_attack.rb b/config/initializers/rack_attack.rb index ce88316761..ba10bb09da 100644 --- a/config/initializers/rack_attack.rb +++ b/config/initializers/rack_attack.rb @@ -6,8 +6,19 @@ def self.real_ip(req) req.get_header('HTTP_CF_CONNECTING_IP') || req.ip end + def self.authenticated?(req) + warden = req.env['warden'] + warden && warden.user.present? 
+ end + # Throttle all graphql requests by IP address - throttle('api/graphql', limit: proc { CheckConfig.get('api_rate_limit', 100, :integer) }, period: 60.seconds) do |req| + throttle('api/graphql', limit: proc { |req| + if authenticated?(req) + CheckConfig.get('api_rate_limit_authenticated', 1000, :integer) + else + CheckConfig.get('api_rate_limit', 100, :integer) + end + }, period: 60.seconds) do |req| real_ip(req) if req.path == '/api/graphql' end @@ -25,6 +36,8 @@ def self.real_ip(req) count = redis.incr("track:#{ip}") redis.expire("track:#{ip}", 3600) # Set the expiration time to 1 hour + redis.set("track:#{ip}", 0) if count < 0 + # Add IP to blocklist if count exceeds the threshold if count.to_i >= CheckConfig.get('login_block_limit', 100, :integer) redis.set("block:#{ip}", true) # No expiration diff --git a/config/initializers/report_designer.rb b/config/initializers/report_designer.rb index 79743c8b80..ffbf4d7d2b 100644 --- a/config/initializers/report_designer.rb +++ b/config/initializers/report_designer.rb @@ -21,7 +21,17 @@ user = self.annotator || User.current url = self.report_design_field_value('published_article_url') language = self.report_design_field_value('language') - fields = { user: user, skip_report_update: true , url: url, language: language } + state = self.data['state'] + publisher_id = state == 'published' ? self.annotator_id : nil + fields = { + user: user, + skip_report_update: true, + url: url, + language: language, + publisher_id: publisher_id, + report_status: state, + rating: pm.status + } if self.report_design_field_value('use_text_message') title = self.report_design_field_value('title') summary = self.report_design_field_value('text') @@ -40,9 +50,11 @@ if fc.nil? FactCheck.create({ claim_description: pm.claim_description }.merge(fields)) else - fields.each { |field, value| fc.send("#{field}=", value) } - fc.skip_check_ability = true - fc.save! 
+ PaperTrail.request(enabled: false) do + fields.each { |field, value| fc.send("#{field}=", value) } + fc.skip_check_ability = true + fc.save! + end end end @@ -51,6 +63,25 @@ Feed.delay_for(1.minute, retry: 0).notify_subscribers(pm, title, summary, url) Request.delay_for(1.minute, retry: 0).update_fact_checked_by(pm) end + + if self.annotation_type == 'report_design' && self.action =~ /pause/ + # Update report fields + fc = pm&.claim_description&.fact_check + unless fc.nil? + PaperTrail.request(enabled: false) do + state = self.data['state'] + fields = { + skip_report_update: true, + publisher_id: nil, + report_status: state, + rating: pm.status + } + fields.each { |field, value| fc.send("#{field}=", value) } + fc.skip_check_ability = true + fc.save! + end + end + end end def report_design_introduction(data, language) @@ -66,41 +97,24 @@ def report_design_team_setting_value(field, language) self.annotated&.team&.get_report.to_h.with_indifferent_access.dig(language, field) if self.annotation_type == 'report_design' end - def report_design_text_footer(language) - footer = [] - prefixes = { - whatsapp: 'WhatsApp: ', - facebook: 'FB Messenger: m.me/', - twitter: 'Twitter: twitter.com/', - telegram: 'Telegram: t.me/', - viber: 'Viber: ', - line: 'LINE: ', - instagram: 'Instagram: instagram.com/' - } - [:signature, :whatsapp, :facebook, :twitter, :telegram, :viber, :line, :instagram].each do |field| - value = self.report_design_team_setting_value(field.to_s, language) - footer << "#{prefixes[field]}#{value}" unless value.blank? 
+ def report_design_to_tipline_search_result + if self.annotation_type == 'report_design' + TiplineSearchResult.new( + type: :fact_check, + team: self.annotated.team, + title: self.report_design_field_value('title'), + body: self.report_design_field_value('text'), + image_url: self.report_design_image_url, + language: self.report_design_field_value('language'), + url: self.report_design_field_value('published_article_url'), + format: (!self.report_design_field_value('use_text_message') && self.report_design_field_value('use_visual_card')) ? :image : :text + ) end - footer.join("\n") end def report_design_text(language = nil, hide_body = false) if self.annotation_type == 'report_design' - team = self.annotated.team - text = [] - title = self.report_design_field_value('title') - text << "*#{title.strip}*" unless title.blank? - text << self.report_design_field_value('text').to_s unless hide_body - url = self.report_design_field_value('published_article_url') - text << url unless url.blank? - text = text.collect do |part| - team.get_shorten_outgoing_urls ? UrlRewriter.shorten_and_utmize_urls(part, team.get_outgoing_urls_utm_code) : part - end - unless language.nil? - footer = self.report_design_text_footer(language) - text << footer if !footer.blank? 
&& self.report_design_team_setting_value('use_signature', language) - end - text.join("\n\n") + self.report_design_to_tipline_search_result.text(language, hide_body) end end @@ -214,10 +228,6 @@ def sent_count end def should_send_report_in_this_language?(language) - team = self.annotated.team - return true if team.get_languages.to_a.size < 2 - tbi = TeamBotInstallation.where(team_id: team.id, user: BotUser.alegre_user).last - should_send_report_in_different_language = !tbi&.alegre_settings&.dig('single_language_fact_checks_enabled') - self.annotation_type == 'report_design' && (self.report_design_field_value('language') == language || should_send_report_in_different_language) + self.annotation_type == 'report_design' && self.report_design_to_tipline_search_result.should_send_in_language?(language) end end diff --git a/config/initializers/session_store.rb b/config/initializers/session_store.rb index f8f9ad672c..533ebe8e78 100644 --- a/config/initializers/session_store.rb +++ b/config/initializers/session_store.rb @@ -1,3 +1,8 @@ # Be sure to restart your server when you modify this file. -Rails.application.config.session_store :cookie_store, key: '_checkdesk_session' +# Retrieve the session key and domain based on the environment using CheckConfig. +cookie_key = CheckConfig.get('session_store_key', '_checkdesk_session') +domain_setting = CheckConfig.get('session_store_domain', Rails.env.production? ? 'checkmedia.org' : 'localhost') + +# Configure the session store with the dynamically obtained session key and domain. +Rails.application.config.session_store :cookie_store, key: cookie_key, domain: domain_setting diff --git a/config/locales/en.yml b/config/locales/en.yml index 364f6241f5..aa441378fc 100644 --- a/config/locales/en.yml +++ b/config/locales/en.yml @@ -822,6 +822,7 @@ en: send_every_must_be_a_list_of_days_of_the_week: must be a list of days of the week. send_on_must_be_in_the_future: can't be in the past. 
cant_delete_default_folder: The default folder can't be deleted + explainer_and_item_must_be_from_the_same_team: Explainer and item must be from the same workspace. shared_feed_imported_media_already_exist: |- No media eligible to be imported into your workspace. The media selected to import already exist in your workspace in the following items: diff --git a/db/migrate/20240527011635_add_tags_to_explainers.rb b/db/migrate/20240527011635_add_tags_to_explainers.rb new file mode 100644 index 0000000000..c6e2d33e5b --- /dev/null +++ b/db/migrate/20240527011635_add_tags_to_explainers.rb @@ -0,0 +1,6 @@ +class AddTagsToExplainers < ActiveRecord::Migration[6.1] + def change + add_column :explainers, :tags, :string, array: true, default: [] + add_index :explainers, :tags, using: 'gin' + end +end diff --git a/db/migrate/20240528170336_add_tags_to_fact_checks.rb b/db/migrate/20240528170336_add_tags_to_fact_checks.rb new file mode 100644 index 0000000000..dbb9648a96 --- /dev/null +++ b/db/migrate/20240528170336_add_tags_to_fact_checks.rb @@ -0,0 +1,6 @@ +class AddTagsToFactChecks < ActiveRecord::Migration[6.1] + def change + add_column :fact_checks, :tags, :string, array: true, default: [] + add_index :fact_checks, :tags, using: 'gin' + end +end diff --git a/db/migrate/20240604045337_add_fields_to_fact_check.rb b/db/migrate/20240604045337_add_fields_to_fact_check.rb new file mode 100644 index 0000000000..fdca5e908f --- /dev/null +++ b/db/migrate/20240604045337_add_fields_to_fact_check.rb @@ -0,0 +1,10 @@ +class AddFieldsToFactCheck < ActiveRecord::Migration[6.1] + def change + add_column :fact_checks, :publisher_id, :integer, null: true + add_column :fact_checks, :report_status, :integer, null: true, default: 0 + add_column :fact_checks, :rating, :string, null: true + add_index :fact_checks, :publisher_id + add_index :fact_checks, :report_status + add_index :fact_checks, :rating + end +end diff --git a/db/migrate/20240613005052_create_explainer_items.rb 
b/db/migrate/20240613005052_create_explainer_items.rb new file mode 100644 index 0000000000..d5d658a3ff --- /dev/null +++ b/db/migrate/20240613005052_create_explainer_items.rb @@ -0,0 +1,10 @@ +class CreateExplainerItems < ActiveRecord::Migration[6.1] + def change + create_table :explainer_items do |t| + t.references :explainer, foreign_key: true + t.references :project_media, foreign_key: true + + t.timestamps + end + end +end diff --git a/db/migrate/20240619131714_add_team_id_to_claim_descriptions.rb b/db/migrate/20240619131714_add_team_id_to_claim_descriptions.rb new file mode 100644 index 0000000000..59221fbbc8 --- /dev/null +++ b/db/migrate/20240619131714_add_team_id_to_claim_descriptions.rb @@ -0,0 +1,6 @@ +class AddTeamIdToClaimDescriptions < ActiveRecord::Migration[6.1] + def change + add_reference :claim_descriptions, :team, index: true + change_column_null :claim_descriptions, :project_media_id, true + end +end diff --git a/db/migrate/20240713012502_add_imported_to_fact_checks.rb b/db/migrate/20240713012502_add_imported_to_fact_checks.rb new file mode 100644 index 0000000000..83474c2d6c --- /dev/null +++ b/db/migrate/20240713012502_add_imported_to_fact_checks.rb @@ -0,0 +1,6 @@ +class AddImportedToFactChecks < ActiveRecord::Migration[6.1] + def change + add_column :fact_checks, :imported, :boolean, default: false + add_index :fact_checks, :imported + end +end diff --git a/db/migrate/20240714051039_add_unique_index_to_explainer_items.rb b/db/migrate/20240714051039_add_unique_index_to_explainer_items.rb new file mode 100644 index 0000000000..2778977a6a --- /dev/null +++ b/db/migrate/20240714051039_add_unique_index_to_explainer_items.rb @@ -0,0 +1,5 @@ +class AddUniqueIndexToExplainerItems < ActiveRecord::Migration[6.1] + def change + add_index :explainer_items, [:explainer_id, :project_media_id], unique: true + end +end diff --git a/db/migrate/20240719183518_add_null_constraints_to_cluster.rb b/db/migrate/20240719183518_add_null_constraints_to_cluster.rb 
new file mode 100644 index 0000000000..6f52f646dd --- /dev/null +++ b/db/migrate/20240719183518_add_null_constraints_to_cluster.rb @@ -0,0 +1,5 @@ +class AddNullConstraintsToCluster < ActiveRecord::Migration[6.1] + def change + change_column_null(:clusters, :project_media_id, false) + end +end diff --git a/db/migrate/20240813155311_allow_empty_project_media_id_for_clusters.rb b/db/migrate/20240813155311_allow_empty_project_media_id_for_clusters.rb new file mode 100644 index 0000000000..d6db5939ca --- /dev/null +++ b/db/migrate/20240813155311_allow_empty_project_media_id_for_clusters.rb @@ -0,0 +1,5 @@ +class AllowEmptyProjectMediaIdForClusters < ActiveRecord::Migration[6.1] + def change + change_column_null(:clusters, :project_media_id, true) + end +end diff --git a/db/schema.rb b/db/schema.rb index 3620cd4a75..6cd154b4ef 100644 --- a/db/schema.rb +++ b/db/schema.rb @@ -10,7 +10,7 @@ # # It's strongly recommended that you check this file into your version control system. -ActiveRecord::Schema.define(version: 2024_04_20_104318) do +ActiveRecord::Schema.define(version: 2024_08_13_155311) do # These are extensions that must be enabled in order to support this database enable_extension "plpgsql" @@ -216,11 +216,13 @@ create_table "claim_descriptions", force: :cascade do |t| t.text "description" t.bigint "user_id", null: false - t.bigint "project_media_id", null: false + t.bigint "project_media_id" t.text "context" t.datetime "created_at", null: false t.datetime "updated_at", null: false + t.bigint "team_id" t.index ["project_media_id"], name: "index_claim_descriptions_on_project_media_id", unique: true + t.index ["team_id"], name: "index_claim_descriptions_on_team_id" t.index ["user_id"], name: "index_claim_descriptions_on_user_id" end @@ -289,7 +291,7 @@ t.jsonb "value_json", default: "{}" t.datetime "created_at", null: false t.datetime "updated_at", null: false - t.index "dynamic_annotation_fields_value(field_name, value)", name: "dynamic_annotation_fields_value", 
where: "((field_name)::text = ANY (ARRAY[('external_id'::character varying)::text, ('smooch_user_id'::character varying)::text, ('verification_status_status'::character varying)::text]))" + t.index "dynamic_annotation_fields_value(field_name, value)", name: "dynamic_annotation_fields_value", where: "((field_name)::text = ANY ((ARRAY['external_id'::character varying, 'smooch_user_id'::character varying, 'verification_status_status'::character varying])::text[]))" t.index ["annotation_id", "field_name"], name: "index_dynamic_annotation_fields_on_annotation_id_and_field_name" t.index ["annotation_id"], name: "index_dynamic_annotation_fields_on_annotation_id" t.index ["annotation_type"], name: "index_dynamic_annotation_fields_on_annotation_type" @@ -302,6 +304,16 @@ t.index ["value_json"], name: "index_dynamic_annotation_fields_on_value_json", using: :gin end + create_table "explainer_items", force: :cascade do |t| + t.bigint "explainer_id" + t.bigint "project_media_id" + t.datetime "created_at", precision: 6, null: false + t.datetime "updated_at", precision: 6, null: false + t.index ["explainer_id", "project_media_id"], name: "index_explainer_items_on_explainer_id_and_project_media_id", unique: true + t.index ["explainer_id"], name: "index_explainer_items_on_explainer_id" + t.index ["project_media_id"], name: "index_explainer_items_on_project_media_id" + end + create_table "explainers", force: :cascade do |t| t.string "title" t.text "description" @@ -311,6 +323,8 @@ t.bigint "team_id", null: false t.datetime "created_at", precision: 6, null: false t.datetime "updated_at", precision: 6, null: false + t.string "tags", default: [], array: true + t.index ["tags"], name: "index_explainers_on_tags", using: :gin t.index ["team_id"], name: "index_explainers_on_team_id" t.index ["user_id"], name: "index_explainers_on_user_id" end @@ -325,9 +339,19 @@ t.datetime "created_at", null: false t.datetime "updated_at", null: false t.string "signature" + t.string "tags", default: [], 
array: true + t.integer "publisher_id" + t.integer "report_status", default: 0 + t.string "rating" + t.boolean "imported", default: false t.index ["claim_description_id"], name: "index_fact_checks_on_claim_description_id", unique: true + t.index ["imported"], name: "index_fact_checks_on_imported" t.index ["language"], name: "index_fact_checks_on_language" + t.index ["publisher_id"], name: "index_fact_checks_on_publisher_id" + t.index ["rating"], name: "index_fact_checks_on_rating" + t.index ["report_status"], name: "index_fact_checks_on_report_status" t.index ["signature"], name: "index_fact_checks_on_signature", unique: true + t.index ["tags"], name: "index_fact_checks_on_tags", using: :gin t.index ["user_id"], name: "index_fact_checks_on_user_id" end @@ -486,7 +510,7 @@ t.index ["user_id"], name: "index_project_media_users_on_user_id" end - create_table "project_medias", force: :cascade do |t| + create_table "project_medias", id: :serial, force: :cascade do |t| t.integer "project_id" t.integer "media_id" t.integer "user_id" @@ -881,7 +905,7 @@ end create_table "versions", id: :serial, force: :cascade do |t| - t.string "item_type", null: false + t.string "item_type", null: false t.string "item_id", null: false t.string "event", null: false t.string "whodunnit" @@ -902,6 +927,8 @@ add_foreign_key "claim_descriptions", "project_medias" add_foreign_key "claim_descriptions", "users" + add_foreign_key "explainer_items", "explainers" + add_foreign_key "explainer_items", "project_medias" add_foreign_key "explainers", "teams" add_foreign_key "explainers", "users" add_foreign_key "fact_checks", "claim_descriptions" diff --git a/db/seeds.rb b/db/seeds.rb index a86380abf5..8f671dfc08 100644 --- a/db/seeds.rb +++ b/db/seeds.rb @@ -45,6 +45,13 @@ def open_file(file) BLANK_PARAMS = Array.new(8, { type: 'Blank' }) +STANDALONE_CLAIMS_FACT_CHECKS_PARAMS = (Array.new(8) do + { + description: Faker::Lorem.sentence, + context: Faker::Lorem.paragraph(sentence_count: 8) 
+ } +end) + class Setup private @@ -323,7 +330,7 @@ def populate_projects def publish_fact_checks users.each_value do |user| - fact_checks = FactCheck.where(user: user).last(items_total/2) + fact_checks = user.claim_descriptions.where.not(project_media_id: nil).includes(:fact_check).map { |claim| claim.fact_check }.compact!.last(items_total/2) fact_checks[0, (fact_checks.size/2)].each { |fact_check| verify_fact_check_and_publish_report(fact_check.project_media)} end end @@ -419,6 +426,13 @@ def tipline_requests end end + def verified_standalone_claims_and_fact_checks + users.each_value do |user| + standalone_claims_and_fact_checks(user) + verify_standalone_claims_and_fact_checks(user) + end + end + private def medias_params @@ -707,6 +721,30 @@ def imported_fact_check_params(media_type) def channel(media_type) media_type == "Blank" ? { main: CheckChannels::ChannelCodes::FETCH } : { main: CheckChannels::ChannelCodes::MANUAL } end + + def standalone_claims_and_fact_checks(user) + STANDALONE_CLAIMS_FACT_CHECKS_PARAMS.each.with_index do |params, index| + claim_description_attributes = { + description: params[:description], + context: params[:context], + user: user, + team: user.teams[0], + fact_check_attributes: fact_check_params_for_half_the_claims(index, user), + } + + ClaimDescription.create!(claim_description_attributes) + end + end + + def verify_standalone_claims_and_fact_checks(user) + status = ['undetermined', 'not_applicable', 'in_progress', 'verified', 'false'] + + fact_checks = user.claim_descriptions.where(project_media_id: nil).includes(:fact_check).map { |claim| claim.fact_check }.compact! # some claims don't have fact checks, so they return nil + fact_checks.each do |fact_check| + fact_check.rating = status.sample + fact_check.save! + end + end end puts "If you want to create a new user: press enter" @@ -747,10 +785,12 @@ def channel(media_type) populated_workspaces.tipline_requests puts 'Publishing half of each user\'s Fact Checks...' 
populated_workspaces.publish_fact_checks - puts 'Creating Clusters' + puts 'Creating Clusters...' populated_workspaces.clusters(feed_2) - puts 'Creating Explainers' + puts 'Creating Explainers...' populated_workspaces.explainers + puts 'Creating Standalone Claims and FactChecks with different statuses...' + populated_workspaces.verified_standalone_claims_and_fact_checks rescue RuntimeError => e if e.message.include?('We could not parse this link') puts "—————" diff --git a/lib/check_search.rb b/lib/check_search.rb index d10b2c6021..cbcd41cea6 100644 --- a/lib/check_search.rb +++ b/lib/check_search.rb @@ -46,7 +46,7 @@ def initialize(options, file = nil, team_id = Team.current&.id) @file = file end - MEDIA_TYPES = %w[claims links twitter youtube tiktok instagram facebook telegram weblink images videos audios blank] + MEDIA_TYPES = %w[claims links twitter youtube tiktok instagram facebook telegram weblink images videos audios] SORT_MAPPING = { 'recent_activity' => 'updated_at', 'recent_added' => 'created_at', 'demand' => 'demand', 'related' => 'linked_items_count', 'last_seen' => 'last_seen', 'share_count' => 'share_count', @@ -147,13 +147,16 @@ def number_of_items(collection) collection.limit(nil).reorder(nil).offset(nil).count end + def query_all_types? + MEDIA_TYPES.size == media_types_filter.size + end + def should_hit_elasticsearch? return true if feed_query? status_blank = true status_search_fields.each do |field| status_blank = false unless @options[field].blank? end - query_all_types = (MEDIA_TYPES.size == media_types_filter.size) filters_blank = true ['tags', 'keyword', 'rules', 'language', 'fc_language', 'request_language', 'report_language', 'team_tasks', 'assigned_to', 'report_status', 'range_numeric', 'has_claim', 'cluster_teams', 'published_by', 'annotated_by', 'channels', 'cluster_published_reports' @@ -161,7 +164,7 @@ def should_hit_elasticsearch? filters_blank = false unless @options[filter].blank? 
end range_filter = hit_es_for_range_filter - !(query_all_types && status_blank && filters_blank && !range_filter && ['recent_activity', 'recent_added', 'last_seen'].include?(@options['sort'])) + !(query_all_types? && status_blank && filters_blank && !range_filter && ['recent_activity', 'recent_added', 'last_seen'].include?(@options['sort'])) end def media_types_filter @@ -190,7 +193,7 @@ def item_navigation_offset query = { bool: { must: conditions, must_not: must_not } } $repository.count(query: query) else - condition = sort_type == :asc ? "#{sort_key} < ?" : "#{sort_key} > ?" + condition = sort_type == :asc ? "project_medias.#{sort_key} < ?" : "project_medias.#{sort_key} > ?" get_pg_results_for_media.where(condition, pm.send(sort_key)).count end end @@ -249,6 +252,7 @@ def get_pg_results_for_media core_conditions.merge!({ 'project_medias.id' => ids }) end relation = relation.distinct('project_medias.id').includes(:media).includes(:project).where(core_conditions) + relation = relation.joins(:media).where('medias.type != ?', 'Blank') if query_all_types? relation end diff --git a/lib/relay.idl b/lib/relay.idl index 25e0255a5f..4c7445b1ae 100644 --- a/lib/relay.idl +++ b/lib/relay.idl @@ -516,7 +516,7 @@ type ApiKeyEdge { """ A union type of all article types we can handle """ -union ArticleUnion = Explainer +union ArticleUnion = Explainer | FactCheck """ The connection type for ArticleUnion. @@ -728,6 +728,7 @@ type ClaimDescription implements Node { id: ID! permissions: String project_media: ProjectMedia + project_media_was: ProjectMedia updated_at: String user: User } @@ -1179,7 +1180,7 @@ input CreateClaimDescriptionInput { clientMutationId: String context: String description: String - project_media_id: Int! + project_media_id: Int } """ @@ -2535,10 +2536,37 @@ input CreateExplainerInput { clientMutationId: String description: String language: String - title: String! 
+ tags: [String] + title: String url: String } +""" +Autogenerated input type of CreateExplainerItem +""" +input CreateExplainerItemInput { + """ + A unique identifier for the client performing the mutation. + """ + clientMutationId: String + explainerId: Int! + projectMediaId: Int! +} + +""" +Autogenerated return type of CreateExplainerItem +""" +type CreateExplainerItemPayload { + """ + A unique identifier for the client performing the mutation. + """ + clientMutationId: String + explainer: Explainer + explainer_item: ExplainerItem + explainer_itemEdge: ExplainerItemEdge + project_media: ProjectMedia +} + """ Autogenerated return type of CreateExplainer """ @@ -2563,7 +2591,9 @@ input CreateFactCheckInput { """ clientMutationId: String language: String + rating: String summary: String! + tags: [String] title: String! url: String } @@ -2580,6 +2610,7 @@ type CreateFactCheckPayload { clientMutationId: String fact_check: FactCheck fact_checkEdge: FactCheckEdge + team: Team } """ @@ -4125,6 +4156,30 @@ input DestroyExplainerInput { id: ID } +""" +Autogenerated input type of DestroyExplainerItem +""" +input DestroyExplainerItemInput { + """ + A unique identifier for the client performing the mutation. + """ + clientMutationId: String + id: ID +} + +""" +Autogenerated return type of DestroyExplainerItem +""" +type DestroyExplainerItemPayload { + """ + A unique identifier for the client performing the mutation. + """ + clientMutationId: String + deletedId: ID + explainer: Explainer + project_media: ProjectMedia +} + """ Autogenerated return type of DestroyExplainer """ @@ -4159,6 +4214,7 @@ type DestroyFactCheckPayload { """ clientMutationId: String deletedId: ID + team: Team } """ @@ -8205,27 +8261,7 @@ type Explainer implements Node { id: ID! language: String permissions: String - tags( - """ - Returns the elements in the list that come after the specified cursor. - """ - after: String - - """ - Returns the elements in the list that come before the specified cursor. 
- """ - before: String - - """ - Returns the first _n_ elements from the list. - """ - first: Int - - """ - Returns the last _n_ elements from the list. - """ - last: Int - ): TagConnection + tags: [String] team: PublicTeam team_id: Int title: String @@ -8235,6 +8271,27 @@ type Explainer implements Node { user_id: Int } +""" +The connection type for Explainer. +""" +type ExplainerConnection { + """ + A list of edges. + """ + edges: [ExplainerEdge] + + """ + A list of nodes. + """ + nodes: [Explainer] + + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + totalCount: Int +} + """ An edge in a connection. """ @@ -8250,6 +8307,56 @@ type ExplainerEdge { node: Explainer } +""" +Explainer item type +""" +type ExplainerItem implements Node { + created_at: String + explainer: Explainer! + explainer_id: Int! + id: ID! + permissions: String + project_media: ProjectMedia! + project_media_id: Int! + updated_at: String +} + +""" +The connection type for ExplainerItem. +""" +type ExplainerItemConnection { + """ + A list of edges. + """ + edges: [ExplainerItemEdge] + + """ + A list of nodes. + """ + nodes: [ExplainerItem] + + """ + Information to aid in pagination. + """ + pageInfo: PageInfo! + totalCount: Int +} + +""" +An edge in a connection. +""" +type ExplainerItemEdge { + """ + A cursor for use in pagination. + """ + cursor: String! + + """ + The item at the end of the edge. + """ + node: ExplainerItem +} + """ Autogenerated input type of ExtractText """ @@ -8280,9 +8387,13 @@ type FactCheck implements Node { created_at: String dbid: Int id: ID! + imported: Boolean language: String permissions: String + rating: String + report_status: String summary: String + tags: [String] title: String updated_at: String url: String @@ -8788,6 +8899,28 @@ Me type """ type Me implements Node { accepted_terms: Boolean + accessible_teams( + """ + Returns the elements in the list that come after the specified cursor. 
+ """ + after: String + + """ + Returns the elements in the list that come before the specified cursor. + """ + before: String + + """ + Returns the first _n_ elements from the list. + """ + first: Int + + """ + Returns the last _n_ elements from the list. + """ + last: Int + ): TeamConnection + accessible_teams_count: Int annotations( """ Returns the elements in the list that come after the specified cursor. @@ -8906,6 +9039,7 @@ type Me implements Node { last: Int status: String ): TeamUserConnection + team_users_count(status: String): Int teams( """ Returns the elements in the list that come after the specified cursor. @@ -9315,6 +9449,12 @@ type MutationType { """ input: CreateExplainerInput! ): CreateExplainerPayload + createExplainerItem( + """ + Parameters for CreateExplainerItem + """ + input: CreateExplainerItemInput! + ): CreateExplainerItemPayload createFactCheck( """ Parameters for CreateFactCheck @@ -9669,6 +9809,12 @@ type MutationType { """ input: DestroyExplainerInput! ): DestroyExplainerPayload + destroyExplainerItem( + """ + Parameters for DestroyExplainerItem + """ + input: DestroyExplainerItemInput! + ): DestroyExplainerItemPayload destroyFactCheck( """ Parameters for DestroyFactCheck @@ -10495,6 +10641,7 @@ type ProjectMedia implements Node { ): AnnotationUnionConnection annotations_count(annotation_type: String!): Int archived: Int + articles_count: Int assignments( """ Returns the elements in the list that come after the specified cursor. @@ -11365,6 +11512,50 @@ type ProjectMedia implements Node { """ last: Int ): DynamicConnection + explainer_items( + """ + Returns the elements in the list that come after the specified cursor. + """ + after: String + + """ + Returns the elements in the list that come before the specified cursor. + """ + before: String + + """ + Returns the first _n_ elements from the list. + """ + first: Int + + """ + Returns the last _n_ elements from the list. 
+ """ + last: Int + ): ExplainerItemConnection + explainers( + """ + Returns the elements in the list that come after the specified cursor. + """ + after: String + + """ + Returns the elements in the list that come before the specified cursor. + """ + before: String + + """ + Returns the first _n_ elements from the list. + """ + first: Int + + """ + Returns the last _n_ elements from the list. + """ + last: Int + ): ExplainerConnection + fact_check: FactCheck + fact_check_id: Int fact_check_published_on: Int feed_columns_values: JsonStringType field_value(annotation_type_field_name: String!): String @@ -11406,7 +11597,6 @@ type ProjectMedia implements Node { last_status: String last_status_obj: Dynamic linked_items_count: Int - list_columns_values: JsonStringType log( """ Returns the elements in the list that come after the specified cursor. @@ -11499,6 +11689,7 @@ type ProjectMedia implements Node { source_id: Int status: String suggested_main_item: ProjectMedia + suggested_main_relationship: Relationship suggested_similar_items_count: Int suggested_similar_relationships( """ @@ -11718,6 +11909,16 @@ type Query { bot_user(id: ID!): BotUser dynamic_annotation_field(only_cache: Boolean, query: String!): DynamicAnnotationField + """ + Information about the explainer with given id + """ + explainer(id: ID!): Explainer + + """ + Information about the fact_check with given id + """ + fact_check(id: ID!): FactCheck + """ Information about the feed with given id """ @@ -12922,12 +13123,27 @@ type Team implements Node { Returns the first _n_ elements from the list. """ first: Int + imported: Boolean + language: [String] """ Returns the last _n_ elements from the list. 
""" last: Int + offset: Int = 0 + publisher_ids: [Int] + rating: [String] + report_status: [String] + sort: String = "title" + sort_type: String = "ASC" + standalone: Boolean + tags: [String] + target_id: Int + text: String + updated_at: String + user_ids: [Int] ): ArticleUnionConnection + articles_count(article_type: String, imported: Boolean, language: [String], publisher_ids: [Int], rating: [String], report_status: [String], standalone: Boolean, tags: [String], target_id: Int, text: String, updated_at: String, user_ids: [Int]): Int available_newsletter_header_types: JsonStringType avatar: String check_search_spam: CheckSearch @@ -13022,7 +13238,6 @@ type Team implements Node { """ last: Int ): TeamUserConnection - list_columns: JsonStringType medias_count: Int members_count: Int name: String! @@ -13842,6 +14057,7 @@ input UpdateClaimDescriptionInput { context: String description: String id: ID + project_media_id: Int } """ @@ -15344,7 +15560,8 @@ input UpdateExplainerInput { description: String id: ID language: String - title: String! + tags: [String] + title: String url: String } @@ -15371,7 +15588,9 @@ input UpdateFactCheckInput { clientMutationId: String id: ID language: String + rating: String summary: String + tags: [String] title: String url: String } @@ -15388,6 +15607,7 @@ type UpdateFactCheckPayload { clientMutationId: String fact_check: FactCheck fact_checkEdge: FactCheckEdge + team: Team } """ @@ -15846,7 +16066,6 @@ input UpdateTeamInput { language: String language_detection: Boolean languages: JsonStringType - list_columns: JsonStringType media_verification_statuses: JsonStringType name: String outgoing_urls_utm_code: String diff --git a/lib/sample_data.rb b/lib/sample_data.rb index b1708e3e8b..37c5eca8f3 100644 --- a/lib/sample_data.rb +++ b/lib/sample_data.rb @@ -874,6 +874,7 @@ def create_tipline_request(options = {}) end def create_cluster(options = {}) + options[:project_media] = create_project_media if options[:project_media].blank? 
team = options[:project_media]&.team || create_team options[:feed] = options[:feed] || create_feed({ team: team }) c = Cluster.new diff --git a/lib/tasks/check_khousheh.rake b/lib/tasks/check_khousheh.rake index f56e824f19..098d9f3c06 100644 --- a/lib/tasks/check_khousheh.rake +++ b/lib/tasks/check_khousheh.rake @@ -192,6 +192,8 @@ namespace :check do Cluster.transaction do # Create clusters mapping = {} # Media ID => Cluster ID + # Cluster to delete in case there is no center (project_media_id) + cluster_to_delete = [] # Bulk-insert clusters c_inserted_items = [] clusters.length.times.each_slice(2500) do |rows| @@ -277,7 +279,12 @@ namespace :check do cluster_title = cluster_center == pm.id ? pm.title : cluster.title updated_cluster_attributes[:title] = cluster_title # Update cluster - cluster_items[cluster.id] = updated_cluster_attributes + if updated_cluster_attributes[:project_media_id].blank? + cluster_to_delete << cluster.id + error_logs << {Cluster: "Failed to update Cluster with id #{cluster.id}"} + else + cluster_items[cluster.id] = updated_cluster_attributes + end end end # Bulk-update Cluster @@ -299,6 +306,8 @@ namespace :check do end search_after = [pm_ids.max] end + # Delete cluster with no project_media_id + Cluster.where(id: cluster_to_delete).delete_all Team.current = nil end puts "\nRebuilding clusters for feed #{feed.name} took #{Time.now.to_f - started_at} seconds." diff --git a/lib/tasks/data/statistics.rake b/lib/tasks/data/statistics.rake index d01967c16b..a1c5c1a3f7 100644 --- a/lib/tasks/data/statistics.rake +++ b/lib/tasks/data/statistics.rake @@ -28,11 +28,7 @@ namespace :check do team = Team.find(team_id) languages = team.get_languages.to_a - if bot.user == BotUser.smooch_user - platforms = bot.smooch_enabled_integrations.keys - else - platforms = Bot::Smooch::SUPPORTED_INTEGRATION_NAMES.keys - end + platforms = bot.user == BotUser.smooch_user ? 
bot.smooch_enabled_integrations.keys : Bot::Smooch::SUPPORTED_INTEGRATION_NAMES.keys team_stats = Hash.new(0) puts "[#{Time.now}] Generating month tipline statistics for team with ID #{team_id}. (#{index + 1} / #{team_ids.length})" @@ -101,40 +97,39 @@ namespace :check do raise Check::Statistics::IncompleteRunError.new("Failed to calculate #{errors.length} monthly team statistics") if errors.any? end - # bundle exec rake check:data:regenerate_statistics[unique_newsletters_sent] - desc 'Regenerate specified historic statistic for all workspaces' - task :regenerate_statistics, [:stats_to_generate] => [:environment] do |_t, args| + # bundle exec rake check:data:regenerate_statistics[start_date] + desc 'Regenerate all historic statistics for all workspaces from a given start date' + task :regenerate_statistics, [:start_date] => [:environment] do |_t, args| old_logger = ActiveRecord::Base.logger ActiveRecord::Base.logger = nil - puts "[#{Time.now}] Attempting to regenerate keys: #{args.stats_to_generate}" + start_date = DateTime.parse(args.start_date) rescue nil + if start_date.nil? + $stderr.puts "Invalid or missing start_date argument" + raise Check::Statistics::ArgumentError.new("Invalid or missing start_date argument") + end + + puts "[#{Time.now}] Starting to regenerate all statistics from #{start_date}" begin - # Give user help if they want it supported_stats = %w( unique_newsletters_sent ) - # Make sure we have at least one valid argument - requested_stats = (args.stats_to_generate || '').split(',').map(&:strip) - valid_requested_stats = requested_stats.intersection(supported_stats) - unless valid_requested_stats.length > 0 - raise Check::Statistics::ArgumentError.new("Argument '#{args.stats_to_generate}' is invalid. We currently support the following values passed a comma-separated list: #{supported_stats.join(',')}.") - end - - puts "[#{Time.now}] Regenerating stats for the following keys: #{valid_requested_stats}. 
Total to update: #{MonthlyTeamStatistic.count}" + puts "[#{Time.now}] Regenerating stats for the following keys: #{supported_stats}. Total to update: #{MonthlyTeamStatistic.where('start_date >= ?', start_date).count}" - # Update all of the stats total_successful = Hash.new(0) - MonthlyTeamStatistic.find_each do |monthly_stats| + MonthlyTeamStatistic.where('start_date >= ?', start_date).find_each do |monthly_stats| team_id = monthly_stats.team_id start_date = monthly_stats.start_date end_date = monthly_stats.end_date language = monthly_stats.language begin - if valid_requested_stats.include?('unique_newsletters_sent') - monthly_stats.update!(unique_newsletters_sent: CheckStatistics.number_of_newsletters_sent(team_id, start_date, end_date, language)) - total_successful[:unique_newsletters_sent] += 1 + supported_stats.each do |stat| + method_name = :number_of_newsletters_sent + result = CheckStatistics.send(method_name, team_id, start_date, end_date, language) + monthly_stats.update!(stat => result) + total_successful[stat.to_sym] += 1 end rescue StandardError => e $stderr.puts "[#{Time.now}] Failed to update MonthlyTeamStatistic with ID #{monthly_stats.id}. Error: #{e}" @@ -144,7 +139,6 @@ namespace :check do puts "[#{Time.now}] Finished updating MonthlyTeamStatistics. 
Total updated: #{total_successful}" rescue StandardError => e $stderr.puts e - next ensure ActiveRecord::Base.logger = old_logger end diff --git a/lib/tasks/migrate/20220703070839_add_language_to_fact_check.rake b/lib/tasks/migrate/20220703070839_add_language_to_fact_check.rake deleted file mode 100644 index bd6971e493..0000000000 --- a/lib/tasks/migrate/20220703070839_add_language_to_fact_check.rake +++ /dev/null @@ -1,26 +0,0 @@ -namespace :check do - namespace :migrate do - task add_language_to_fact_check: :environment do - started = Time.now.to_i - # Get latest team id - last_team_id = Rails.cache.read('check:migrate:add_language_to_fact_check:team_id') || 0 - Team.where('id > ?', last_team_id).find_each do |team| - puts "Processing team [#{team.slug}]" - language = team.default_language || 'en' - team.project_medias.select('fc.*') - .joins("INNER JOIN claim_descriptions cd ON project_medias.id = cd.project_media_id") - .joins("INNER JOIN fact_checks fc ON cd.id = fc.claim_description_id") - .find_in_batches(:batch_size => 2500) do |items| - ids = [] - items.each{ |i| ids << i['id'] } - puts "ids are :: #{ids.inspect}" - FactCheck.where(id: ids).update_all(language: language) - end - # log last team id - Rails.cache.write('check:migrate:add_language_to_fact_check:team_id', team.id) - end - minutes = ((Time.now.to_i - started) / 60).to_i - puts "[#{Time.now}] Done in #{minutes} minutes." 
- end - end -end \ No newline at end of file diff --git a/lib/tasks/migrate/20240703070839_add_language_to_fact_check.rake b/lib/tasks/migrate/20240703070839_add_language_to_fact_check.rake new file mode 100644 index 0000000000..5eedd5f9da --- /dev/null +++ b/lib/tasks/migrate/20240703070839_add_language_to_fact_check.rake @@ -0,0 +1,68 @@ +namespace :check do + namespace :migrate do + task add_language_to_fact_check: :environment do + started = Time.now.to_i + # Get latest team id + last_team_id = Rails.cache.read('check:migrate:add_language_to_fact_check:team_id') || 0 + Team.where('id > ?', last_team_id).find_each do |team| + puts "Processing team [#{team.slug}]" + language = team.default_language || 'en' + team.project_medias.select('fc.*') + .joins("INNER JOIN claim_descriptions cd ON project_medias.id = cd.project_media_id") + .joins("INNER JOIN fact_checks fc ON cd.id = fc.claim_description_id") + .find_in_batches(:batch_size => 2500) do |items| + ids = [] + items.each{ |i| ids << i['id'] } + puts "ids are :: #{ids.inspect}" + FactCheck.where(id: ids).update_all(language: language) + end + # log last team id + Rails.cache.write('check:migrate:add_language_to_fact_check:team_id', team.id) + end + minutes = ((Time.now.to_i - started) / 60).to_i + puts "[#{Time.now}] Done in #{minutes} minutes." + end + + task add_report_information_to_fact_check: :environment do + started = Time.now.to_i + # Get latest team id + last_team_id = Rails.cache.read('check:migrate:add_report_information_to_fact_check:team_id') || 0 + Team.where('id > ?', last_team_id).find_each do |team| + puts "Processing team [#{team.slug}]" + team.project_medias.select('project_medias.id as id, fc.id as fc_id') + .joins("INNER JOIN claim_descriptions cd ON project_medias.id = cd.project_media_id") + .joins("INNER JOIN fact_checks fc ON cd.id = fc.claim_description_id") + .find_in_batches(:batch_size => 2500) do |items| + print '.' 
+ pm_fc = {} + items.each{ |i| pm_fc[i['id']] = i['fc_id'] } + fc_fields = {} + # Add rating (depend on status cached field) + ProjectMedia.where(id: pm_fc.keys).find_each do |pm| + print '.' + tags = pm.tags_as_sentence.split(',') + fc_fields[pm_fc[pm.id]] = { rating: pm.status, tags: tags } + end + # Collect report designer + Dynamic.where(annotation_type: 'report_design', annotated_type: 'ProjectMedia', annotated_id: pm_fc.keys).find_each do |rd| + print '.' + # Get report status and publisher id + state = rd.data['state'] + publisher_id = state == 'published' ? rd.annotator_id : nil + fc_fields[pm_fc[rd.annotated_id]].merge!({ publisher_id: publisher_id, report_status: state }) + end + fc_items = [] + FactCheck.where(id: pm_fc.values).find_each do |fc| + fc_fields[fc.id].each { |field, value| fc.send("#{field}=", value) } + fc_items << fc.attributes + end + FactCheck.upsert_all(fc_items) + end + # log last team id + Rails.cache.write('check:migrate:add_report_information_to_fact_check:team_id', team.id) + end + minutes = ((Time.now.to_i - started) / 60).to_i + puts "[#{Time.now}] Done in #{minutes} minutes." 
+ end + end +end \ No newline at end of file diff --git a/lib/tasks/migrate/20240713012502_set_imported_for_fact_checks.rake b/lib/tasks/migrate/20240713012502_set_imported_for_fact_checks.rake new file mode 100644 index 0000000000..23b85fc7eb --- /dev/null +++ b/lib/tasks/migrate/20240713012502_set_imported_for_fact_checks.rake @@ -0,0 +1,20 @@ +namespace :check do + namespace :migrate do + task set_imported_for_fact_checks: :environment do + puts "[#{Time.now}] Setting imported field for existing fact-checks" + started = Time.now.to_i + BATCH_SIZE = 1000 + query = FactCheck.joins(:user).where('users.type' => 'BotUser').where(imported: false) + count = query.count + total = 0 + while count > 0 + puts "[#{Time.now}] Updating maximum #{BATCH_SIZE} fact-checks, out of #{count}" + query.limit(BATCH_SIZE).update_all(imported: true) + total += (BATCH_SIZE < count ? BATCH_SIZE : count) + count = query.count + end + minutes = ((Time.now.to_i - started) / 60).to_i + puts "[#{Time.now}] Done in #{minutes} minutes. Updated #{total} fact-checks." + end + end +end diff --git a/lib/tasks/migrate/20240715013839_add_team_id_to_claim_description.rake b/lib/tasks/migrate/20240715013839_add_team_id_to_claim_description.rake new file mode 100644 index 0000000000..35fc4517e2 --- /dev/null +++ b/lib/tasks/migrate/20240715013839_add_team_id_to_claim_description.rake @@ -0,0 +1,26 @@ +namespace :check do + namespace :migrate do + task add_team_id_to_claim_description: :environment do |_t, args| + started = Time.now.to_i + slugs = args.extras + condition = {} + if slugs.blank? + last_team_id = Rails.cache.read('check:migrate:add_team_id_to_claim_description:team_id') || 0 + else + last_team_id = 0 + condition = { slug: slugs } + end + Team.where(condition).where('id > ?', last_team_id).find_each do |team| + puts "Processing team [#{team.slug}]" + team.project_medias.joins(:claim_description).find_in_batches(batch_size: 2500) do |pms| + print '.' 
+ ids = pms.map(&:id) + ClaimDescription.where(project_media_id: ids).update_all(team_id: team.id) + end + Rails.cache.write('check:migrate:add_team_id_to_claim_description:team_id', team.id) if slugs.blank? + end + minutes = ((Time.now.to_i - started) / 60).to_i + puts "[#{Time.now}] Done in #{minutes} minutes." + end + end +end \ No newline at end of file diff --git a/lib/tasks/migrate/20240725173311_migrate_claims_without_fact_checks.rake b/lib/tasks/migrate/20240725173311_migrate_claims_without_fact_checks.rake new file mode 100644 index 0000000000..cb3b525f0f --- /dev/null +++ b/lib/tasks/migrate/20240725173311_migrate_claims_without_fact_checks.rake @@ -0,0 +1,34 @@ +namespace :check do + namespace :migrate do + task migrate_claims_without_fact_checks: :environment do + started = Time.now.to_i + last_cd_id = Rails.cache.read('check:migrate:migrate_claims_without_fact_checks:claim_description_id') || 0 + ClaimDescription.where('claim_descriptions.id > ?', last_cd_id) + .joins("LEFT JOIN fact_checks fc ON claim_descriptions.id = fc.claim_description_id") + .where('fc.id IS NULL').find_in_batches(batch_size: 2500) do |cds| + print '.' + fc_items = [] + # Get default language for claim description team + team_ids = cds.map(&:team_id).uniq + team_language = {} + Team.where(id: team_ids).find_each{|t| team_language[t.id] = t.default_language } + cds.each do |cd| + fc_items << { + claim_description_id: cd.id, + user_id: cd.user_id, + summary: '-', + title: '-', + language: team_language[cd.team_id], + created_at: cd.created_at, + updated_at: cd.updated_at + } + end + FactCheck.insert_all(fc_items) + max_id = cds.map(&:id).max + Rails.cache.write('check:migrate:migrate_claims_without_fact_checks:claim_description_id', max_id) + end + minutes = ((Time.now.to_i - started) / 60).to_i + puts "[#{Time.now}] Done in #{minutes} minutes." 
+ end + end +end diff --git a/public/relay.json b/public/relay.json index 458c1b5863..10d08ecdd4 100644 --- a/public/relay.json +++ b/public/relay.json @@ -2394,6 +2394,11 @@ "kind": "OBJECT", "name": "Explainer", "ofType": null + }, + { + "kind": "OBJECT", + "name": "FactCheck", + "ofType": null } ] }, @@ -3691,6 +3696,20 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "project_media_was", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ProjectMedia", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "updated_at", "description": null, @@ -5880,13 +5899,9 @@ "name": "project_media_id", "description": null, "type": { - "kind": "NON_NULL", - "name": null, - "ofType": { - "kind": "SCALAR", - "name": "Int", - "ofType": null - } + "kind": "SCALAR", + "name": "Int", + "ofType": null }, "defaultValue": null, "isDeprecated": false, @@ -15206,13 +15221,9 @@ "name": "title", "description": null, "type": { - "kind": "NON_NULL", - "name": null, - "ofType": { - "kind": "SCALAR", - "name": "String", - "ofType": null - } + "kind": "SCALAR", + "name": "String", + "ofType": null }, "defaultValue": null, "isDeprecated": false, @@ -15254,6 +15265,77 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "tags", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "CreateExplainerItemInput", + "description": 
"Autogenerated input type of CreateExplainerItem", + "fields": null, + "inputFields": [ + { + "name": "explainerId", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "projectMediaId", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "clientMutationId", "description": "A unique identifier for the client performing the mutation.", @@ -15271,6 +15353,89 @@ "enumValues": null, "possibleTypes": null }, + { + "kind": "OBJECT", + "name": "CreateExplainerItemPayload", + "description": "Autogenerated return type of CreateExplainerItem", + "fields": [ + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "explainer", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Explainer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "explainer_item", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ExplainerItem", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "explainer_itemEdge", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ExplainerItemEdge", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "project_media", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ProjectMedia", + "ofType": null + }, + 
"isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + + ], + "enumValues": null, + "possibleTypes": null + }, { "kind": "OBJECT", "name": "CreateExplainerPayload", @@ -15370,6 +15535,34 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "tags", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "rating", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "title", "description": null, @@ -15495,6 +15688,20 @@ }, "isDeprecated": false, "deprecationReason": null + }, + { + "name": "team", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Team", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null } ], "inputFields": null, @@ -23520,10 +23727,114 @@ "enumValues": null, "possibleTypes": null }, + { + "kind": "INPUT_OBJECT", + "name": "DestroyExplainerItemInput", + "description": "Autogenerated input type of DestroyExplainerItem", + "fields": null, + "inputFields": [ + { + "name": "id", + "description": null, + "type": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, { "kind": "OBJECT", - "name": "DestroyExplainerPayload", - "description": "Autogenerated return type of DestroyExplainer", + 
"name": "DestroyExplainerItemPayload", + "description": "Autogenerated return type of DestroyExplainerItem", + "fields": [ + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "deletedId", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "explainer", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Explainer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "project_media", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ProjectMedia", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "DestroyExplainerPayload", + "description": "Autogenerated return type of DestroyExplainer", "fields": [ { "name": "clientMutationId", @@ -23656,6 +23967,20 @@ }, "isDeprecated": false, "deprecationReason": null + }, + { + "name": "team", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Team", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null } ], "inputFields": null, @@ -44414,59 +44739,16 @@ "name": "tags", "description": null, "args": [ - { - "name": "after", - "description": "Returns the elements in the list that come after the specified cursor.", - "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "before", - "description": "Returns the elements in the list that come 
before the specified cursor.", - "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "first", - "description": "Returns the first _n_ elements from the list.", - "type": { - "kind": "SCALAR", - "name": "Int", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "last", - "description": "Returns the last _n_ elements from the list.", - "type": { - "kind": "SCALAR", - "name": "Int", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - } + ], "type": { - "kind": "OBJECT", - "name": "TagConnection", - "ofType": null + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } }, "isDeprecated": false, "deprecationReason": null @@ -44583,21 +44865,21 @@ }, { "kind": "OBJECT", - "name": "ExplainerEdge", - "description": "An edge in a connection.", + "name": "ExplainerConnection", + "description": "The connection type for Explainer.", "fields": [ { - "name": "cursor", - "description": "A cursor for use in pagination.", + "name": "edges", + "description": "A list of edges.", "args": [ ], "type": { - "kind": "NON_NULL", + "kind": "LIST", "name": null, "ofType": { - "kind": "SCALAR", - "name": "String", + "kind": "OBJECT", + "name": "ExplainerEdge", "ofType": null } }, @@ -44605,94 +44887,95 @@ "deprecationReason": null }, { - "name": "node", - "description": "The item at the end of the edge.", + "name": "nodes", + "description": "A list of nodes.", "args": [ ], "type": { - "kind": "OBJECT", - "name": "Explainer", - "ofType": null + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Explainer", + "ofType": null + } }, "isDeprecated": false, "deprecationReason": null - } - ], - "inputFields": null, - "interfaces": [ - - ], - "enumValues": null, - "possibleTypes": null - }, - { - 
"kind": "INPUT_OBJECT", - "name": "ExtractTextInput", - "description": "Autogenerated input type of ExtractText", - "fields": null, - "inputFields": [ + }, { - "name": "id", - "description": null, + "name": "pageInfo", + "description": "Information to aid in pagination.", + "args": [ + + ], "type": { "kind": "NON_NULL", "name": null, "ofType": { - "kind": "SCALAR", - "name": "ID", + "kind": "OBJECT", + "name": "PageInfo", "ofType": null } }, - "defaultValue": null, "isDeprecated": false, "deprecationReason": null }, { - "name": "clientMutationId", - "description": "A unique identifier for the client performing the mutation.", + "name": "totalCount", + "description": null, + "args": [ + + ], "type": { "kind": "SCALAR", - "name": "String", + "name": "Int", "ofType": null }, - "defaultValue": null, "isDeprecated": false, "deprecationReason": null } ], - "interfaces": null, + "inputFields": null, + "interfaces": [ + + ], "enumValues": null, "possibleTypes": null }, { "kind": "OBJECT", - "name": "ExtractTextPayload", - "description": "Autogenerated return type of ExtractText", + "name": "ExplainerEdge", + "description": "An edge in a connection.", "fields": [ { - "name": "clientMutationId", - "description": "A unique identifier for the client performing the mutation.", + "name": "cursor", + "description": "A cursor for use in pagination.", "args": [ ], "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } }, "isDeprecated": false, "deprecationReason": null }, { - "name": "project_media", - "description": null, + "name": "node", + "description": "The item at the end of the edge.", "args": [ ], "type": { "kind": "OBJECT", - "name": "ProjectMedia", + "name": "Explainer", "ofType": null }, "isDeprecated": false, @@ -44708,47 +44991,55 @@ }, { "kind": "OBJECT", - "name": "FactCheck", - "description": "FactCheck type", + "name": "ExplainerItem", + 
"description": "Explainer item type", "fields": [ { - "name": "claim_description", + "name": "created_at", "description": null, "args": [ ], "type": { - "kind": "OBJECT", - "name": "ClaimDescription", + "kind": "SCALAR", + "name": "String", "ofType": null }, "isDeprecated": false, "deprecationReason": null }, { - "name": "created_at", + "name": "explainer", "description": null, "args": [ ], "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "Explainer", + "ofType": null + } }, "isDeprecated": false, "deprecationReason": null }, { - "name": "dbid", + "name": "explainer_id", "description": null, "args": [ ], "type": { - "kind": "SCALAR", - "name": "Int", - "ofType": null + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } }, "isDeprecated": false, "deprecationReason": null @@ -44771,6 +45062,367 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "permissions", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "project_media", + "description": null, + "args": [ + + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ProjectMedia", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "project_media_id", + "description": null, + "args": [ + + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updated_at", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + { + 
"kind": "INTERFACE", + "name": "Node", + "ofType": null + } + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ExplainerItemConnection", + "description": "The connection type for ExplainerItem.", + "fields": [ + { + "name": "edges", + "description": "A list of edges.", + "args": [ + + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ExplainerItemEdge", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "nodes", + "description": "A list of nodes.", + "args": [ + + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "ExplainerItem", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "pageInfo", + "description": "Information to aid in pagination.", + "args": [ + + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "OBJECT", + "name": "PageInfo", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "totalCount", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ExplainerItemEdge", + "description": "An edge in a connection.", + "fields": [ + { + "name": "cursor", + "description": "A cursor for use in pagination.", + "args": [ + + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "node", + "description": "The item at the end of the edge.", + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ExplainerItem", + "ofType": null + }, + "isDeprecated": false, + 
"deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "ExtractTextInput", + "description": "Autogenerated input type of ExtractText", + "fields": null, + "inputFields": [ + { + "name": "id", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ExtractTextPayload", + "description": "Autogenerated return type of ExtractText", + "fields": [ + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "project_media", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ProjectMedia", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "FactCheck", + "description": "FactCheck type", + "fields": [ + { + "name": "claim_description", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ClaimDescription", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "created_at", + "description": null, + "args": [ + + ], + "type": { + 
"kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "dbid", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "id", + "description": null, + "args": [ + + ], + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "imported", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "language", "description": null, @@ -44799,6 +45451,34 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "rating", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "report_status", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "summary", "description": null, @@ -44813,6 +45493,24 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "tags", + "description": null, + "args": [ + + ], + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "title", "description": null, @@ -47741,6 +48439,81 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "accessible_teams", + "description": null, + "args": [ + { + "name": "after", + "description": "Returns the elements in the list that come after the specified cursor.", + "type": { + "kind": "SCALAR", + "name": "String", 
+ "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "before", + "description": "Returns the elements in the list that come before the specified cursor.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "first", + "description": "Returns the first _n_ elements from the list.", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "last", + "description": "Returns the last _n_ elements from the list.", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "TeamConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "accessible_teams_count", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "annotations", "description": null, @@ -48482,6 +49255,31 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "team_users_count", + "description": null, + "args": [ + { + "name": "status", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "teams", "description": null, @@ -50522,6 +51320,35 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "createExplainerItem", + "description": null, + "args": [ + { + "name": "input", + "description": "Parameters for 
CreateExplainerItem", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "CreateExplainerItemInput", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "CreateExplainerItemPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "createFactCheck", "description": null, @@ -52233,6 +53060,35 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "destroyExplainerItem", + "description": null, + "args": [ + { + "name": "input", + "description": "Parameters for DestroyExplainerItem", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "DestroyExplainerItemInput", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "DestroyExplainerItemPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "destroyFactCheck", "description": null, @@ -55337,6 +56193,11 @@ "name": "Explainer", "ofType": null }, + { + "kind": "OBJECT", + "name": "ExplainerItem", + "ofType": null + }, { "kind": "OBJECT", "name": "FactCheck", @@ -56635,6 +57496,20 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "articles_count", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "assignments", "description": null, @@ -59732,6 +60607,156 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "explainer_items", + "description": null, + "args": [ + { + "name": "after", + "description": "Returns the elements in the list that come after the specified cursor.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, 
+ "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "before", + "description": "Returns the elements in the list that come before the specified cursor.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "first", + "description": "Returns the first _n_ elements from the list.", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "last", + "description": "Returns the last _n_ elements from the list.", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "ExplainerItemConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "explainers", + "description": null, + "args": [ + { + "name": "after", + "description": "Returns the elements in the list that come after the specified cursor.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "before", + "description": "Returns the elements in the list that come before the specified cursor.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "first", + "description": "Returns the first _n_ elements from the list.", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "last", + "description": "Returns the last _n_ elements from the list.", + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + 
"isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "ExplainerConnection", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fact_check", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "FactCheck", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fact_check_id", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "fact_check_published_on", "description": null, @@ -60103,20 +61128,6 @@ "isDeprecated": false, "deprecationReason": null }, - { - "name": "list_columns_values", - "description": null, - "args": [ - - ], - "type": { - "kind": "SCALAR", - "name": "JsonStringType", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, { "name": "log", "description": null, @@ -60710,6 +61721,20 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "suggested_main_relationship", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Relationship", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "suggested_similar_items_count", "description": null, @@ -61899,6 +62924,64 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "explainer", + "description": "Information about the explainer with given id", + "args": [ + { + "name": "id", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "Explainer", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "fact_check", + "description": "Information 
about the fact_check with given id", + "args": [ + { + "name": "id", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "FactCheck", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "feed", "description": "Information about the feed with given id", @@ -67712,6 +68795,198 @@ "defaultValue": null, "isDeprecated": false, "deprecationReason": null + }, + { + "name": "offset", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": "0", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "sort", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": "\"title\"", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "sort_type", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": "\"ASC\"", + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user_ids", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "tags", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "language", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, 
+ { + "name": "updated_at", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "text", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "standalone", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "publisher_ids", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "report_status", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "rating", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "imported", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "target_id", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null } ], "type": { @@ -67722,6 +68997,187 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "articles_count", + "description": null, + "args": [ + { + "name": "article_type", + 
"description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "user_ids", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "tags", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "language", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "updated_at", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "text", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "standalone", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "publisher_ids", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "report_status", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + 
"defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "rating", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "imported", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "target_id", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "available_newsletter_header_types", "description": null, @@ -68362,20 +69818,6 @@ "isDeprecated": false, "deprecationReason": null }, - { - "name": "list_columns", - "description": null, - "args": [ - - ], - "type": { - "kind": "SCALAR", - "name": "JsonStringType", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, { "name": "medias_count", "description": null, @@ -72853,6 +74295,18 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "project_media_id", + "description": null, + "type": { + "kind": "SCALAR", + "name": "Int", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "clientMutationId", "description": "A unique identifier for the client performing the mutation.", @@ -83620,8 +85074,246 @@ }, { "kind": "OBJECT", - "name": "UpdateDynamicPayload", - "description": "Autogenerated return type of UpdateDynamic", + "name": "UpdateDynamicPayload", + "description": "Autogenerated return type of UpdateDynamic", + "fields": [ + { + "name": "clientMutationId", + "description": "A unique 
identifier for the client performing the mutation.", + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "dynamic", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Dynamic", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "dynamicEdge", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "DynamicEdge", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "project", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Project", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "project_media", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "ProjectMedia", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "source", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Source", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "task", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Task", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "version", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Version", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "versionEdge", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "VersionEdge", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + + ], + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "INPUT_OBJECT", + "name": "UpdateExplainerInput", + 
"description": "Autogenerated input type of UpdateExplainer", + "fields": null, + "inputFields": [ + { + "name": "id", + "description": null, + "type": { + "kind": "SCALAR", + "name": "ID", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "title", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "description", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "url", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "language", + "description": null, + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "tags", + "description": null, + "type": { + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "UpdateExplainerPayload", + "description": "Autogenerated return type of UpdateExplainer", "fields": [ { "name": "clientMutationId", @@ -83638,112 +85330,42 @@ "deprecationReason": null }, { - "name": "dynamic", - "description": null, - "args": [ - - ], - 
"type": { - "kind": "OBJECT", - "name": "Dynamic", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "dynamicEdge", - "description": null, - "args": [ - - ], - "type": { - "kind": "OBJECT", - "name": "DynamicEdge", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "project", - "description": null, - "args": [ - - ], - "type": { - "kind": "OBJECT", - "name": "Project", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "project_media", - "description": null, - "args": [ - - ], - "type": { - "kind": "OBJECT", - "name": "ProjectMedia", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "source", - "description": null, - "args": [ - - ], - "type": { - "kind": "OBJECT", - "name": "Source", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "task", + "name": "explainer", "description": null, "args": [ ], "type": { "kind": "OBJECT", - "name": "Task", + "name": "Explainer", "ofType": null }, "isDeprecated": false, "deprecationReason": null }, { - "name": "version", + "name": "explainerEdge", "description": null, "args": [ ], "type": { "kind": "OBJECT", - "name": "Version", + "name": "ExplainerEdge", "ofType": null }, "isDeprecated": false, "deprecationReason": null }, { - "name": "versionEdge", + "name": "team", "description": null, "args": [ ], "type": { "kind": "OBJECT", - "name": "VersionEdge", + "name": "Team", "ofType": null }, "isDeprecated": false, @@ -83759,8 +85381,8 @@ }, { "kind": "INPUT_OBJECT", - "name": "UpdateExplainerInput", - "description": "Autogenerated input type of UpdateExplainer", + "name": "UpdateFactCheckInput", + "description": "Autogenerated input type of UpdateFactCheck", "fields": null, "inputFields": [ { @@ -83775,34 +85397,6 @@ "isDeprecated": false, "deprecationReason": null }, - { - "name": "title", - "description": null, - 
"type": { - "kind": "NON_NULL", - "name": null, - "ofType": { - "kind": "SCALAR", - "name": "String", - "ofType": null - } - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "description", - "description": null, - "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - }, { "name": "url", "description": null, @@ -83828,123 +85422,23 @@ "deprecationReason": null }, { - "name": "clientMutationId", - "description": "A unique identifier for the client performing the mutation.", - "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - } - ], - "interfaces": null, - "enumValues": null, - "possibleTypes": null - }, - { - "kind": "OBJECT", - "name": "UpdateExplainerPayload", - "description": "Autogenerated return type of UpdateExplainer", - "fields": [ - { - "name": "clientMutationId", - "description": "A unique identifier for the client performing the mutation.", - "args": [ - - ], - "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "explainer", - "description": null, - "args": [ - - ], - "type": { - "kind": "OBJECT", - "name": "Explainer", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "explainerEdge", - "description": null, - "args": [ - - ], - "type": { - "kind": "OBJECT", - "name": "ExplainerEdge", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "team", - "description": null, - "args": [ - - ], - "type": { - "kind": "OBJECT", - "name": "Team", - "ofType": null - }, - "isDeprecated": false, - "deprecationReason": null - } - ], - "inputFields": null, - "interfaces": [ - - ], - "enumValues": null, - "possibleTypes": null - }, - { - "kind": 
"INPUT_OBJECT", - "name": "UpdateFactCheckInput", - "description": "Autogenerated input type of UpdateFactCheck", - "fields": null, - "inputFields": [ - { - "name": "id", - "description": null, - "type": { - "kind": "SCALAR", - "name": "ID", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - }, - { - "name": "url", + "name": "tags", "description": null, "type": { - "kind": "SCALAR", - "name": "String", - "ofType": null + "kind": "LIST", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } }, "defaultValue": null, "isDeprecated": false, "deprecationReason": null }, { - "name": "language", + "name": "rating", "description": null, "type": { "kind": "SCALAR", @@ -84056,6 +85550,20 @@ }, "isDeprecated": false, "deprecationReason": null + }, + { + "name": "team", + "description": null, + "args": [ + + ], + "type": { + "kind": "OBJECT", + "name": "Team", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null } ], "inputFields": null, @@ -86983,18 +88491,6 @@ "isDeprecated": false, "deprecationReason": null }, - { - "name": "list_columns", - "description": null, - "type": { - "kind": "SCALAR", - "name": "JsonStringType", - "ofType": null - }, - "defaultValue": null, - "isDeprecated": false, - "deprecationReason": null - }, { "name": "tipline_inbox_filters", "description": null, diff --git a/test/controllers/graphql_controller_10_test.rb b/test/controllers/graphql_controller_10_test.rb index 781852d57a..b7ad00320f 100644 --- a/test/controllers/graphql_controller_10_test.rb +++ b/test/controllers/graphql_controller_10_test.rb @@ -87,7 +87,7 @@ def setup assert_equal 3, data.size assert_equal [u.id, u2.id, u3.id], ids.sort # Quey bot - query = "query { me { dbid, get_send_email_notifications, get_send_successful_login_notifications, get_send_failed_login_notifications, source { medias(first: 1) { edges { node { id } } } }, annotations(first: 1) { edges { node { id } } 
}, team_users(first: 1) { edges { node { id } } }, bot { get_description, get_role, get_version, get_source_code_url } } }" + query = "query { me { dbid, get_send_email_notifications, get_send_successful_login_notifications, get_send_failed_login_notifications, source { medias(first: 1) { edges { node { id } } } }, annotations(first: 1) { edges { node { id } } }, team_users_count, team_users(first: 1) { edges { node { id } } }, bot { get_description, get_role, get_version, get_source_code_url } } }" post :create, params: { query: query } assert_response :success end diff --git a/test/controllers/graphql_controller_11_test.rb b/test/controllers/graphql_controller_11_test.rb index 417a3c7fff..af6670ab50 100644 --- a/test/controllers/graphql_controller_11_test.rb +++ b/test/controllers/graphql_controller_11_test.rb @@ -115,4 +115,48 @@ def teardown data = JSON.parse(response.body)['data']['createProjectMedia'] assert_not_nil data['project_media']['id'] end + + test "admin users should be able to see all workspaces as accessible teams" do + Team.destroy_all + + user = create_user + team1 = create_team + create_team_user user: user, team: team1 + + admin = create_user(is_admin: true) + team2 = create_team + create_team_user user: admin, team: team2 + + authenticate_with_user(admin) + query = "query { me { accessible_teams_count, accessible_teams { edges { node { dbid } } } } }" + post :create, params: { query: query } + assert_response :success + response = JSON.parse(@response.body)['data']['me'] + data = response['accessible_teams']['edges'] + assert_equal 2, data.size + assert_equal team1.id, data[0]['node']['dbid'] + assert_equal team2.id, data[1]['node']['dbid'] + assert_equal 2, response['accessible_teams_count'] + end + + test "non-admin users should only be able to see workspaces they belong to as accessible teams" do + Team.destroy_all + user = create_user + team1 = create_team + create_team_user user: user, team: team1 + + user2 = create_user + team2 = 
create_team + create_team_user user: user2, team: team2 + + authenticate_with_user(user) + query = "query { me { accessible_teams_count, accessible_teams { edges { node { dbid } } } } }" + post :create, params: { query: query } + assert_response :success + response = JSON.parse(@response.body)['data']['me'] + data = response['accessible_teams']['edges'] + assert_equal 1, data.size + assert_equal team1.id, data[0]['node']['dbid'] + assert_equal 1, response['accessible_teams_count'] + end end diff --git a/test/controllers/graphql_controller_12_test.rb b/test/controllers/graphql_controller_12_test.rb index f5b75927ef..a4e5a9e84d 100644 --- a/test/controllers/graphql_controller_12_test.rb +++ b/test/controllers/graphql_controller_12_test.rb @@ -332,7 +332,7 @@ def teardown create_cluster_project_media cluster: c, project_media: pm authenticate_with_user(@u) - query = 'query { feed(id: "' + f.id.to_s + '") { cluster(project_media_id: ' + pm.id.to_s + ') { dbid, project_media(id: ' + pm.id.to_s + ') { id, imported_from_feed { id } }, project_medias(teamId: ' + @t.id.to_s + ', first: 1) { edges { node { id } } }, cluster_teams(first: 10) { edges { node { id, team { name }, last_request_date, media_count, requests_count, fact_checks(first: 1) { edges { node { id } } } } } } } } }' + query = 'query { feed(id: "' + f.id.to_s + '") { cluster(project_media_id: ' + pm.id.to_s + ') { dbid, project_media(id: ' + pm.id.to_s + ') { id, articles_count, imported_from_feed { id } }, project_medias(teamId: ' + @t.id.to_s + ', first: 1) { edges { node { id } } }, cluster_teams(first: 10) { edges { node { id, team { name }, last_request_date, media_count, requests_count, fact_checks(first: 1) { edges { node { id } } } } } } } } }' post :create, params: { query: query } assert_response :success assert_equal c.id, JSON.parse(@response.body)['data']['feed']['cluster']['dbid'] @@ -345,8 +345,7 @@ def teardown pm2 = create_project_media team: t f = create_feed team: @t f.teams << t - c = 
create_cluster feed: f, team_ids: [t.id], project_media_id: pm1.id - create_cluster_project_media cluster: c, project_media: pm1 + c = create_cluster feed: f, team_ids: [t.id], project_media: pm1 create_cluster_project_media cluster: c, project_media: pm2 assert_equal 0, @t.project_medias.count @@ -365,8 +364,7 @@ def teardown pm2 = create_project_media team: t f = create_feed team: @t f.teams << t - c = create_cluster feed: f, team_ids: [t.id], project_media_id: pm1.id - create_cluster_project_media cluster: c, project_media: pm1 + c = create_cluster feed: f, team_ids: [t.id], project_media: pm1 create_cluster_project_media cluster: c, project_media: pm2 assert_equal 1, @t.project_medias.count @@ -377,21 +375,50 @@ def teardown assert_equal 3, @t.reload.project_medias.count end - test "should get team articles" do + test "should get team articles (explainers)" do + Sidekiq::Testing.fake! @t.set_explainers_enabled = true @t.save! ex = create_explainer team: @t tag = create_tag annotated: ex authenticate_with_user(@u) - query = "query { team(slug: \"#{@t.slug}\") { get_explainers_enabled, articles(article_type: \"explainer\") { edges { node { ... on Explainer { dbid, tags { edges { node { dbid } } } } } } } } }" + query = "query { team(slug: \"#{@t.slug}\") { get_explainers_enabled, articles_count(article_type: \"explainer\"), articles(article_type: \"explainer\") { edges { node { ... on Explainer { dbid, tags } } } } } }" post :create, params: { query: query, team: @t.slug } + assert_response :success team = JSON.parse(@response.body)['data']['team'] + assert_equal 1, team['articles_count'] assert team['get_explainers_enabled'] data = team['articles']['edges'] assert_equal [ex.id], data.collect{ |edge| edge['node']['dbid'] } - tags = data[0]['node']['tags']['edges'] - assert_equal [tag.id.to_s], tags.collect{ |edge| edge['node']['dbid'] } + end + + test "should get team articles (fact-checks)" do + Sidekiq::Testing.fake! 
+ authenticate_with_user(@u) + pm = create_project_media team: @t + cd = create_claim_description project_media: pm + fc = create_fact_check claim_description: cd, tags: ['foo', 'bar'] + query = "query { team(slug: \"#{@t.slug}\") { articles_count(article_type: \"fact-check\"), articles(article_type: \"fact-check\") { edges { node { ... on FactCheck { dbid, tags } } } } } }" + post :create, params: { query: query, team: @t.slug } + assert_response :success + team = JSON.parse(@response.body)['data']['team'] + assert_equal 1, team['articles_count'] + data = team['articles']['edges'] + assert_equal [fc.id], data.collect{ |edge| edge['node']['dbid'] } + end + + test "should get team articles (all)" do + Sidekiq::Testing.fake! + authenticate_with_user(@u) + pm = create_project_media team: @t + cd = create_claim_description project_media: pm + create_fact_check claim_description: cd, tags: ['foo', 'bar'] + create_explainer team: @t + query = "query { team(slug: \"#{@t.slug}\") { articles_count } }" + post :create, params: { query: query, team: @t.slug } assert_response :success + team = JSON.parse(@response.body)['data']['team'] + assert_equal 2, team['articles_count'] end test "should create api key" do @@ -494,4 +521,163 @@ def teardown assert_equal 'false', pm1.reload.last_status assert_equal 'false', pm2.reload.last_status end + + test "should return super-admin user as 'meedan' if user IS NOT a part of the team" do + u1 = create_user name: 'Mei' + u2 = create_user name: 'Satsuki', is_admin: true + + t1 = create_team + t2 = create_team + + create_team_user user: u1, team: t1 + create_team_user user: u2, team: t2 + + authenticate_with_user(u1) + + query1 = "query { user (id: #{ u1.id }) { name } }" + post :create, params: { query: query1 } + assert_response :success + assert_equal false, u1.is_admin? 
+ assert_equal 'Mei', JSON.parse(@response.body)['data']['user']['name'] + + query2 = "query { user (id: #{ u2.id }) { name } }" + post :create, params: { query: query2 } + assert_response :success + assert_equal true, u2.is_admin? + assert_equal CheckConfig.get('super_admin_name'), JSON.parse(@response.body)['data']['user']['name'] + end + + test "should return super-admin user themself if user IS a part of the team" do + u1 = create_user name: 'Mei' + u2 = create_user name: 'Satsuki', is_admin: true + + t = create_team + + create_team_user user: u1, team: t + create_team_user user: u2, team: t + + authenticate_with_user(u1) + + query1 = "query { user (id: #{ u1.id }) { name } }" + post :create, params: { query: query1 } + assert_response :success + assert_equal false, u1.is_admin? + assert_equal 'Mei', JSON.parse(@response.body)['data']['user']['name'] + + query2 = "query { user (id: #{ u2.id }) { name } }" + post :create, params: { query: query2 } + assert_response :success + assert_equal true, u2.is_admin? + assert_equal 'Satsuki', JSON.parse(@response.body)['data']['user']['name'] + end + + test "should return default profile image if super-admin user IS NOT a part of the team" do + u1 = create_user + u2 = create_user is_admin: true, profile_image: "#{CheckConfig.get('checkdesk_base_url')}/images/checklogo.png" + + t1 = create_team + t2 = create_team + + create_team_user user: u1, team: t1 + create_team_user user: u2, team: t2 + + authenticate_with_user(u1) + + query = "query { user (id: #{ u2.id }) { profile_image, source { image } } }" + post :create, params: { query: query } + assert_response :success + assert_equal true, u2.is_admin? 
+ assert_equal "#{CheckConfig.get('checkdesk_base_url')}/images/user.png", JSON.parse(@response.body)['data']['user']['profile_image'] + assert_equal "#{CheckConfig.get('checkdesk_base_url')}/images/user.png", JSON.parse(@response.body)['data']['user']['source']['image'] + end + + test "should return custom profile image if super-admin user IS a part of the team" do + u1 = create_user + u2 = create_user is_admin: true, profile_image: "#{CheckConfig.get('checkdesk_base_url')}/images/checklogo.png" + + t = create_team + + create_team_user user: u1, team: t + create_team_user user: u2, team: t + + authenticate_with_user(u1) + + query = "query { user (id: #{ u2.id }) { profile_image, source { image } } }" + post :create, params: { query: query } + assert_response :success + assert_equal true, u2.is_admin? + assert_equal "#{CheckConfig.get('checkdesk_base_url')}/images/checklogo.png", JSON.parse(@response.body)['data']['user']['profile_image'] + assert_equal "#{CheckConfig.get('checkdesk_base_url')}/images/checklogo.png", JSON.parse(@response.body)['data']['user']['source']['image'] + end + + test "should treat ' tag' and 'tag' as the same tag, and not try to create a new tag" do + Sidekiq::Testing.inline! + t = create_team + a = ApiKey.create! 
+ b = create_bot_user api_key_id: a.id + create_team_user team: t, user: b + p = create_project team: t + authenticate_with_token(a) + + query1 = ' mutation create { + createProjectMedia(input: { + project_id: ' + p.id.to_s + ', + media_type: "Blank", + channel: { main: 1 }, + set_tags: ["science"], + set_status: "verified", + set_claim_description: "Claim #1.", + set_fact_check: { + title: "Title #1", + language: "en", + } + }) { + project_media { + full_url, + tags { + edges { + node { + tag_text + } + } + } + } + } + } ' + + post :create, params: { query: query1, team: t.slug } + assert_response :success + assert_equal 'science', JSON.parse(@response.body)['data']['createProjectMedia']['project_media']['tags']['edges'][0]['node']['tag_text'] + sleep 1 + + query2 = ' mutation create { + createProjectMedia(input: { + project_id: ' + p.id.to_s + ', + media_type: "Blank", + channel: { main: 1 }, + set_tags: ["science "], + set_status: "verified", + set_claim_description: "Claim #2.", + set_fact_check: { + title: "Title #2", + language: "en", + } + }) { + project_media { + full_url, + tags { + edges { + node { + tag_text + } + } + } + } + } + } ' + + post :create, params: { query: query2, team: t.slug } + assert_response :success + assert_equal 'science', JSON.parse(@response.body)['data']['createProjectMedia']['project_media']['tags']['edges'][0]['node']['tag_text'] + end end diff --git a/test/controllers/graphql_controller_2_test.rb b/test/controllers/graphql_controller_2_test.rb index 2b743e50b0..7020a854d3 100644 --- a/test/controllers/graphql_controller_2_test.rb +++ b/test/controllers/graphql_controller_2_test.rb @@ -239,7 +239,7 @@ def setup p2b = create_project_media project: p create_relationship source_id: p2.id, target_id: p2a.id create_relationship source_id: p2.id, target_id: p2b.id, relationship_type: Relationship.suggested_type - post :create, params: { query: "query { project_media(ids: \"#{p1.id},#{p.id}\") { is_main, is_secondary, 
is_confirmed_similar_to_another_item, suggested_main_item { id }, confirmed_main_item { id }, default_relationships_count, default_relationships(first: 10000) { edges { node { dbid } } }, confirmed_similar_relationships(first: 10000) { edges { node { dbid } } }, suggested_similar_relationships(first: 10000) { edges { node { target { dbid } } } } } }", team: t.slug } + post :create, params: { query: "query { project_media(ids: \"#{p1.id},#{p.id}\") { is_main, is_secondary, is_confirmed_similar_to_another_item, suggested_main_item { id }, suggested_main_relationship { id }, confirmed_main_item { id }, default_relationships_count, default_relationships(first: 10000) { edges { node { dbid } } }, confirmed_similar_relationships(first: 10000) { edges { node { dbid } } }, suggested_similar_relationships(first: 10000) { edges { node { target { dbid } } } } } }", team: t.slug } assert_equal [p1a.id, p1b.id].sort, JSON.parse(@response.body)['data']['project_media']['suggested_similar_relationships']['edges'].collect{ |x| x['node']['target']['dbid'] }.sort end diff --git a/test/format-coverage.sh b/test/format-coverage.sh index c959a1d58c..bbc1eec615 100755 --- a/test/format-coverage.sh +++ b/test/format-coverage.sh @@ -2,8 +2,9 @@ apt-get install -y awscli -if [ "$TRAVIS_PULL_REQUEST" == "false" ] +if [[ "$GITHUB_EVENT" == "pull_request" || "$GITHUB_EVENT" == "push" ]] then - ./test/cc-test-reporter format-coverage -t simplecov --output ../coverage/codeclimate.$TRAVIS_JOB_NAME.json ../coverage/.resultset.json - aws s3 cp ../coverage/codeclimate.$TRAVIS_JOB_NAME.json s3://check-api-travis/codeclimate/$TRAVIS_REPO_SLUG/$TRAVIS_BUILD_NUMBER/codeclimate.$TRAVIS_JOB_NAME.json +./test/cc-test-reporter format-coverage -t simplecov --output ../coverage/codeclimate.$GITHUB_JOB_NAME.json ../coverage/.resultset.json +aws s3 cp ../coverage/codeclimate.$GITHUB_JOB_NAME.json s3://check-api-github/codeclimate/$GITHUB_REPO/$GITHUB_BUILD_NUMBER/codeclimate.$GITHUB_JOB_NAME.json fi + diff 
--git a/test/lib/check_rack_attack_test.rb b/test/lib/check_rack_attack_test.rb index a467dcab4a..efd8a46466 100644 --- a/test/lib/check_rack_attack_test.rb +++ b/test/lib/check_rack_attack_test.rb @@ -1,9 +1,13 @@ -require 'test_helper' +require_relative '../test_helper' class ThrottlingTest < ActionDispatch::IntegrationTest setup do - redis = Redis.new(REDIS_CONFIG) - redis.flushdb + @redis = Redis.new(REDIS_CONFIG) + @redis.flushdb + end + + def real_ip(request) + request.get_header('HTTP_CF_CONNECTING_IP') || request.remote_ip end test "should throttle excessive requests to /api/graphql" do @@ -22,7 +26,7 @@ class ThrottlingTest < ActionDispatch::IntegrationTest stub_configs({ 'login_block_limit' => 2 }) do user_params = { api_user: { email: 'user@example.com', password: random_complex_password } } - 2.times do + 3.times do post api_user_session_path, params: user_params, as: :json end @@ -48,7 +52,7 @@ class ThrottlingTest < ActionDispatch::IntegrationTest # Test blocking for /api/users/sign_in via Cloudflare user_params = { api_user: { email: 'user@example.com', password: random_complex_password } } - 2.times do + 3.times do post api_user_session_path, params: user_params, as: :json, headers: { 'CF-Connecting-IP' => '1.2.3.4' } end @@ -58,4 +62,66 @@ class ThrottlingTest < ActionDispatch::IntegrationTest Rails.env = original_env end + + test "should apply higher rate limit for authenticated users" do + stub_configs({ 'api_rate_limit_authenticated' => 5 }) do + host!('localhost') + password = random_complex_password + user = create_user password: password + user_params = { api_user: { email: user.email, password: password } } + + post api_user_session_path, params: user_params, as: :json + assert_response :success + + 5.times do + post api_graphql_path + assert_response :success + end + + post api_graphql_path + assert_response :too_many_requests + + delete destroy_api_user_session_path, as: :json + assert_response :success + end + end + + test "should not 
increment counter on successful login" do + stub_configs({ 'login_block_limit' => 3 }) do + password = random_complex_password + user = create_user password: password + user_params = { api_user: { email: user.email, password: password } } + + # Successful logins + 2.times do + post api_user_session_path, params: user_params, as: :json + assert_response :success + end + + ip = real_ip(@request) + counter_value = @redis.get("track:#{ip}") + assert_equal "0", counter_value, "Counter should not be incremented for successful logins" + + delete destroy_api_user_session_path, as: :json + assert_response :success + + # Unsuccessful login attempts + 2.times do + post api_user_session_path, params: { api_user: { email: user.email, password: 'wrong_password' } }, as: :json + assert_response :unauthorized + end + + # Check counter value after unsuccessful logins + counter_value = @redis.get("track:#{ip}") + assert_equal "2", counter_value, "Counter should be incremented for unsuccessful logins" + + # Ensure that the IP is not blocked after successful logins + post api_user_session_path, params: user_params, as: :json + assert_response :success + + # Subsequent unsuccessful login attempts should result in a block + post api_user_session_path, params: { api_user: { email: user.email, password: 'wrong_password' } }, as: :json + assert_response :forbidden + end + end end diff --git a/test/lib/check_session_store_test.rb b/test/lib/check_session_store_test.rb new file mode 100644 index 0000000000..03ba2a3cf9 --- /dev/null +++ b/test/lib/check_session_store_test.rb @@ -0,0 +1,43 @@ +require_relative '../test_helper' + +class SessionStoreTest < ActiveSupport::TestCase + def setup + end + + def teardown + end + + def with_environment(env) + original_env = Rails.env + Rails.singleton_class.class_eval do + define_method(:env) { ActiveSupport::StringInquirer.new(env) } + end + yield + ensure + Rails.singleton_class.class_eval do + define_method(:env) { original_env } + end + end + + test 
"session store configuration with default key and domain when config values are not set" do + with_environment('production') do + stub_configs({ 'session_store_key' => nil, 'session_store_domain' => nil }) do + load Rails.root.join('config/initializers/session_store.rb') + assert_equal ActionDispatch::Session::CookieStore, Rails.application.config.session_store + assert_equal '_checkdesk_session', Rails.application.config.session_options[:key] + assert_equal 'checkmedia.org', Rails.application.config.session_options[:domain] + end + end + end + + test "session store configuration with overriding key and domain in config" do + with_environment('production') do + stub_configs({ 'session_store_key' => '_checkdesk_session_qa', 'session_store_domain' => 'qa.checkmedia.org' }) do + load Rails.root.join('config/initializers/session_store.rb') + assert_equal ActionDispatch::Session::CookieStore, Rails.application.config.session_store + assert_equal '_checkdesk_session_qa', Rails.application.config.session_options[:key] + assert_equal 'qa.checkmedia.org', Rails.application.config.session_options[:domain] + end + end + end +end diff --git a/test/lib/tasks/statistics_test.rb b/test/lib/tasks/statistics_test.rb index ddb8a1137b..d00519537c 100644 --- a/test/lib/tasks/statistics_test.rb +++ b/test/lib/tasks/statistics_test.rb @@ -337,9 +337,7 @@ def teardown test "check:data:statistics allows generating conversations for months before april 1 2023, with argument" do date = DateTime.new(2023,01,01) - create_project_media(user: BotUser.smooch_user, team: @tipline_team, created_at: date + 2.weeks) - CheckStatistics.stubs(:get_statistics).returns( { platform: 'whatsapp', @@ -348,120 +346,93 @@ def teardown end_date: date, } ) - + travel_to DateTime.new(2023,01,01) - out, err = capture_io do # pass in ignore_convo_cutoff: true Rake::Task['check:data:statistics'].invoke(true) end Rake::Task['check:data:statistics'].reenable - + conversations = MonthlyTeamStatistic.where(team: 
@tipline_team).pluck(:conversations_24hr).uniq assert_equal 1, conversations.count assert !conversations.first.nil? end - - test "check:data:regenerate_statistics errors if only an unsupported argument is passed" do - out, err = capture_io do - Rake::Task['check:data:regenerate_statistics'].invoke("foo") - end - Rake::Task['check:data:regenerate_statistics'].reenable - - assert err.present? - end - - test "check:data:regenerate_statistics accepts arguments as comma separated list or string" do - out, err = capture_io do - Rake::Task['check:data:regenerate_statistics'].invoke("unique_newsletters_sent") - end - Rake::Task['check:data:regenerate_statistics'].reenable - - assert err.blank? - - out, err = capture_io do - Rake::Task['check:data:regenerate_statistics'].invoke("unique_newsletters_sent,foo") - end - Rake::Task['check:data:regenerate_statistics'].reenable - - assert err.blank? - end - - test "check:data:regenerate_statistics outputs supported arguments if no args provided" do - out, err = capture_io do - Rake::Task['check:data:regenerate_statistics'].invoke - end - Rake::Task['check:data:regenerate_statistics'].reenable - - assert_match /unique_newsletters_sent/, err - + + test "check:data:regenerate_statistics errors if start_date argument is invalid" do out, err = capture_io do - Rake::Task['check:data:regenerate_statistics'].invoke + assert_raises(Check::Statistics::ArgumentError) do + Rake::Task['check:data:regenerate_statistics'].invoke("invalid_date") + end end Rake::Task['check:data:regenerate_statistics'].reenable - - assert_match /unique_newsletters_sent/, err + + assert_match /Invalid or missing start_date argument/, err end - - test "check:data:regenerate_statistics regenerates any monthly team statistics present in database for provided stat" do + + test "check:data:regenerate_statistics regenerates stats from the provided start date" do + start_date = "2023-04-01" previous_month_start = DateTime.new(2023,4,1,0,0,0) previous_month_end = 
DateTime.new(2023,4,30,23,59,59) - + other_workspace_with_stats = create_team - + team_stat_one = create_monthly_team_statistic(team: @tipline_team, language: 'en', start_date: previous_month_start, end_date: previous_month_end) team_stat_two = create_monthly_team_statistic(team: @tipline_team, language: 'es', start_date: @start_of_month, end_date: @current_date) team_stat_three = create_monthly_team_statistic(team: other_workspace_with_stats, language: 'en', start_date: @start_of_month, end_date: @current_date) - + CheckStatistics.stubs(:number_of_newsletters_sent).with(@tipline_team.id, team_stat_one.start_date, team_stat_one.end_date, 'en').returns(100) CheckStatistics.expects(:number_of_newsletters_sent).with(@tipline_team.id, team_stat_two.start_date, team_stat_two.end_date, 'es').returns(300) CheckStatistics.expects(:number_of_newsletters_sent).with(other_workspace_with_stats.id, team_stat_three.start_date, team_stat_three.end_date, 'en').returns(400) travel_to @current_date - + out, err = capture_io do - Rake::Task['check:data:regenerate_statistics'].invoke("unique_newsletters_sent") + Rake::Task['check:data:regenerate_statistics'].invoke(start_date) end Rake::Task['check:data:regenerate_statistics'].reenable + assert err.blank? 
- + # en, previous month - stats_one = MonthlyTeamStatistic.first + stats_one = MonthlyTeamStatistic.find_by(team: @tipline_team, language: 'en', start_date: previous_month_start) assert_equal @tipline_team.id, stats_one.team_id - assert_equal (@current_date - 1.month).beginning_of_month.to_i, stats_one.start_date.to_i - assert_equal (@current_date - 1.month).end_of_month.to_i, stats_one.end_date.to_i + assert_equal previous_month_start.to_i, stats_one.start_date.to_i + assert_equal previous_month_end.to_i, stats_one.end_date.to_i assert_equal 'en', stats_one.language assert_equal 100, stats_one.unique_newsletters_sent - + # es, current month - stats_two = MonthlyTeamStatistic.second + stats_two = MonthlyTeamStatistic.find_by(team: @tipline_team, language: 'es', start_date: @start_of_month) assert_equal @tipline_team.id, stats_two.team_id assert_equal @start_of_month.to_i, stats_two.start_date.to_i assert_equal @current_date.to_i, stats_two.end_date.to_i assert_equal 'es', stats_two.language assert_equal 300, stats_two.unique_newsletters_sent - - # second workspace - es, current month - stats_three = MonthlyTeamStatistic.third + + # second workspace - en, current month + stats_three = MonthlyTeamStatistic.find_by(team: other_workspace_with_stats, language: 'en', start_date: @start_of_month) assert_equal other_workspace_with_stats.id, stats_three.team_id assert_equal @start_of_month.to_i, stats_three.start_date.to_i assert_equal @current_date.to_i, stats_three.end_date.to_i assert_equal 'en', stats_three.language assert_equal 400, stats_three.unique_newsletters_sent end - + test "check:data:regenerate_statistics doesn't explode if tipline has been disabled, and sets newsletters to nil" do + start_date = "2023-04-01" random_team = create_team create_monthly_team_statistic(team: random_team, language: 'es', start_date: @start_of_month, end_date: @current_date) - + travel_to @current_date - + out, err = capture_io do - 
Rake::Task['check:data:regenerate_statistics'].invoke("unique_newsletters_sent") + Rake::Task['check:data:regenerate_statistics'].invoke(start_date) end Rake::Task['check:data:regenerate_statistics'].reenable + assert err.blank? - + stats_one = MonthlyTeamStatistic.first assert_nil stats_one.unique_newsletters_sent - end + end end diff --git a/test/models/bot/alegre_v2_test.rb b/test/models/bot/alegre_v2_test.rb index ce44280f2b..f99d11b117 100644 --- a/test/models/bot/alegre_v2_test.rb +++ b/test/models/bot/alegre_v2_test.rb @@ -74,6 +74,13 @@ def teardown assert_equal Bot::Alegre.get_type(pm4), "text" end + test "should have host and paths for text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + assert_equal Bot::Alegre.host, CheckConfig.get('alegre_host') + assert_equal Bot::Alegre.sync_path(pm1), "/similarity/sync/text" + assert_equal Bot::Alegre.async_path(pm1), "/similarity/async/text" + assert_equal Bot::Alegre.delete_path(pm1), "/text/similarity/" + end test "should have host and paths for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio @@ -106,6 +113,11 @@ def teardown RequestStore.store[:pause_database_connection] = false end + test "should create a generic_package for text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + assert_equal Bot::Alegre.generic_package(pm1, "quote"), {:content_hash=>Bot::Alegre.content_hash(pm1, "quote"), :doc_id=>Bot::Alegre.item_doc_id(pm1, "quote"), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :field=>"quote", :temporary_media=>false}} + end + test "should create a generic_package for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio assert_equal Bot::Alegre.generic_package(pm1, "audio"), {:content_hash=>Bot::Alegre.content_hash(pm1, nil), :doc_id=>Bot::Alegre.item_doc_id(pm1, "audio"), 
:context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :temporary_media=>false}} @@ -121,6 +133,13 @@ def teardown assert_equal Bot::Alegre.generic_package(pm1, "video"), {:content_hash=>Bot::Alegre.content_hash(pm1, nil), :doc_id=>Bot::Alegre.item_doc_id(pm1, "video"), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :temporary_media=>false}} end + test "should create a generic_package_text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + assert_equal Bot::Alegre.generic_package_text(pm1, "quote", {}), {:content_hash=>Bot::Alegre.content_hash(pm1, "quote"), :doc_id=>Bot::Alegre.item_doc_id(pm1, "quote"), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :field=>"quote", :temporary_media=>false}, :models=>["elasticsearch"], :text=>pm1.text, :fuzzy=>false, :match_across_content_types=>true, :min_es_score=>10} + assert_equal Bot::Alegre.store_package_text(pm1, "quote", {}), {:content_hash=>Bot::Alegre.content_hash(pm1, "quote"), :doc_id=>Bot::Alegre.item_doc_id(pm1, "quote"), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :field=>"quote", :temporary_media=>false}, :models=>["elasticsearch"], :text=>pm1.text, :fuzzy=>false, :match_across_content_types=>true, :min_es_score=>10} + assert_equal Bot::Alegre.store_package(pm1, "quote", {}), {:content_hash=>Bot::Alegre.content_hash(pm1, "quote"), :doc_id=>Bot::Alegre.item_doc_id(pm1, "quote"), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :field=>"quote", :temporary_media=>false}, :models=>["elasticsearch"], :text=>pm1.text, :fuzzy=>false, :match_across_content_types=>true, :min_es_score=>10} + end + test "should create a generic_package_audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio assert_equal Bot::Alegre.generic_package_audio(pm1, {}), 
{:content_hash=>Bot::Alegre.content_hash(pm1, nil), :doc_id=>Bot::Alegre.item_doc_id(pm1, nil), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :temporary_media=>false}, :url=>Bot::Alegre.media_file_url(pm1)} @@ -142,6 +161,11 @@ def teardown assert_equal Bot::Alegre.store_package(pm1, "video", {}), {:content_hash=>Bot::Alegre.content_hash(pm1, nil), :doc_id=>Bot::Alegre.item_doc_id(pm1, nil), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :temporary_media=>false}, :url=>Bot::Alegre.media_file_url(pm1)} end + test "should create a context for text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + assert_equal Bot::Alegre.get_context(pm1, "quote"), {:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :field=>"quote", :temporary_media=>false} + end + test "should create a context for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio assert_equal Bot::Alegre.get_context(pm1, "audio"), {:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :temporary_media=>false} @@ -157,6 +181,15 @@ def teardown assert_equal Bot::Alegre.get_context(pm1, "video"), {:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :temporary_media=>false} end + test "should create a delete_package for text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + package = Bot::Alegre.delete_package(pm1, "quote") + assert_equal package[:doc_id], Bot::Alegre.item_doc_id(pm1, "quote") + assert_equal package[:context], {:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :field=>"quote", :temporary_media=>false} + assert_equal package[:text].class, String + assert_equal package[:quiet], false + end + test "should create a delete_package for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio 
package = Bot::Alegre.delete_package(pm1, "audio") @@ -184,6 +217,37 @@ def teardown assert_equal package[:quiet], false end + test "should run text async request" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + response = { + "message": "Message pushed successfully", + "queue": "text__Model", + "body": { + "callback_url": "http:\/\/alegre:3100\/presto\/receive\/add_item\/text", + "id": "f0d43d29-853d-4099-9e92-073203afa75b", + "url": nil, + "text": 'This is a long text that creates a text-based item', + "raw": { + "limit": 200, + "url": nil, + "text": 'This is a long text that creates a text-based item', + "callback_url": "http:\/\/example.com\/search_results", + "doc_id": Bot::Alegre.item_doc_id(pm1, "quote"), + "context": Bot::Alegre.get_context(pm1, "quote"), + "created_at": "2023-10-27T22:40:14.205586", + "command": "search", + "threshold": 0.0, + "per_model_threshold": {}, + "match_across_content_types": false, + "requires_callback": true, + "final_task": "search" + } + } + } + WebMock.stub_request(:post, "#{CheckConfig.get('alegre_host')}/similarity/async/text").with(body: {:content_hash=>Bot::Alegre.content_hash(pm1, "quote"), :doc_id=>Bot::Alegre.item_doc_id(pm1, "quote"), :context=>{:team_id=>pm1.team_id, :project_media_id=>pm1.id, :has_custom_id=>true, :field=>"quote", :temporary_media=>false}, :models=>["elasticsearch"], :text=>pm1.quote, :fuzzy=>false, :match_across_content_types=>true, :min_es_score=>10}).to_return(body: response.to_json) + assert_equal JSON.parse(Bot::Alegre.get_async(pm1, "quote").to_json), JSON.parse(response.to_json) + end + test "should run audio async request" do pm1 = create_project_media team: @team, media: create_uploaded_audio response = { @@ -214,6 +278,11 @@ def teardown assert_equal JSON.parse(Bot::Alegre.get_async(pm1).to_json), JSON.parse(response.to_json) end + test "should isolate relevant_context for text" do + pm1 = create_project_media team: @team, quote: 
'This is a long text that creates a text-based item' + assert_equal Bot::Alegre.isolate_relevant_context(pm1, {"context"=>[{"team_id"=>pm1.team_id}]}), {"team_id"=>pm1.team_id} + end + test "should isolate relevant_context for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio assert_equal Bot::Alegre.isolate_relevant_context(pm1, {"context"=>[{"team_id"=>pm1.team_id}]}), {"team_id"=>pm1.team_id} @@ -229,6 +298,13 @@ def teardown assert_equal Bot::Alegre.isolate_relevant_context(pm1, {"context"=>[{"team_id"=>pm1.team_id}]}), {"team_id"=>pm1.team_id} end + test "should return field or type on get_target_field for text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + Bot::Alegre.stubs(:get_type).returns(nil) + assert_equal Bot::Alegre.get_target_field(pm1, "quote"), "quote" + Bot::Alegre.unstub(:get_type) + end + test "should return field or type on get_target_field for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio Bot::Alegre.stubs(:get_type).returns(nil) @@ -275,6 +351,11 @@ def teardown assert_equal Bot::Alegre.get_per_model_threshold(pm1, sample), {:threshold=>0.9} end + test "should get target field for text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + assert_equal Bot::Alegre.get_target_field(pm1, "quote"), "quote" + end + test "should get target field for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio assert_equal Bot::Alegre.get_target_field(pm1, nil), "audio" @@ -1077,6 +1158,20 @@ def teardown assert_equal Bot::Alegre.get_cached_data(Bot::Alegre.get_required_keys(pm1, nil)), {confirmed_results: nil, suggested_or_confirmed_results: nil} end + test "should relate project media for text" do + pm1 = create_project_media team: @team, quote: 'This is a long text that creates a text-based item' + pm2 = create_project_media team: @team, quote: 'This is 
another long text that creates a text-based item' + Bot::Alegre.stubs(:get_similar_items_v2).returns({pm2.id=>{:score=>0.91, :context=>{"team_id"=>pm2.team_id, "has_custom_id"=>true, "project_media_id"=>pm2.id, "temporary_media"=>false}, :model=>"audio", :source_field=>"audio", :target_field=>"audio", :relationship_type=>Relationship.suggested_type}}) + relationship = nil + assert_difference 'Relationship.count' do + relationship = Bot::Alegre.relate_project_media(pm1) + end + assert_equal relationship.source, pm2 + assert_equal relationship.target, pm1 + assert_equal relationship.relationship_type, Relationship.suggested_type + Bot::Alegre.unstub(:get_similar_items_v2) + end + test "should relate project media for audio" do pm1 = create_project_media team: @team, media: create_uploaded_audio pm2 = create_project_media team: @team, media: create_uploaded_audio diff --git a/test/models/bot/smooch_3_test.rb b/test/models/bot/smooch_3_test.rb index 7f8aeaed35..acef2b3e66 100644 --- a/test/models/bot/smooch_3_test.rb +++ b/test/models/bot/smooch_3_test.rb @@ -76,8 +76,9 @@ def teardown # messages contain the following: # 1). long text( > min_number_of_words_for_tipline_submit_shortcut) # 2). short text (< min_number_of_words_for_tipline_submit_shortcut) - # 3). 2 medias - # Result: created three items (on claim and two items of type image) + # 3). link + # 4). 2 medias + # Result: created four items (one claim, one link and two items of type image) Sidekiq::Testing.fake! 
do uid = random_string messages = [ @@ -88,6 +89,13 @@ def teardown source: { type: "whatsapp" }, text: long_text.join(' '), }, + { + '_id': random_string, + authorId: uid, + type: 'text', + source: { type: "whatsapp" }, + text: @link_url, + }, { '_id': random_string, authorId: uid, @@ -127,17 +135,19 @@ def teardown Bot::Smooch.run(payload) sleep 1 end - assert_difference 'ProjectMedia.count', 3 do + assert_difference 'ProjectMedia.count', 4 do assert_difference 'UploadedImage.count', 2 do assert_difference 'Claim.count' do - Sidekiq::Worker.drain_all + assert_difference 'Link.count' do + Sidekiq::Worker.drain_all + end end end end pm = ProjectMedia.last request = pm.tipline_requests.last text = request.smooch_data['text'].split("\n#{Bot::Smooch::MESSAGE_BOUNDARY}") - target_text = [long_text.join(' '), 'first image', @media_url, @media_url_2, 'bar'] + target_text = [long_text.join(' '), @link_url, 'first image', @media_url, @media_url_2, 'bar'] assert_equal target_text, text # Messages with short text only messages = [ @@ -181,6 +191,71 @@ def teardown end end + test "should force relationship between media and caption text" do + long_text = [] + 15.times{ long_text << random_string } + caption = long_text.join(' ') + # messages contain the following: + # 1). media with long text( > min_number_of_words_for_tipline_submit_shortcut) + # 2). media with short text (< min_number_of_words_for_tipline_submit_shortcut) + # Result: created three items and one relationship (one claim for caption and two items of type image) + last_id = ProjectMedia.last.id + Sidekiq::Testing.fake! 
do + uid = random_string + messages = [ + { + '_id': random_string, + authorId: uid, + type: 'image', + source: { type: "whatsapp" }, + text: 'first image', + mediaUrl: @media_url + }, + { + '_id': random_string, + authorId: uid, + type: 'image', + source: { type: "whatsapp" }, + text: caption, + mediaUrl: @media_url_2 + } + ] + messages.each do |message| + payload = { + trigger: 'message:appUser', + app: { + '_id': @app_id + }, + version: 'v1.1', + messages: [message], + appUser: { + '_id': random_string, + 'conversationStarted': true + } + }.to_json + Bot::Smooch.run(payload) + sleep 1 + end + assert_difference 'ProjectMedia.count', 3 do + assert_difference 'UploadedImage.count', 2 do + assert_difference 'Claim.count' do + assert_difference 'Relationship.count' do + assert_difference 'TiplineRequest.count', 3 do + Sidekiq::Worker.drain_all + end + end + end + end + end + claim_item = ProjectMedia.joins(:media).where('medias.type' => 'Claim').last + assert_equal caption, claim_item.media.quote + r = Relationship.last + assert_equal Relationship.suggested_type, r.relationship_type + assert_equal claim_item.id, r.target_id + assert_equal 1, claim_item.tipline_requests.count + end + end + test "should delete cache entries when user annotation is deleted" do create_flag_annotation_type create_annotation_type_and_fields('Smooch User', { 'Id' => ['Text', false], 'App Id' => ['Text', false], 'Data' => ['JSON', false] }) diff --git a/test/models/bot/smooch_4_test.rb b/test/models/bot/smooch_4_test.rb index f8df58f903..34167fa5f6 100644 --- a/test/models/bot/smooch_4_test.rb +++ b/test/models/bot/smooch_4_test.rb @@ -669,9 +669,12 @@ def teardown CheckSearch.any_instance.stubs(:medias).returns([pm1]) Bot::Alegre.stubs(:get_merged_similar_items).returns({ pm2.id => { score: 0.9, model: 'elasticsearch', context: {foo: :bar} } }) - assert_equal [pm2], Bot::Smooch.get_search_results(random_string, {}, t.id, 'en') + uid = random_string + query = 
Bot::Smooch.get_search_query(uid, {}) + assert_equal [pm2], Bot::Smooch.get_search_results(uid, query, t.id, 'en') Bot::Smooch.stubs(:bundle_list_of_messages).returns({ 'type' => 'text', 'text' => "Test #{url}" }) - assert_equal [pm1], Bot::Smooch.get_search_results(random_string, {}, t.id, 'en') + query = Bot::Smooch.get_search_query(uid, {}) + assert_equal [pm1], Bot::Smooch.get_search_results(uid, query, t.id, 'en') ProjectMedia.any_instance.unstub(:report_status) CheckSearch.any_instance.unstub(:medias) diff --git a/test/models/bot/smooch_6_test.rb b/test/models/bot/smooch_6_test.rb index 0251e7cab8..6473d95dc2 100644 --- a/test/models/bot/smooch_6_test.rb +++ b/test/models/bot/smooch_6_test.rb @@ -138,7 +138,7 @@ def send_message_outside_24_hours_window(template, pm = nil) end test "should submit query without details on tipline bot v2" do - WebMock.stub_request(:post, /\/text\/similarity\/search\//).to_return(body: {}.to_json) + WebMock.stub_request(:post, /\/text\/similarity\/search\//).to_return(body: {}.to_json) # For explainers claim = 'This is a test claim' send_message 'hello', '1', '1', random_string, random_string, claim, random_string, random_string, '1' assert_saved_query_type 'default_requests' @@ -208,7 +208,7 @@ def send_message_outside_24_hours_window(template, pm = nil) end test "should submit query with details on tipline bot v2" do - WebMock.stub_request(:post, /\/text\/similarity\/search\//).to_return(body: {}.to_json) + WebMock.stub_request(:post, /\/text\/similarity\/search\//).to_return(body: {}.to_json) # For explainers claim = 'This is a test claim' send_message 'hello', '1', '1', random_string, '2', random_string, claim, '1' assert_saved_query_type 'default_requests' @@ -285,6 +285,7 @@ def send_message_outside_24_hours_window(template, pm = nil) end test "should submit query and handle search error on tipline bot v2" do + WebMock.stub_request(:post, /\/text\/similarity\/search\//).to_return(body: {}.to_json) # For explainers 
CheckSearch.any_instance.stubs(:medias).raises(StandardError) Sidekiq::Testing.inline! do send_message 'hello', '1', '1', 'Foo bar', '1' @@ -383,6 +384,7 @@ def send_message_outside_24_hours_window(template, pm = nil) ProjectMedia.any_instance.stubs(:report_status).returns('published') ProjectMedia.any_instance.stubs(:analysis_published_article_url).returns(random_url) Bot::Alegre.stubs(:get_merged_similar_items).returns({ create_project_media.id => { score: 0.9 } }) + WebMock.stub_request(:post, /\/text\/similarity\/search\//).to_return(body: {}.to_json) # For explainers Sidekiq::Testing.inline! do send_message 'hello', '1', '1', "Foo bar foo bar #{url} foo bar", '1' end @@ -691,6 +693,7 @@ def send_message_outside_24_hours_window(template, pm = nil) pm = create_project_media team: @team publish_report(pm, {}, nil, { language: 'pt', use_visual_card: false }) Bot::Smooch.stubs(:get_search_results).returns([pm]) + WebMock.stub_request(:post, /\/text\/similarity\/search\//).to_return(body: {}.to_json) # For explainers Sidekiq::Testing.inline! do send_message 'hello', '1', '1', 'Foo bar', '1' end @@ -943,4 +946,21 @@ def send_message_outside_24_hours_window(template, pm = nil) end end end + + test "should submit query and handle explainer search error on tipline bot v2" do + Explainer.stubs(:search_by_similarity).raises(StandardError) + Sidekiq::Testing.inline! do + send_message 'hello', '1', '1', 'Foo bar', '1' + end + end + + test "should search by explainers on tipline bot v2" do + assert_nil Rails.cache.read("smooch:user_search_results:#{@uid}") + @search_result.explainers << create_explainer(language: 'en', team: @team, title: 'Test', description: 'Foo bar') + Bot::Smooch.stubs(:get_search_results).returns([]) + Sidekiq::Testing.inline! 
do + send_message 'hi', '1', '1', 'Foo', '1' + end + assert_not_nil Rails.cache.read("smooch:user_search_results:#{@uid}") + end end diff --git a/test/models/bot/smooch_7_test.rb b/test/models/bot/smooch_7_test.rb index ad852ffb92..4fd46ac40e 100644 --- a/test/models/bot/smooch_7_test.rb +++ b/test/models/bot/smooch_7_test.rb @@ -217,7 +217,9 @@ def teardown Bot::Smooch.stubs(:bundle_list_of_messages).returns({ 'type' => 'text', 'text' => 'Foo bar' }) CheckSearch.any_instance.stubs(:medias).returns([pm]) - assert_equal [pm], Bot::Smooch.get_search_results(random_string, {}, pm.team_id, 'en') + uid = random_string + query = Bot::Smooch.get_search_query(uid, {}) + assert_equal [pm], Bot::Smooch.get_search_results(uid, query, pm.team_id, 'en') Bot::Smooch.unstub(:bundle_list_of_messages) CheckSearch.any_instance.unstub(:medias) @@ -238,7 +240,9 @@ def teardown ProjectMedia.any_instance.stubs(:analysis_published_article_url).returns(random_url) Bot::Alegre.stubs(:get_merged_similar_items).returns({ pm.id => { score: 0.9, model: 'elasticsearch', context: {foo: :bar} } }) - assert_equal [pm], Bot::Smooch.get_search_results(random_string, {}, pm.team_id, 'en') + uid = random_string + query = Bot::Smooch.get_search_query(uid, {}) + assert_equal [pm], Bot::Smooch.get_search_results(uid, query, pm.team_id, 'en') Bot::Smooch.unstub(:bundle_list_of_messages) ProjectMedia.any_instance.unstub(:report_status) diff --git a/test/models/claim_description_test.rb b/test/models/claim_description_test.rb index 279245e581..2ff78762df 100644 --- a/test/models/claim_description_test.rb +++ b/test/models/claim_description_test.rb @@ -18,12 +18,36 @@ def setup t = create_team create_team_user team: t, user: u, role: 'admin' pm = create_project_media team: t + pm2 = create_project_media team: t with_current_user_and_team(u, t) do cd = nil - assert_difference 'PaperTrail::Version.count', 1 do + fc = nil + assert_difference 'PaperTrail::Version.count', 2 do cd = create_claim_description 
project_media: pm, user: u + fc = create_fact_check claim_description: cd end - assert_equal 1, cd.versions.count + cd.description = 'update description' + cd.save! + fc.title = 'update title' + fc.save! + # Remove FactCheck + cd.project_media_id = nil + cd.save! + assert_equal 3, cd.versions.count + assert_equal 2, fc.versions.count + v_count = Version.from_partition(t.id).where(associated_type: 'ProjectMedia', associated_id: pm.id, item_type: ['ClaimDescription', 'FactCheck']).count + assert_equal 5, v_count + # Add existing FactCheck to another media + cd.project_media_id = pm2.id + cd.save! + assert_equal 4, cd.versions.count + assert_equal 2, fc.versions.count + # Old item logs + v_count = Version.from_partition(t.id).where(associated_type: 'ProjectMedia', associated_id: pm.id, item_type: ['ClaimDescription', 'FactCheck']).count + assert_equal 2, v_count + # New item logs + v_count = Version.from_partition(t.id).where(associated_type: 'ProjectMedia', associated_id: pm2.id, item_type: ['ClaimDescription', 'FactCheck']).count + assert_equal 4, v_count end end end @@ -133,4 +157,44 @@ def setup pm.destroy! end end + + test "should replace item when applying fact-check from blank media" do + Sidekiq::Testing.inline! + t = create_team + pm1 = create_project_media team: t, media: create_blank_media + cd = create_claim_description project_media: pm1 + fc = create_fact_check claim_description: cd + pm2 = create_project_media team: t + cd.project_media = pm2 + assert_difference 'ProjectMedia.count', -1 do + cd.save! + end + assert_nil ProjectMedia.find_by_id(pm1.id) + assert_equal fc, pm2.fact_check + end + + test "should pause report when removing fact-check" do + Sidekiq::Testing.inline! 
+ t = create_team + pm = create_project_media team: t + cd = create_claim_description project_media: pm + fc = create_fact_check claim_description: cd + + publish_report(pm) + assert_equal 'published', fc.reload.report_status + assert_equal 'published', pm.report_status(true) + + cd.project_media = nil + cd.save! + assert_equal 'paused', fc.reload.report_status + assert_equal 'paused', pm.report_status(true) + end + + test "should get information from removed item" do + pm = create_project_media + cd = create_claim_description project_media: pm + cd.project_media = nil + cd.save! + assert_equal pm, cd.project_media_was + end end diff --git a/test/models/cluster_project_media_test.rb b/test/models/cluster_project_media_test.rb index dd71524ee2..364fc5a82a 100644 --- a/test/models/cluster_project_media_test.rb +++ b/test/models/cluster_project_media_test.rb @@ -7,8 +7,10 @@ def setup end test "should create cluster project media" do + c = create_cluster + pm = create_project_media assert_difference 'ClusterProjectMedia.count' do - create_cluster_project_media + create_cluster_project_media cluster: c, project_media: pm end end diff --git a/test/models/cluster_test.rb b/test/models/cluster_test.rb index 2486a18726..11fc9988d4 100644 --- a/test/models/cluster_test.rb +++ b/test/models/cluster_test.rb @@ -19,10 +19,11 @@ def setup end test "should have items" do - c = create_cluster + pm = create_project_media + c = create_cluster project_media: pm pm1 = create_project_media cluster: c pm2 = create_project_media cluster: c - assert_equal [pm1, pm2].sort, c.reload.items.sort + assert_equal [pm, pm1, pm2].sort, c.reload.items.sort end test "should access cluster" do @@ -69,8 +70,8 @@ def setup test "should return size" do c = create_cluster - assert_equal 0, c.size - c.project_medias << create_project_media assert_equal 1, c.size + c.project_medias << create_project_media + assert_equal 2, c.size end end diff --git a/test/models/explainer_item_test.rb 
b/test/models/explainer_item_test.rb new file mode 100644 index 0000000000..39fff5a3c9 --- /dev/null +++ b/test/models/explainer_item_test.rb @@ -0,0 +1,112 @@ +require_relative '../test_helper' + +class ExplainerItemTest < ActiveSupport::TestCase + def setup + @t = create_team + @pm = create_project_media(team: @t, media: create_claim_media(quote: 'Test')) + @ex = create_explainer(team: @t) + end + + def teardown + end + + test "should create explainer item" do + assert_difference 'ExplainerItem.count' do + ExplainerItem.create! explainer: @ex, project_media: @pm + end + end + + test "should be associated with explainers" do + assert_difference 'ExplainerItem.count' do + @ex.project_medias << @pm + end + assert_equal 1, @ex.project_medias.count + end + + test "should be associated with items" do + assert_difference 'ExplainerItem.count' do + @pm.explainers << @ex + end + assert_equal 1, @pm.explainers.count + end + + test "should not create explainer item without mandatory fields" do + ei = ExplainerItem.new + assert_not ei.valid? + ei = ExplainerItem.new project_media: @pm + assert_not ei.valid? + ei = ExplainerItem.new explainer: @ex + assert_not ei.valid? + ei = ExplainerItem.new project_media: @pm, explainer: @ex + assert ei.valid? + end + + test "should not create associate explainer and item from different workspaces" do + t1 = create_team + e1 = create_explainer team: t1 + pm1 = create_project_media team: t1 + t2 = create_team + e2 = create_explainer team: t2 + pm2 = create_project_media team: t2 + assert ExplainerItem.new(project_media: pm1, explainer: e1).valid? + assert ExplainerItem.new(project_media: pm2, explainer: e2).valid? + assert_not ExplainerItem.new(project_media: pm1, explainer: e2).valid? + assert_not ExplainerItem.new(project_media: pm2, explainer: e1).valid? 
+ end + + test "should have versions" do + with_versioning do + u = create_user + t = create_team + create_team_user team: t, user: u, role: 'admin' + e = create_explainer team: t + pm = create_project_media team: t + with_current_user_and_team(u, t) do + assert_difference 'PaperTrail::Version.count', 1 do + pm.explainers << e + end + ei = ExplainerItem.where(project_media_id: pm.id, explainer_id: e.id).last + assert_equal 1, ei.versions.count + assert_difference 'PaperTrail::Version.count', 1 do + ei.destroy + end + end + end + end + + test "should have permission to create explainer item" do + t1 = create_team + u1 = create_user + create_team_user user: u1, team: t1 + e1 = create_explainer team: t1 + pm1 = create_project_media team: t1 + + t2 = create_team + u2 = create_user + create_team_user user: u2, team: t2 + e2 = create_explainer team: t2 + pm2 = create_project_media team: t2 + + with_current_user_and_team u1, t1 do + assert_difference 'ExplainerItem.count' do + pm1.explainers << e1 + end + assert_no_difference 'ExplainerItem.count' do + assert_raises RuntimeError do # Permission error + pm2.explainers << e2 + end + end + end + + with_current_user_and_team u2, t2 do + assert_no_difference 'ExplainerItem.count' do + assert_raises RuntimeError do # Permission error + pm1.explainers << e1 + end + end + assert_difference 'ExplainerItem.count' do + pm2.explainers << e2 + end + end + end +end diff --git a/test/models/explainer_test.rb b/test/models/explainer_test.rb index bae8ccbf47..a902379b6c 100644 --- a/test/models/explainer_test.rb +++ b/test/models/explainer_test.rb @@ -11,21 +11,6 @@ def setup end end - test "should have versions" do - with_versioning do - u = create_user - t = create_team - create_team_user team: t, user: u, role: 'admin' - with_current_user_and_team(u, t) do - ex = nil - assert_difference 'PaperTrail::Version.count', 1 do - ex = create_explainer user: u, team: t - end - assert_equal 1, ex.versions.count - end - end - end - test "should 
not create explainer without user or team" do assert_no_difference 'Explainer.count' do assert_raises ActiveRecord::RecordInvalid do @@ -91,9 +76,48 @@ def setup end end - test "should tag explainer" do + test "should tag explainer using annotation" do ex = create_explainer tag = create_tag annotated: ex assert_equal [tag], ex.annotations('tag') end + + test "should create tag texts when setting tags" do + Sidekiq::Testing.inline! do + assert_difference 'TagText.count' do + create_explainer tags: ['foo'] + end + end + end + + test "should index explainer information" do + Sidekiq::Testing.inline! + description = %{ + The is the first paragraph. + + This is the second paragraph. + } + + # Index two paragraphs and title when the explainer is created + Bot::Alegre.stubs(:request).with('post', '/text/similarity/', anything).times(3) + Bot::Alegre.stubs(:request).with('delete', '/text/similarity/', anything).never + ex = create_explainer description: description + + # Update the index when paragraphs change + Bot::Alegre.stubs(:request).with('post', '/text/similarity/', anything).times(2) + Bot::Alegre.stubs(:request).with('delete', '/text/similarity/', anything).once + ex = Explainer.find(ex.id) + ex.description = 'Now this is the only paragraph' + ex.save! + end + + test "should destroy explainer items when project media is destroyed" do + t = create_team + ex = create_explainer team: t + pm = create_project_media team: t + pm.explainers << ex + assert_difference 'ExplainerItem.count', -1 do + pm.destroy! 
+ end + end end diff --git a/test/models/fact_check_test.rb b/test/models/fact_check_test.rb index b915afa435..670cd0e599 100644 --- a/test/models/fact_check_test.rb +++ b/test/models/fact_check_test.rb @@ -294,6 +294,41 @@ def setup assert_not_empty fc.reload.title end + test "should validate rating" do + assert_no_difference 'FactCheck.count' do + assert_raises ActiveRecord::RecordInvalid do + create_fact_check rating: 'invalid_status' + end + end + assert_difference 'FactCheck.count' do + create_fact_check rating: 'verified' + end + # Validate custom status + t = create_team + value = { + label: 'Status', + default: 'stop', + active: 'done', + statuses: [ + { id: 'stop', label: 'Stopped', completed: '', description: 'Not started yet', style: { backgroundColor: '#a00' } }, + { id: 'done', label: 'Done!', completed: '', description: 'Nothing left to be done here', style: { backgroundColor: '#fc3' } } + ] + } + t.send :set_media_verification_statuses, value + t.save! + pm = create_project_media team: t + cd = create_claim_description project_media: pm + assert_no_difference 'FactCheck.count' do + assert_raises ActiveRecord::RecordInvalid do + create_fact_check claim_description: cd, rating: 'invalid_status' + end + end + allowed_statuses = t.reload.verification_statuses('media', nil)['statuses'].collect{|s| s[:id]} + assert_difference 'FactCheck.count' do + create_fact_check claim_description: cd, rating: 'stop' + end + end + test "should create many fact-checks without signature" do assert_difference 'FactCheck.count', 2 do create_fact_check signature: nil @@ -373,4 +408,144 @@ def setup assert_equal 'published', pm.reload.report_status end end + + test "should index report information in fact check" do + create_verification_status_stuff + t = create_team + u = create_user + create_team_user team: t, user: u, role: 'admin' + RequestStore.store[:skip_cached_field_update] = false + Sidekiq::Testing.inline! 
do + with_current_user_and_team(u, t) do + pm = create_project_media team: t + cd = create_claim_description project_media: pm + s = pm.last_verification_status_obj + s.status = 'verified' + s.save! + r = publish_report(pm) + fc = cd.fact_check + fc.title = 'Foo Bar' + fc.save! + fc = fc.reload + assert_equal u.id, fc.publisher_id + assert_equal 'published', fc.report_status + assert_equal 'verified', fc.rating + # Verify fact-checks filter + filters = { publisher_ids: [u.id] } + assert_equal [fc.id], t.filtered_fact_checks(filters).map(&:id) + filters = { rating: ['verified'] } + assert_equal [fc.id], t.filtered_fact_checks(filters).map(&:id) + filters = { report_status: ['published'] } + assert_equal [fc.id], t.filtered_fact_checks(filters).map(&:id) + filters = { publisher_ids: [u.id], rating: ['verified'], report_status: ['published'] } + assert_equal [fc.id], t.filtered_fact_checks(filters).map(&:id) + r = Dynamic.find(r.id) + r.set_fields = { state: 'paused' }.to_json + r.action = 'pause' + r.save! + fc = fc.reload + assert_nil fc.publisher_id + assert_equal 'paused', fc.report_status + assert_equal 'verified', fc.rating + s.status = 'in_progress' + s.save! + assert_equal 'in_progress', fc.reload.rating + # Verify fact-checks filter + filters = { publisher_ids: [u.id] } + assert_empty t.filtered_fact_checks(filters).map(&:id) + filters = { rating: ['verified'] } + assert_empty t.filtered_fact_checks(filters).map(&:id) + filters = { report_status: ['published'] } + assert_empty t.filtered_fact_checks(filters).map(&:id) + filters = { rating: ['in_progress'], report_status: ['paused'] } + assert_equal [fc.id], t.filtered_fact_checks(filters).map(&:id) + # Verify text filter + filters = { text: 'Test' } + assert_empty t.filtered_fact_checks(filters).map(&:id) + filters = { text: 'Foo' } + assert_equal [fc.id], t.filtered_fact_checks(filters).map(&:id) + # Update item status based on factcheck rating + fc.rating = 'verified' + fc.save! 
+ s = pm.reload.last_verification_status_obj + assert_equal 'verified', s.status + end + end + end + + test "should set fact-check as imported" do + assert !create_fact_check(user: create_user).imported + assert create_fact_check(user: create_bot_user).imported + end + + test "should set initial rating" do + create_verification_status_stuff + + # Test core statuses first + t = create_team + pm = create_project_media team: t + cd = create_claim_description project_media: pm + fc = create_fact_check claim_description: cd + assert_equal 'undetermined', fc.reload.rating + fc.rating = 'in_progress' + fc.save! + assert_equal 'in_progress', pm.reload.last_status + + # Test custom statuses now + t = create_team + value = { + "label": "Custom Status Label", + "active": "in_progress", + "default": "new", + "statuses": [ + { + "id": "new", + "style": { + "color": "blue" + }, + "locales": { + "en": { + "label": "New", + "description": "An item that did not start yet" + }, + "pt": { + "label": "Novo", + "description": "Um item que ainda não começou a ser verificado" + } + } + }, + { + "id": "in_progress", + "style": { + "color": "yellow" + }, + "locales": { + "en": { + "label": "Working on it", + "description": "We are working on it" + }, + "pt": { + "label": "Estamos trabalhando nisso", + "description": "Estamos trabalhando nisso" + } + } + } + ] + } + t.set_media_verification_statuses(value) + t.save! + + pm = create_project_media team: t + cd = create_claim_description project_media: pm + fc = create_fact_check claim_description: cd + assert_equal 'new', fc.reload.rating + fc.rating = 'in_progress' + fc.save! 
+ assert_equal 'in_progress', pm.reload.last_status + end + + test "should have team" do + fc = create_fact_check + assert_not_nil fc.team + end end diff --git a/test/models/project_media_6_test.rb b/test/models/project_media_6_test.rb index d4380406c5..921e03dcfa 100644 --- a/test/models/project_media_6_test.rb +++ b/test/models/project_media_6_test.rb @@ -484,4 +484,13 @@ def setup pms = ProjectMedia.where(team: t).to_a assert_queries(1, '=') { pms.map(&:team_avatar) } end + + test "should return fact-check" do + pm = create_project_media + assert_nil pm.fact_check + cd = create_claim_description project_media: pm + assert_nil pm.fact_check + fc = create_fact_check claim_description: cd + assert_equal fc, pm.fact_check + end end diff --git a/test/models/tag_test.rb b/test/models/tag_test.rb index 51e51d6f6b..0439f55832 100644 --- a/test/models/tag_test.rb +++ b/test/models/tag_test.rb @@ -271,4 +271,17 @@ def setup tt2.delete TagText.update_tags(tt1.id, t.id, tt2.id) end + + test "should treat ' tag' and 'tag' as the same tag, and not try to create a new tag" do + t = create_team + p = create_project team: t + pm1 = create_project_media project: p + pm2 = create_project_media project: p + + create_tag tag: 'foo', annotated: pm1 + + assert_nothing_raised do + create_tag tag: ' foo', annotated: pm2 + end + end end diff --git a/test/models/team_test.rb b/test/models/team_test.rb index f480761155..fabbc09b52 100644 --- a/test/models/team_test.rb +++ b/test/models/team_test.rb @@ -1219,7 +1219,7 @@ def setup t.save! 
u = create_user create_team_user team: t, user: u, role: 'admin' - with_current_user_and_team(u, t) do + with_current_user_and_team(u, t) do pm = create_project_media team: t, disable_es_callbacks: false cd = create_claim_description project_media: pm, disable_es_callbacks: false fc = create_fact_check claim_description: cd, language: 'fr' @@ -1258,4 +1258,9 @@ def setup assert_equal ['en'], result['fact_check_languages'] end end + + test "should return no team fact-checks by default" do + t = create_team + assert_equal [], t.fact_checks.to_a + end end diff --git a/test/sum-upload-coverage.sh b/test/sum-upload-coverage.sh index 402caaaa43..6ab001acf2 100755 --- a/test/sum-upload-coverage.sh +++ b/test/sum-upload-coverage.sh @@ -1,9 +1,9 @@ #!/bin/bash -if [[ "$TRAVIS_PULL_REQUEST" == "false" ]] && [[ $TRAVIS_TEST_RESULT == 0 ]] +if [[ "$GITHUB_EVENT" == "pull_request" || "$GITHUB_EVENT" == "push" ]] && [[ "$GITHUB_TEST_RESULT" == 'success' ]] then rm -rf ../coverage/* - aws s3 cp --recursive s3://check-api-travis/codeclimate/$TRAVIS_REPO_SLUG/$TRAVIS_BUILD_NUMBER/ ../coverage + aws s3 cp --recursive s3://check-api-github/codeclimate/$GITHUB_REPO/$GITHUB_BUILD_NUMBER/ ../coverage if [[ $(ls ../coverage/codeclimate.* | wc -l) -eq 3 ]] then # Make sure we are not dealing with a file that is still being uploaded @@ -15,7 +15,7 @@ then sleep 5 size=$(du -s ../coverage/ | cut -f1) done - ./cc-test-reporter sum-coverage --output - --parts 3 ../coverage/codeclimate.* | sed 's/\/home\/travis\/build\/meedan\/check-api\///g' > ../coverage/codeclimate.json + ./cc-test-reporter sum-coverage --output - --parts 3 ../coverage/codeclimate.* | sed 's/\/home\/runner\/work\/check-api\///g' > ../coverage/codeclimate.json cat ../coverage/codeclimate.json | ./cc-test-reporter upload-coverage --input - ./cc-test-reporter show-coverage ../coverage/codeclimate.json fi