diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml
index 9a35e688f9..d5126ae9ed 100644
--- a/.github/workflows/ci-tests.yml
+++ b/.github/workflows/ci-tests.yml
@@ -7,9 +7,12 @@ on:
branches:
- master
- develop
+ - epic*
+ - cv2*
pull_request:
branches:
- develop
+
env:
CC_TEST_REPORTER_ID: "${{ secrets.CC_TEST_REPORTER_ID }}"
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index f93c8f6e5c..719769d756 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -36,9 +36,10 @@ deploy_qa:
AWS_DEFAULT_REGION: $AWS_DEFAULT_REGION
GITHUB_TOKEN: $GITHUB_TOKEN
script:
- - pip install botocore==1.33.13
- - pip install boto3==1.33.13
- - pip install ecs-deploy==1.14.0
+ - pip install urllib3==2.0.6
+ - pip install botocore==1.31.62
+ - pip install boto3==1.28.62
+ - pip install ecs-deploy==1.15.0
- pip install awscli==1.31.13
- alias aws='docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_DEFAULT_REGION --rm amazon/aws-cli'
- aws ssm get-parameters-by-path --region $AWS_DEFAULT_REGION --path /qa/check-api/ --recursive --with-decryption --output text --query "Parameters[].[Name]" | sed -E 's#/qa/check-api/##' > env.qa.names
@@ -106,9 +107,10 @@ deploy_live:
AWS_DEFAULT_REGION: $AWS_DEFAULT_REGION
GITHUB_TOKEN: $GITHUB_TOKEN
script:
- - pip install botocore==1.33.13
- - pip install boto3==1.33.13
- - pip install ecs-deploy==1.14.0
+ - pip install urllib3==2.0.6
+ - pip install botocore==1.31.62
+ - pip install boto3==1.28.62
+ - pip install ecs-deploy==1.15.0
- pip install awscli==1.31.13
- alias aws='docker run -e AWS_ACCESS_KEY_ID -e AWS_SECRET_ACCESS_KEY -e AWS_DEFAULT_REGION --rm amazon/aws-cli'
- aws ssm get-parameters-by-path --region $AWS_DEFAULT_REGION --path /live/check-api/ --recursive --with-decryption --output text --query "Parameters[].[Name]" | sed -E 's#/live/check-api/##' > env.live.names
diff --git a/.rubocop.yml b/.rubocop.yml
index d9bb8d820f..2ff1fbf74d 100644
--- a/.rubocop.yml
+++ b/.rubocop.yml
@@ -224,7 +224,7 @@ Metrics/CyclomaticComplexity:
A complexity metric that is strongly correlated to the number
of test cases needed to validate a method.
Enabled: true
- Max: 12
+ Max: 13
Metrics/LineLength:
Description: 'Limit lines to 80 characters.'
diff --git a/app/graph/mutations/export_mutations.rb b/app/graph/mutations/export_mutations.rb
new file mode 100644
index 0000000000..a3dde76a1b
--- /dev/null
+++ b/app/graph/mutations/export_mutations.rb
@@ -0,0 +1,24 @@
+module ExportMutations
+ class ExportList < Mutations::BaseMutation
+ argument :query, GraphQL::Types::String, required: true # JSON
+ argument :type, GraphQL::Types::String, required: true # 'media', 'feed', 'fact-check' or 'explainer'
+
+ field :success, GraphQL::Types::Boolean, null: true
+
+ def resolve(query:, type:)
+ ability = context[:ability]
+ team = Team.find_if_can(Team.current.id, ability)
+ if ability.cannot?(:export_list, team)
+ { success: false }
+ else
+ export = ListExport.new(type.to_sym, query, team.id)
+ if export.number_of_rows > CheckConfig.get(:export_csv_maximum_number_of_results, 10000, :integer)
+ { success: false }
+ else
+ export.generate_csv_and_send_email_in_background(User.current)
+ { success: true }
+ end
+ end
+ end
+ end
+end
diff --git a/app/graph/types/mutation_type.rb b/app/graph/types/mutation_type.rb
index a87ea5528c..fbf56730b4 100644
--- a/app/graph/types/mutation_type.rb
+++ b/app/graph/types/mutation_type.rb
@@ -152,4 +152,6 @@ class MutationType < BaseObject
field :createExplainerItem, mutation: ExplainerItemMutations::Create
field :destroyExplainerItem, mutation: ExplainerItemMutations::Destroy
+
+ field :exportList, mutation: ExportMutations::ExportList
end
diff --git a/app/graph/types/project_group_type.rb b/app/graph/types/project_group_type.rb
index aad98569c2..bdcd6e0a34 100644
--- a/app/graph/types/project_group_type.rb
+++ b/app/graph/types/project_group_type.rb
@@ -8,7 +8,6 @@ class ProjectGroupType < DefaultObject
field :description, GraphQL::Types::String, null: true
field :team_id, GraphQL::Types::Int, null: true
field :team, PublicTeamType, null: true
- field :medias_count, GraphQL::Types::Int, null: true
field :projects, ProjectType.connection_type, null: true
end
diff --git a/app/graph/types/project_type.rb b/app/graph/types/project_type.rb
index ab39118c35..ed215af54d 100644
--- a/app/graph/types/project_type.rb
+++ b/app/graph/types/project_type.rb
@@ -9,7 +9,6 @@ class ProjectType < DefaultObject
field :dbid, GraphQL::Types::Int, null: true
field :permissions, GraphQL::Types::String, null: true
field :pusher_channel, GraphQL::Types::String, null: true
- field :medias_count, GraphQL::Types::Int, null: true
field :search_id, GraphQL::Types::String, null: true
field :url, GraphQL::Types::String, null: true
field :search, CheckSearchType, null: true
diff --git a/app/lib/check_config.rb b/app/lib/check_config.rb
index fe34a355c9..764f747063 100644
--- a/app/lib/check_config.rb
+++ b/app/lib/check_config.rb
@@ -3,6 +3,7 @@
class CheckConfig
def self.get(key, default = nil, type = nil)
+ key = key.to_s
value = ENV[key]
value ||= CONFIG[key] if CONFIG.has_key?(key)
return default if value.nil?
diff --git a/app/mailers/export_list_mailer.rb b/app/mailers/export_list_mailer.rb
new file mode 100644
index 0000000000..c9fed76b02
--- /dev/null
+++ b/app/mailers/export_list_mailer.rb
@@ -0,0 +1,13 @@
+class ExportListMailer < ApplicationMailer
+ layout nil
+
+ def send_csv(csv_file_url, user)
+ @csv_file_url = csv_file_url
+ @user = user
+ expires_at = Time.now.to_i + CheckConfig.get('export_csv_expire', 7.days.to_i, :integer)
+ @expire_in = I18n.l(Time.at(expires_at), format: :email)
+ subject = I18n.t('mails_notifications.export_list.subject')
+ Rails.logger.info "Sending export e-mail to #{@user.email}"
+ mail(to: @user.email, email_type: 'export_list', subject: subject)
+ end
+end
diff --git a/app/models/ability.rb b/app/models/ability.rb
index 42d45f7fa6..1db39e2c31 100644
--- a/app/models/ability.rb
+++ b/app/models/ability.rb
@@ -57,7 +57,7 @@ def admin_perms
can :destroy, Team, :id => @context_team.id
can :create, TeamUser, :team_id => @context_team.id, role: ['admin']
can [:update, :destroy], TeamUser, team_id: @context_team.id
- can :duplicate, Team, :id => @context_team.id
+ can [:duplicate, :export_list], Team, :id => @context_team.id
can :set_privacy, Project, :team_id => @context_team.id
can :read_feed_invitations, Feed, :team_id => @context_team.id
can :destroy, Feed, :team_id => @context_team.id
diff --git a/app/models/annotations/annotation.rb b/app/models/annotations/annotation.rb
index 58bbe9cb89..e8274d62e5 100644
--- a/app/models/annotations/annotation.rb
+++ b/app/models/annotations/annotation.rb
@@ -17,9 +17,11 @@ def destroy
dec = self.disable_es_callbacks
skip_ability = self.skip_check_ability
a = self.load
- a.disable_es_callbacks = dec
- a.skip_check_ability = skip_ability
- a.destroy
+ unless a.nil?
+ a.disable_es_callbacks = dec
+ a.skip_check_ability = skip_ability
+ a.destroy
+ end
end
private
diff --git a/app/models/bot/smooch.rb b/app/models/bot/smooch.rb
index 9177a69039..765a8363c7 100644
--- a/app/models/bot/smooch.rb
+++ b/app/models/bot/smooch.rb
@@ -542,7 +542,11 @@ def self.process_menu_option_value(value, option, message, language, workflow, a
end
def self.is_a_shortcut_for_submission?(state, message)
- self.is_v2? && (state == 'main' || state == 'waiting_for_message') && (!message['mediaUrl'].blank? || ::Bot::Alegre.get_number_of_words(message['text'].to_s) > CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer))
+ self.is_v2? && (state == 'main' || state == 'waiting_for_message') && (
+ !message['mediaUrl'].blank? ||
+ ::Bot::Alegre.get_number_of_words(message['text'].to_s) > CheckConfig.get('min_number_of_words_for_tipline_submit_shortcut', 10, :integer) ||
+ !Twitter::TwitterText::Extractor.extract_urls(message['text'].to_s).blank? # URL in message?
+ )
end
def self.process_menu_option(message, state, app_id)
diff --git a/app/models/explainer.rb b/app/models/explainer.rb
index a4319e718a..5b55a57694 100644
--- a/app/models/explainer.rb
+++ b/app/models/explainer.rb
@@ -48,6 +48,14 @@ def update_paragraphs_in_alegre
self.class.delay_for(5.seconds).update_paragraphs_in_alegre(self.id, previous_paragraphs_count, Time.now.to_f)
end
+ def self.get_exported_data(query, team)
+ data = [['ID', 'Title', 'Description', 'URL', 'Language']]
+ team.filtered_explainers(query).find_each do |exp|
+ data << [exp.id, exp.title, exp.description, exp.url, exp.language]
+ end
+ data
+ end
+
def self.update_paragraphs_in_alegre(id, previous_paragraphs_count, timestamp)
explainer = Explainer.find(id)
diff --git a/app/models/fact_check.rb b/app/models/fact_check.rb
index 5d42782496..5830494615 100644
--- a/app/models/fact_check.rb
+++ b/app/models/fact_check.rb
@@ -47,6 +47,14 @@ def update_item_status
end
end
+ def self.get_exported_data(query, team)
+ data = [['ID', 'Title', 'Summary', 'URL', 'Language', 'Report Status', 'Imported?']]
+ team.filtered_fact_checks(query).find_each do |fc|
+ data << [fc.id, fc.title, fc.summary, fc.url, fc.language, fc.report_status, fc.imported.to_s]
+ end
+ data
+ end
+
private
def set_language
diff --git a/app/models/feed.rb b/app/models/feed.rb
index 05a652024f..e90c2c2d84 100755
--- a/app/models/feed.rb
+++ b/app/models/feed.rb
@@ -172,6 +172,14 @@ def saved_search_was
SavedSearch.find_by_id(self.saved_search_id_before_last_save)
end
+ def get_exported_data(filters)
+ data = [['Title', 'Number of media', 'Number of requests', 'Number of fact-checks']]
+ self.filtered_clusters(filters).find_each do |cluster|
+ data << [cluster.title, cluster.media_count, cluster.requests_count, cluster.fact_checks_count]
+ end
+ data
+ end
+
# This takes some time to run because it involves external HTTP requests and writes to the database:
# 1) If the query contains a media URL, it will be downloaded... if it contains some other URL, it will be sent to Pender
# 2) Requests will be made to Alegre in order to index the request media and to look for similar requests
diff --git a/app/models/project.rb b/app/models/project.rb
index 4d579bccfb..26c669a91e 100644
--- a/app/models/project.rb
+++ b/app/models/project.rb
@@ -40,41 +40,6 @@ class Project < ApplicationRecord
check_settings
- cached_field :medias_count,
- start_as: 0,
- recalculate: :recalculate_medias_count,
- update_on: [
- {
- model: Relationship,
- affected_ids: proc { |r| ProjectMedia.where(id: r.target_id).map(&:project_id) },
- events: {
- save: :recalculate,
- destroy: :recalculate
- }
- },
- {
- model: ProjectMedia,
- if: proc { |pm| !pm.project_id.nil? },
- affected_ids: proc { |pm| [pm.project_id] },
- events: {
- create: :recalculate,
- destroy: :recalculate
- }
- },
- {
- model: ProjectMedia,
- if: proc { |pm| pm.saved_change_to_archived? || pm.saved_change_to_project_id? },
- affected_ids: proc { |pm| [pm.project_id, pm.project_id_before_last_save] },
- events: {
- update: :recalculate,
- }
- },
- ]
-
- def recalculate_medias_count
- self.team.medias_count(self)
- end
-
def check_search_team
self.team.check_search_team
end
diff --git a/app/models/project_group.rb b/app/models/project_group.rb
index 6b5c767b04..78303a9f6e 100644
--- a/app/models/project_group.rb
+++ b/app/models/project_group.rb
@@ -7,10 +7,6 @@ class ProjectGroup < ApplicationRecord
belongs_to :team, optional: true
has_many :projects, dependent: :nullify
- def medias_count
- self.projects.map(&:medias_count).sum
- end
-
def project_medias
ProjectMedia.joins(:project).where('projects.project_group_id' => self.id)
end
diff --git a/app/models/team.rb b/app/models/team.rb
index a3d8572e51..be43ee2c7b 100644
--- a/app/models/team.rb
+++ b/app/models/team.rb
@@ -532,7 +532,7 @@ def filtered_fact_checks(filters = {})
query = query.where('fact_checks.imported' => !!filters[:imported]) unless filters[:imported].nil?
# Filter by report status
- query = query.where('fact_checks.report_status' => filters[:report_status].to_a.map(&:to_s)) unless filters[:report_status].blank?
+ query = query.where('fact_checks.report_status' => [filters[:report_status]].flatten.map(&:to_s)) unless filters[:report_status].blank?
# Filter by text
query = self.filter_by_keywords(query, filters) if filters[:text].to_s.size > 2
@@ -545,11 +545,11 @@ def filtered_fact_checks(filters = {})
end
def filter_by_keywords(query, filters, type = 'FactCheck')
- tsquery = Team.sanitize_sql_array(["websearch_to_tsquery(?)", filters[:text]]) # FIXME: May not work for all languages
+ tsquery = Team.sanitize_sql_array(["websearch_to_tsquery(?)", filters[:text]])
if type == 'FactCheck'
- tsvector = "to_tsvector('simple', coalesce(title, '') || ' ' || coalesce(summary, '') || coalesce(url, ''))"
+ tsvector = "to_tsvector('simple', coalesce(title, '') || ' ' || coalesce(summary, '') || ' ' || coalesce(url, '') || ' ' || coalesce(claim_descriptions.description, '') || ' ' || coalesce(claim_descriptions.context, ''))"
else
- tsvector = "to_tsvector('simple', coalesce(title, '') || ' ' || coalesce(description, '') || coalesce(url, ''))"
+ tsvector = "to_tsvector('simple', coalesce(title, '') || ' ' || coalesce(description, '') || ' ' || coalesce(url, ''))"
end
query.where(Arel.sql("#{tsvector} @@ #{tsquery}"))
end
diff --git a/app/views/export_list_mailer/send_csv.html.erb b/app/views/export_list_mailer/send_csv.html.erb
new file mode 100644
index 0000000000..081169fd8c
--- /dev/null
+++ b/app/views/export_list_mailer/send_csv.html.erb
@@ -0,0 +1,130 @@
+<%= render "shared/header" %>
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <%= I18n.t(:"mails_notifications.export_list.hello", name: @user.name) %>
+
+
+
+
+
+
+ <%= I18n.t(:"mails_notifications.export_list.body") %>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ <%=
+ link_to(I18n.t('mails_notifications.export_list.button_label'),
+ @csv_file_url,
+ :style => "text-decoration: none !important;color: #fff !important;"
+ )
+ %>
+
+ |
+
+ <%= image_tag("https://images.ctfassets.net/g118h5yoccvd/#{@direction[:arrow]}", width: "7", alt: "arrow-icon", style: "-ms-interpolation-mode: bicubic; border: 0 none; height: auto; line-height: 100%; outline: none; text-decoration: none;") %>
+ |
+
+
+ |
+
+
+
+
+
+
+
+ <%= I18n.t(:"mails_notifications.export_list.footer", date: @expire_in) %>
+
+
+
+
+
+
+<%= render "shared/footer" %>
diff --git a/app/views/export_list_mailer/send_csv.text.erb b/app/views/export_list_mailer/send_csv.text.erb
new file mode 100644
index 0000000000..428c639142
--- /dev/null
+++ b/app/views/export_list_mailer/send_csv.text.erb
@@ -0,0 +1,14 @@
+<%= I18n.t('mails_notifications.export_list.hello', name: @user.name) %>
+
+<%= I18n.t('mails_notifications.export_list.subject') %>
+
+<%= I18n.t('mails_notifications.export_list.body') %>
+
+<%= I18n.t('mails_notifications.export_list.button_label') %>: <%= @csv_file_url %>
+
+<%= I18n.t('mails_notifications.export_list.footer', date: @expire_in ) %>
+
+...
+
+<%= strip_tags I18n.t("mails_notifications.copyright_html", app_name: CheckConfig.get('app_name')) %>
+https://meedan.com
diff --git a/config/config.yml.example b/config/config.yml.example
index e14ec0ee77..44749ca977 100644
--- a/config/config.yml.example
+++ b/config/config.yml.example
@@ -160,6 +160,7 @@ development: &default
smtp_user: # ''
smtp_pass: # ''
smtp_default_url_host: 'http://localhost:3333' # Used to construct URLs for links in email
+ smtp_mailcatcher_host: # 'host.docker.internal'
# Pusher notification service https://pusher.com/channels
#
@@ -262,7 +263,7 @@ development: &default
otel_traces_sampler:
otel_custom_sampling_rate:
- # Rate limits for tiplines
+ # Limits
#
# OPTIONAL
# When not set, default values are used.
@@ -270,12 +271,18 @@ development: &default
tipline_user_max_messages_per_day: 1500
nlu_global_rate_limit: 100
nlu_user_rate_limit: 30
-
devise_maximum_attempts: 5
devise_unlock_accounts_after: 1
login_rate_limit: 10
api_rate_limit: 100
+ export_csv_maximum_number_of_results: 10000
+ export_csv_expire: 604800 # Seconds: Default is 7 days
+ # Session
+ #
+ # OPTIONAL
+ # When not set, default values are used.
+ #
session_store_key: '_checkdesk_session_dev'
session_store_domain: 'localhost'
test:
diff --git a/config/environments/development.rb b/config/environments/development.rb
index b8924f73e8..3f54f6587f 100644
--- a/config/environments/development.rb
+++ b/config/environments/development.rb
@@ -83,4 +83,12 @@
else
puts '[WARNING] config.hosts not provided. Only requests from localhost are allowed. To change, update `whitelisted_hosts` in config.yml'
end
+
+ mailcatcher_host = ENV['smtp_mailcatcher_host'] || cfg['smtp_mailcatcher_host']
+ unless mailcatcher_host.blank?
+ config.action_mailer.smtp_settings = {
+ address: mailcatcher_host,
+ port: 1025
+ }
+ end
end
diff --git a/config/initializers/plugins.rb b/config/initializers/plugins.rb
index 056a5b61a5..b928f936ff 100644
--- a/config/initializers/plugins.rb
+++ b/config/initializers/plugins.rb
@@ -1,2 +1,2 @@
# Load classes on boot, in production, that otherwise wouldn't be auto-loaded by default
-CcDeville && Bot::Keep && Workflow::Workflow.workflows && CheckS3 && Bot::Tagger && Bot::Fetch && Bot::Smooch && Bot::Slack && Bot::Alegre && CheckChannels && RssFeed && UrlRewriter && ClusterTeam
+CcDeville && Bot::Keep && Workflow::Workflow.workflows && CheckS3 && Bot::Tagger && Bot::Fetch && Bot::Smooch && Bot::Slack && Bot::Alegre && CheckChannels && RssFeed && UrlRewriter && ClusterTeam && ListExport
diff --git a/config/locales/en.yml b/config/locales/en.yml
index aa441378fc..e947f62ab1 100644
--- a/config/locales/en.yml
+++ b/config/locales/en.yml
@@ -476,6 +476,12 @@ en:
constitutes acceptance of our updated Terms of Service.
term_button: Terms of Service
more_info: This is a one-time required legal notice sent to all Check users, even those who have unsubscribed by optional announcements.
+ export_list:
+ hello: Hello %{name}
+ subject: Check Data Export
+ body: Your requested Check data export is available to download.
+ button_label: Download Export
+ footer: This download link will expire on %{date}.
mail_security:
device_subject: 'Security alert: New login to %{app_name} from %{browser} on %{platform}'
ip_subject: 'Security alert: New or unusual %{app_name} login'
diff --git a/lib/check_s3.rb b/lib/check_s3.rb
index af6d3c8288..7989cefbb4 100644
--- a/lib/check_s3.rb
+++ b/lib/check_s3.rb
@@ -65,4 +65,13 @@ def self.delete(*paths)
client = Aws::S3::Client.new
client.delete_objects(bucket: CheckConfig.get('storage_bucket'), delete: { objects: objects })
end
+
+ def self.write_presigned(path, content_type, content, expires_in)
+ self.write(path, content_type, content)
+ bucket = CheckConfig.get('storage_bucket')
+ client = Aws::S3::Client.new
+ s3 = Aws::S3::Resource.new(client: client)
+ obj = s3.bucket(bucket).object(path)
+ obj.presigned_url(:get, expires_in: expires_in)
+ end
end
diff --git a/lib/check_search.rb b/lib/check_search.rb
index f78ba9dea2..ce8746209e 100644
--- a/lib/check_search.rb
+++ b/lib/check_search.rb
@@ -60,6 +60,10 @@ def initialize(options, file = nil, team_id = Team.current&.id)
'fact_check_published_on' => 'fact_check_published_on'
}
+ def set_option(key, value)
+ @options[key] = value
+ end
+
def team_condition(team_id = nil)
if feed_query?
feed_teams = @options['feed_team_ids'].is_a?(Array) ? (@feed.team_ids & @options['feed_team_ids']) : @feed.team_ids
@@ -329,12 +333,51 @@ def medias_get_search_result(query)
@options['es_id'] ? $repository.find([@options['es_id']]).compact : $repository.search(query: query, collapse: collapse, sort: sort, size: @options['eslimit'], from: @options['esoffset']).results
end
+ def self.get_exported_data(query, team_id)
+ team = Team.find(team_id)
+ search = CheckSearch.new(query, nil, team_id)
+
+ # Prepare the export
+ data = []
+ header = ['Claim', 'Item page URL', 'Status', 'Created by', 'Submitted at', 'Published at', 'Number of media', 'Tags']
+ fields = team.team_tasks.sort
+ fields.each { |tt| header << tt.label }
+ data << header
+
+ # No pagination for the export
+ search.set_option('esoffset', 0)
+ search.set_option('eslimit', CheckConfig.get(:export_csv_maximum_number_of_results, 10000, :integer))
+
+ # Iterate through each result and generate an output row for the CSV
+ search.medias.find_each do |pm|
+ row = [
+ pm.claim_description&.description,
+ pm.full_url,
+ pm.status_i18n,
+ pm.author_name.to_s.gsub(/ \[.*\]$/, ''),
+ pm.created_at.strftime("%Y-%m-%d %H:%M:%S"),
+ pm.published_at&.strftime("%Y-%m-%d %H:%M:%S"),
+ pm.linked_items_count,
+ pm.tags_as_sentence
+ ]
+ annotations = pm.get_annotations('task').map(&:load)
+ fields.each do |field|
+ annotation = annotations.find { |a| a.team_task_id == field.id }
+ answer = (annotation ? (begin annotation.first_response_obj.file_data[:file_urls].join("\n") rescue annotation.first_response.to_s end) : '')
+ answer = begin JSON.parse(answer).collect{ |x| x['url'] }.join(', ') rescue answer end
+ row << answer
+ end
+ data << row
+ end
+ data
+ end
+
private
def adjust_es_window_size
window_size = 10000
current_size = @options['esoffset'].to_i + @options['eslimit'].to_i
- @options['eslimit'] = window_size - @options['esoffset'].to_i if current_size > window_size
+ @options['eslimit'] = window_size - @options['esoffset'].to_i if current_size > window_size
end
def adjust_project_filter
diff --git a/lib/list_export.rb b/lib/list_export.rb
new file mode 100644
index 0000000000..533f74771f
--- /dev/null
+++ b/lib/list_export.rb
@@ -0,0 +1,67 @@
+class ListExport
+ TYPES = [:media, :feed, :fact_check, :explainer]
+
+ def initialize(type, query, team_id)
+ @type = type
+ @query = query
+ @parsed_query = JSON.parse(@query).with_indifferent_access
+ @team_id = team_id
+ @team = Team.find(team_id)
+ raise "Invalid export type '#{type}'. Should be one of: #{TYPES}" unless TYPES.include?(type)
+ @feed = Feed.find(@parsed_query['feed_id']) if type == :feed && @team.is_part_of_feed?(Feed.find(@parsed_query['feed_id']))
+ end
+
+ def number_of_rows
+ case @type
+ when :media
+ CheckSearch.new(@query, nil, @team_id).number_of_results
+ when :feed
+ @feed.clusters_count(@parsed_query)
+ when :fact_check
+ @team.filtered_fact_checks(@parsed_query).count
+ when :explainer
+ @team.filtered_explainers(@parsed_query).count
+ end
+ end
+
+ def generate_csv_and_send_email_in_background(user)
+ ListExport.delay.generate_csv_and_send_email(self, user.id)
+ end
+
+ def generate_csv_and_send_email(user)
+ # Convert to CSV
+ csv_string = CSV.generate do |csv|
+ self.export_data.each do |row|
+ csv << row
+ end
+ end
+
+ # Save to S3
+ csv_file_url = CheckS3.write_presigned("export/#{@type}/#{@team_id}/#{Time.now.to_i}/#{Digest::MD5.hexdigest(@query)}.csv", 'text/csv', csv_string, CheckConfig.get('export_csv_expire', 7.days.to_i, :integer))
+
+ # Send to e-mail
+ ExportListMailer.delay.send_csv(csv_file_url, user)
+
+ # Return path to CSV
+ csv_file_url
+ end
+
+ def self.generate_csv_and_send_email(export, user_id)
+ export.generate_csv_and_send_email(User.find(user_id))
+ end
+
+ private
+
+ def export_data
+ case @type
+ when :media
+ CheckSearch.get_exported_data(@query, @team_id)
+ when :feed
+ @feed.get_exported_data(@parsed_query)
+ when :fact_check
+ FactCheck.get_exported_data(@parsed_query, @team)
+ when :explainer
+ Explainer.get_exported_data(@parsed_query, @team)
+ end
+ end
+end
diff --git a/lib/relay.idl b/lib/relay.idl
index 4c7445b1ae..5edaad82c4 100644
--- a/lib/relay.idl
+++ b/lib/relay.idl
@@ -8357,6 +8357,29 @@ type ExplainerItemEdge {
node: ExplainerItem
}
+"""
+Autogenerated input type of ExportList
+"""
+input ExportListInput {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+ query: String!
+ type: String!
+}
+
+"""
+Autogenerated return type of ExportList
+"""
+type ExportListPayload {
+ """
+ A unique identifier for the client performing the mutation.
+ """
+ clientMutationId: String
+ success: Boolean
+}
+
"""
Autogenerated input type of ExtractText
"""
@@ -9917,6 +9940,12 @@ type MutationType {
"""
input: DuplicateTeamMutationInput!
): DuplicateTeamMutationPayload
+ exportList(
+ """
+ Parameters for ExportList
+ """
+ input: ExportListInput!
+ ): ExportListPayload
extractText(
"""
Parameters for ExtractText
@@ -10466,7 +10495,6 @@ type Project implements Node {
description: String
id: ID!
is_default: Boolean
- medias_count: Int
permissions: String
privacy: Int
project_group: ProjectGroup
@@ -10545,7 +10573,6 @@ type ProjectGroup implements Node {
dbid: Int
description: String
id: ID!
- medias_count: Int
permissions: String
projects(
"""
diff --git a/lib/tasks/check_khousheh.rake b/lib/tasks/check_khousheh.rake
index 098d9f3c06..98f467258f 100644
--- a/lib/tasks/check_khousheh.rake
+++ b/lib/tasks/check_khousheh.rake
@@ -20,12 +20,13 @@ namespace :check do
FileUtils.mkdir_p(File.join(Rails.root, 'tmp', 'feed-clusters-input'))
started = Time.now.to_i
sort = [{ annotated_id: { order: :asc } }]
+ all_types = CheckSearch::MEDIA_TYPES + ['blank']
Feed.find_each do |feed|
# Only feeds that are sharing media
if feed.data_points.to_a.include?(2)
output = { call_id: "#{TIMESTAMP}-#{feed.uuid}", nodes: [], edges: [] }
Team.current = feed.team
- query = { feed_id: feed.id, feed_view: 'media', show_similar: true }
+ query = { feed_id: feed.id, feed_view: 'media', show_similar: true, show: all_types }
es_query = CheckSearch.new(query.to_json).medias_query
total = CheckSearch.new(query.to_json, nil, feed.team.id).number_of_results
pages = (total / PER_PAGE.to_f).ceil
@@ -211,7 +212,8 @@ namespace :check do
end
# Add items to clusters
Team.current = feed.team
- query = { feed_id: feed.id, feed_view: 'media', show_similar: true }
+ all_types = CheckSearch::MEDIA_TYPES + ['blank']
+ query = { feed_id: feed.id, feed_view: 'media', show_similar: true, show: all_types }
es_query = CheckSearch.new(query.to_json).medias_query
total = CheckSearch.new(query.to_json, nil, feed.team.id).number_of_results
pages = (total / PER_PAGE.to_f).ceil
diff --git a/public/relay.json b/public/relay.json
index 10d08ecdd4..8f9e81dfe1 100644
--- a/public/relay.json
+++ b/public/relay.json
@@ -45264,6 +45264,102 @@
"enumValues": null,
"possibleTypes": null
},
+ {
+ "kind": "INPUT_OBJECT",
+ "name": "ExportListInput",
+ "description": "Autogenerated input type of ExportList",
+ "fields": null,
+ "inputFields": [
+ {
+ "name": "query",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "type",
+ "description": null,
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ }
+ },
+ "defaultValue": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "defaultValue": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "interfaces": null,
+ "enumValues": null,
+ "possibleTypes": null
+ },
+ {
+ "kind": "OBJECT",
+ "name": "ExportListPayload",
+ "description": "Autogenerated return type of ExportList",
+ "fields": [
+ {
+ "name": "clientMutationId",
+ "description": "A unique identifier for the client performing the mutation.",
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "String",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
+ {
+ "name": "success",
+ "description": null,
+ "args": [
+
+ ],
+ "type": {
+ "kind": "SCALAR",
+ "name": "Boolean",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "inputFields": null,
+ "interfaces": [
+
+ ],
+ "enumValues": null,
+ "possibleTypes": null
+ },
{
"kind": "INPUT_OBJECT",
"name": "ExtractTextInput",
@@ -53582,6 +53678,35 @@
"isDeprecated": false,
"deprecationReason": null
},
+ {
+ "name": "exportList",
+ "description": null,
+ "args": [
+ {
+ "name": "input",
+ "description": "Parameters for ExportList",
+ "type": {
+ "kind": "NON_NULL",
+ "name": null,
+ "ofType": {
+ "kind": "INPUT_OBJECT",
+ "name": "ExportListInput",
+ "ofType": null
+ }
+ },
+ "defaultValue": null,
+ "isDeprecated": false,
+ "deprecationReason": null
+ }
+ ],
+ "type": {
+ "kind": "OBJECT",
+ "name": "ExportListPayload",
+ "ofType": null
+ },
+ "isDeprecated": false,
+ "deprecationReason": null
+ },
{
"name": "extractText",
"description": null,
@@ -56595,20 +56720,6 @@
"isDeprecated": false,
"deprecationReason": null
},
- {
- "name": "medias_count",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
{
"name": "permissions",
"description": null,
@@ -57031,20 +57142,6 @@
"isDeprecated": false,
"deprecationReason": null
},
- {
- "name": "medias_count",
- "description": null,
- "args": [
-
- ],
- "type": {
- "kind": "SCALAR",
- "name": "Int",
- "ofType": null
- },
- "isDeprecated": false,
- "deprecationReason": null
- },
{
"name": "permissions",
"description": null,
diff --git a/test/controllers/graphql_controller_11_test.rb b/test/controllers/graphql_controller_11_test.rb
index af6670ab50..2cad8e2620 100644
--- a/test/controllers/graphql_controller_11_test.rb
+++ b/test/controllers/graphql_controller_11_test.rb
@@ -132,10 +132,10 @@ def teardown
post :create, params: { query: query }
assert_response :success
response = JSON.parse(@response.body)['data']['me']
- data = response['accessible_teams']['edges']
+ data = response['accessible_teams']['edges'].collect{ |edge| edge['node']['dbid'] }.sort
assert_equal 2, data.size
- assert_equal team1.id, data[0]['node']['dbid']
- assert_equal team2.id, data[1]['node']['dbid']
+ assert_equal team1.id, data[0]
+ assert_equal team2.id, data[1]
assert_equal 2, response['accessible_teams_count']
end
@@ -159,4 +159,45 @@ def teardown
assert_equal team1.id, data[0]['node']['dbid']
assert_equal 1, response['accessible_teams_count']
end
+
+ test "should export list if it's a workspace admin and number of results is not over the limit" do
+ Sidekiq::Testing.inline!
+ u = create_user
+ t = create_team
+ create_team_user team: t, user: u, role: 'admin'
+ authenticate_with_user(u)
+
+ query = "mutation { exportList(input: { query: \"{}\", type: \"media\" }) { success } }"
+ post :create, params: { query: query, team: t.slug }
+ assert_response :success
+ assert JSON.parse(@response.body)['data']['exportList']['success']
+ end
+
+ test "should not export list if it's not a workspace admin" do
+ Sidekiq::Testing.inline!
+ u = create_user
+ t = create_team
+ create_team_user team: t, user: u, role: 'editor'
+ authenticate_with_user(u)
+
+ query = "mutation { exportList(input: { query: \"{}\", type: \"media\" }) { success } }"
+ post :create, params: { query: query, team: t.slug }
+ assert_response :success
+ assert !JSON.parse(@response.body)['data']['exportList']['success']
+ end
+
+ test "should not export list if it's over the limit" do
+ Sidekiq::Testing.inline!
+ u = create_user
+ t = create_team
+ create_team_user team: t, user: u, role: 'admin'
+ authenticate_with_user(u)
+
+ stub_configs({ 'export_csv_maximum_number_of_results' => -1 }) do
+ query = "mutation { exportList(input: { query: \"{}\", type: \"media\" }) { success } }"
+ post :create, params: { query: query, team: t.slug }
+ assert_response :success
+ assert !JSON.parse(@response.body)['data']['exportList']['success']
+ end
+ end
end
diff --git a/test/controllers/graphql_controller_4_test.rb b/test/controllers/graphql_controller_4_test.rb
index 2bdd4ef54b..60dc8f83b1 100644
--- a/test/controllers/graphql_controller_4_test.rb
+++ b/test/controllers/graphql_controller_4_test.rb
@@ -47,7 +47,6 @@ def teardown
test "should bulk-send project medias to trash" do
@pms.each { |pm| assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.archived }
- @ps.each { |p| assert_equal 1, p.reload.medias_count }
assert_search_finds_all({ archived: CheckArchivedFlags::FlagCodes::NONE })
assert_search_finds_none({ archived: CheckArchivedFlags::FlagCodes::TRASHED })
assert_equal 0, CheckPusher::Worker.jobs.size
@@ -57,7 +56,6 @@ def teardown
assert_response :success
@pms.each { |pm| assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm.reload.archived }
- @ps.each { |p| assert_equal 0, p.reload.medias_count }
assert_search_finds_all({ archived: CheckArchivedFlags::FlagCodes::TRASHED })
assert_search_finds_none({ archived: CheckArchivedFlags::FlagCodes::NONE })
assert_equal 1, CheckPusher::Worker.jobs.size
@@ -90,7 +88,6 @@ def teardown
Sidekiq::Worker.drain_all
sleep 1
@pms.each { |pm| assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm.reload.archived }
- @ps.each { |p| assert_equal 0, p.reload.medias_count }
assert_search_finds_all({ archived: CheckArchivedFlags::FlagCodes::TRASHED })
assert_search_finds_none({ archived: CheckArchivedFlags::FlagCodes::NONE })
assert_equal 0, CheckPusher::Worker.jobs.size
@@ -100,7 +97,6 @@ def teardown
assert_response :success
@pms.each { |pm| assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.reload.archived }
- @ps.each { |p| assert_equal 1, p.reload.medias_count }
assert_search_finds_all({ archived: CheckArchivedFlags::FlagCodes::NONE })
assert_search_finds_none({ archived: CheckArchivedFlags::FlagCodes::TRASHED })
assert_equal 1, CheckPusher::Worker.jobs.size
@@ -113,7 +109,6 @@ def teardown
Sidekiq::Worker.drain_all
sleep 1
@pms.each { |pm| assert_equal CheckArchivedFlags::FlagCodes::TRASHED, pm.reload.archived }
- @ps.each { |p| assert_equal 0, p.reload.medias_count }
assert_search_finds_all({ archived: CheckArchivedFlags::FlagCodes::TRASHED })
assert_search_finds_none({ archived: CheckArchivedFlags::FlagCodes::NONE })
assert_equal 0, CheckPusher::Worker.jobs.size
@@ -123,8 +118,6 @@ def teardown
assert_response :success
@pms.each { |pm| assert_equal CheckArchivedFlags::FlagCodes::NONE, pm.reload.archived }
- @ps.each { |p| assert_equal 0, p.reload.medias_count }
- assert_equal @pms.length, add_to.reload.medias_count
assert_search_finds_all({ archived: CheckArchivedFlags::FlagCodes::NONE })
assert_search_finds_none({ archived: CheckArchivedFlags::FlagCodes::TRASHED })
assert_equal 2, CheckPusher::Worker.jobs.size
@@ -291,17 +284,11 @@ def teardown
invalid_id_1 = Base64.encode64("ProjectMedia/0")
invalid_id_2 = Base64.encode64("Project/#{pm1.id}")
invalid_id_3 = random_string
- assert_equal 4, @p1.reload.medias_count
- assert_equal 2, @p2.reload.medias_count
- assert_equal 0, p4.reload.medias_count
ids = []
[@pm1.graphql_id, @pm2.graphql_id, pm1.graphql_id, pm2.graphql_id, invalid_id_1, invalid_id_2, invalid_id_3].each { |id| ids << id }
query = 'mutation { updateProjectMedias(input: { clientMutationId: "1", ids: ' + ids.to_json + ', action: "move_to", params: "{\"move_to\": \"' + p4.id.to_s + '\"}" }) { team { dbid } } }'
post :create, params: { query: query, team: @t.slug }
assert_response :success
- assert_equal 0, @p1.reload.medias_count
- assert_equal 0, @p2.reload.medias_count
- assert_equal 6, p4.reload.medias_count
# verify move similar items
assert_equal p4.id, t_pm1.reload.project_id
assert_equal p4.id, t2_pm1.reload.project_id
diff --git a/test/controllers/graphql_controller_test.rb b/test/controllers/graphql_controller_test.rb
index 839ddaf9bd..786141273f 100644
--- a/test/controllers/graphql_controller_test.rb
+++ b/test/controllers/graphql_controller_test.rb
@@ -397,7 +397,7 @@ def setup
p = create_project team: t
pm = create_project_media project: p
create_comment annotated: pm, annotator: u
- query = "query GetById { project(id: \"#{p.id}\") { medias_count, project_medias(first: 1) { edges { node { permissions } } } } }"
+ query = "query GetById { project(id: \"#{p.id}\") { project_medias(first: 1) { edges { node { permissions } } } } }"
post :create, params: { query: query, team: 'team' }
assert_response :success
assert_not_equal '{}', JSON.parse(@response.body)['data']['project']['project_medias']['edges'][0]['node']['permissions']
diff --git a/test/lib/check_s3_test.rb b/test/lib/check_s3_test.rb
index d313f1b3df..1dc71a5c02 100644
--- a/test/lib/check_s3_test.rb
+++ b/test/lib/check_s3_test.rb
@@ -1,6 +1,12 @@
require_relative '../test_helper'
class CheckS3Test < ActiveSupport::TestCase
+ def setup
+ end
+
+ def teardown
+ end
+
test "should return resource" do
assert_kind_of Aws::S3::Resource, CheckS3.resource
end
diff --git a/test/lib/list_export_test.rb b/test/lib/list_export_test.rb
new file mode 100644
index 0000000000..668797d6e6
--- /dev/null
+++ b/test/lib/list_export_test.rb
@@ -0,0 +1,86 @@
+require_relative '../test_helper'
+
+class ListExportTest < ActiveSupport::TestCase
+ def setup
+ end
+
+ def teardown
+ end
+
+ test "should expire the export" do
+ t = create_team
+ create_team_task team_id: t.id, fieldset: 'tasks'
+ pm = create_project_media team: t
+
+ stub_configs({ 'export_csv_expire' => 2 }) do
+ # Generate a CSV with the two exported items
+ export = ListExport.new(:media, '{}', t.id)
+ csv_url = export.generate_csv_and_send_email(create_user)
+ response = Net::HTTP.get_response(URI(csv_url))
+ assert_equal 200, response.code.to_i
+
+ # Make sure it expires after 2 seconds
+ sleep 3 # Just to be safe
+ response = Net::HTTP.get_response(URI(csv_url))
+ assert_equal 403, response.code.to_i
+ end
+ end
+
+ test "should export media CSV" do
+ t = create_team
+ create_team_task team_id: t.id, fieldset: 'tasks'
+ 2.times { create_project_media team: t }
+
+ export = ListExport.new(:media, '{}', t.id)
+ csv_url = export.generate_csv_and_send_email(create_user)
+ response = Net::HTTP.get_response(URI(csv_url))
+ assert_equal 200, response.code.to_i
+ csv_content = CSV.parse(response.body, headers: true)
+ assert_equal 2, csv_content.size
+ assert_equal 2, export.number_of_rows
+ end
+
+ test "should export feed CSV" do
+ t = create_team
+ f = create_feed team: t
+ 2.times { f.clusters << create_cluster }
+
+ export = ListExport.new(:feed, { feed_id: f.id }.to_json, t.id)
+ csv_url = export.generate_csv_and_send_email(create_user)
+ response = Net::HTTP.get_response(URI(csv_url))
+ assert_equal 200, response.code.to_i
+ csv_content = CSV.parse(response.body, headers: true)
+ assert_equal 2, csv_content.size
+ assert_equal 2, export.number_of_rows
+ end
+
+ test "should export fact-checks CSV" do
+ t = create_team
+ 2.times do
+ pm = create_project_media team: t
+ cd = create_claim_description project_media: pm
+ create_fact_check claim_description: cd
+ end
+
+ export = ListExport.new(:fact_check, '{}', t.id)
+ csv_url = export.generate_csv_and_send_email(create_user)
+ response = Net::HTTP.get_response(URI(csv_url))
+ assert_equal 200, response.code.to_i
+ csv_content = CSV.parse(response.body, headers: true)
+ assert_equal 2, csv_content.size
+ assert_equal 2, export.number_of_rows
+ end
+
+ test "should export explainers CSV" do
+ t = create_team
+ 2.times { create_explainer team: t }
+
+ export = ListExport.new(:explainer, '{}', t.id)
+ csv_url = export.generate_csv_and_send_email(create_user)
+ response = Net::HTTP.get_response(URI(csv_url))
+ assert_equal 200, response.code.to_i
+ csv_content = CSV.parse(response.body, headers: true)
+ assert_equal 2, csv_content.size
+ assert_equal 2, export.number_of_rows
+ end
+end
diff --git a/test/models/bot/smooch_6_test.rb b/test/models/bot/smooch_6_test.rb
index 6473d95dc2..de5b67ff21 100644
--- a/test/models/bot/smooch_6_test.rb
+++ b/test/models/bot/smooch_6_test.rb
@@ -664,12 +664,12 @@ def send_message_outside_24_hours_window(template, pm = nil)
test "should not duplicate messages when saving" do
@team.set_languages ['en']
@team.save!
- url = 'http://localhost'
- send_message url, '1', url, '1'
+ message_text = 'not_a_url' # Not a URL, not media, and not longer than 'min_number_of_words_for_tipline_submit_shortcut'
+ send_message message_text, '1', message_text, '1'
assert_state 'search'
Sidekiq::Worker.drain_all
tr = TiplineRequest.last
- assert_equal 2, tr.smooch_data['text'].split("\n#{Bot::Smooch::MESSAGE_BOUNDARY}").select{ |x| x.chomp.strip == url }.size
+ assert_equal 2, tr.smooch_data['text'].split("\n#{Bot::Smooch::MESSAGE_BOUNDARY}").select{ |x| x.chomp.strip == message_text }.size
end
test "should get search results in different languages" do
diff --git a/test/models/bot/smooch_test.rb b/test/models/bot/smooch_test.rb
index 8724731980..f825154547 100644
--- a/test/models/bot/smooch_test.rb
+++ b/test/models/bot/smooch_test.rb
@@ -789,4 +789,27 @@ def teardown
tr = pm.tipline_requests.last
assert_equal 'en', tr.smooch_user_request_language
end
+
+ test "should submit message for factchecking" do
+ Bot::Smooch.stubs(:is_v2?).returns(true)
+ state='main'
+
+ # Should not be a submission shortcut
+ message = {"text"=>"abc"}
+ assert_equal(false, Bot::Smooch.is_a_shortcut_for_submission?(state,message), "Unexpected shortcut")
+
+ # Should be a submission shortcut
+ message = {"text"=>"abc http://example.com"}
+ assert_equal(true, Bot::Smooch.is_a_shortcut_for_submission?(state,message), "Missed URL shortcut")
+
+ # Should be a submission shortcut
+ message = {"text"=>"abc", "mediaUrl"=>"not blank"}
+ assert_equal(true, Bot::Smooch.is_a_shortcut_for_submission?(state,message), "Missed media shortcut")
+
+ # Should be a submission shortcut
+ message = {"text"=>"abc example.com"}
+ assert_equal(true, Bot::Smooch.is_a_shortcut_for_submission?(state,message), "Missed non-qualified URL shortcut")
+
+ Bot::Smooch.unstub(:is_v2?)
+ end
end
diff --git a/test/models/project_group_test.rb b/test/models/project_group_test.rb
index 959000629f..a8bcf2b6ec 100644
--- a/test/models/project_group_test.rb
+++ b/test/models/project_group_test.rb
@@ -36,28 +36,6 @@ def setup
assert_equal [pg], t.reload.project_groups
end
- test "should have medias count" do
- RequestStore.store[:skip_cached_field_update] = false
- t = create_team
- pg = create_project_group team: t
- assert_equal 0, pg.medias_count
- p1 = create_project team: t
- p1.project_group = pg
- p1.save!
- assert_equal 0, p1.medias_count
- create_project_media project: p1
- assert_equal 1, p1.medias_count
- create_project_media project: p1
- assert_equal 2, p1.medias_count
- p2 = create_project team: t
- p2.project_group = pg
- p2.save!
- assert_equal 0, p2.medias_count
- create_project_media project: p2
- assert_equal 1, p2.medias_count
- assert_equal 3, pg.reload.medias_count
- end
-
test "should have project medias" do
t = create_team
pg = create_project_group team: t
diff --git a/test/models/project_test.rb b/test/models/project_test.rb
index 39362aa68d..e278d7ef50 100644
--- a/test/models/project_test.rb
+++ b/test/models/project_test.rb
@@ -121,14 +121,6 @@ def setup
assert_equal [m1, m2].sort, p.reload.project_medias.map(&:media).sort
end
- test "should get project medias count" do
- t = create_team
- p = create_project team: t
- create_project_media project: p
- create_project_media project: p
- assert_equal 2, p.medias_count
- end
-
test "should have annotations" do
pm = create_project_media
c1 = create_comment annotated: nil
@@ -550,42 +542,11 @@ def setup
end
end
- test "should not include trashed items in medias count" do
- p = create_project
- create_project_media project: p
- create_project_media project: p
- create_project_media project: p, archived: CheckArchivedFlags::FlagCodes::TRASHED
- assert_equal 2, p.reload.medias_count
- end
-
test "should have search team" do
assert_kind_of CheckSearch, create_project.check_search_team
assert_kind_of Array, create_project.check_search_team.projects
end
- test "should cache medias_count" do
- RequestStore.store[:skip_cached_field_update] = false
- t = create_team
- p = create_project team: t
- pm1 = create_project_media team: t, project: nil
- pm2 = create_project_media team: t, project: nil
- pm3 = create_project_media team: t, project: nil
- assert_equal 0, p.reload.medias_count
- pm1.project_id = p.id; pm1.save!
- assert_equal 1, p.reload.medias_count
- pm2.project_id = p.id; pm2.save!
- assert_equal 2, p.reload.medias_count
- pm3.project_id = p.id; pm3.save!
- assert_equal 3, p.reload.medias_count
- pm3.destroy!
- assert_equal 2, p.reload.medias_count
- assert_equal 0, pm2.reload.sources_count
- r = create_relationship source_id: pm1.id, target_id: pm2.id, relationship_type: Relationship.confirmed_type
- assert_equal 1, pm2.reload.sources_count
- assert_equal 1, p.reload.medias_count
- RequestStore.store[:skip_cached_field_update] = true
- end
-
test "should have a project group" do
t = create_team
p = create_project team: t
@@ -657,8 +618,6 @@ def setup
p2.items_destination_project_id = p3.id
p2.destroy
assert_equal [pm3.id, pm4.id], p3.reload.project_media_ids.sort
- # assert_equal p3.title, pm3.folder
- assert_equal 2, p3.medias_count
end
end
RequestStore.store[:skip_cached_field_update] = true
diff --git a/test/models/relationship_test.rb b/test/models/relationship_test.rb
index 8eca19286e..455ba30880 100644
--- a/test/models/relationship_test.rb
+++ b/test/models/relationship_test.rb
@@ -17,13 +17,9 @@ def setup
pm2 = create_project_media team: t, project: p2
assert_equal p1, pm1.reload.project
assert_equal p2, pm2.reload.project
- assert_equal 1, p1.reload.medias_count
- assert_equal 1, p2.reload.medias_count
r = create_relationship relationship_type: Relationship.confirmed_type, source_id: pm1.id, target_id: pm2.id
assert_equal p1, pm1.reload.project
assert_equal p1, pm2.reload.project
- assert_equal 1, p1.reload.medias_count
- assert_equal 0, p2.reload.medias_count
end
test "should create relationship between items with same media" do
diff --git a/test/models/team_2_test.rb b/test/models/team_2_test.rb
index 1542c9491e..5b2df95442 100644
--- a/test/models/team_2_test.rb
+++ b/test/models/team_2_test.rb
@@ -1117,16 +1117,12 @@ def setup
t.rules = rules.to_json
t.save!
pm1 = create_project_media project: p0, disable_es_callbacks: false
- assert_equal 1, p0.reload.medias_count
- assert_equal 0, p1.reload.medias_count
s = pm1.last_status_obj
s.status = 'in_progress'
s.save!
sleep 2
result = $repository.find(get_es_id(pm1))
assert_equal p1.id, result['project_id']
- assert_equal 0, p0.reload.medias_count
- assert_equal 1, p1.reload.medias_count
pm2 = create_project_media project: p0, disable_es_callbacks: false
sleep 2
assert_equal p1.id, pm1.reload.project_id
@@ -1532,12 +1528,13 @@ def setup
Sidekiq::Testing.fake!
t = create_team
# Fact-checks
- create_fact_check title: 'Some Other Test', claim_description: create_claim_description(project_media: create_project_media(team: t))
- create_fact_check title: 'Bar Bravo Foo Test', claim_description: create_claim_description(project_media: create_project_media(team: t))
+ create_fact_check title: 'Some Other Test', claim_description: create_claim_description(description: 'Claim', project_media: create_project_media(team: t))
+ create_fact_check title: 'Bar Bravo Foo Test', claim_description: create_claim_description(context: 'Claim', project_media: create_project_media(team: t))
create_fact_check title: 'Foo Alpha Bar Test', claim_description: create_claim_description(project_media: create_project_media(team: t))
assert_equal 3, t.filtered_fact_checks.count
assert_equal 3, t.filtered_fact_checks(text: 'Test').count
assert_equal 2, t.filtered_fact_checks(text: 'Foo Bar').count
+ assert_equal 2, t.filtered_fact_checks(text: 'Claim').count
assert_equal 1, t.filtered_fact_checks(text: 'Foo Bar Bravo').count
assert_equal 1, t.filtered_fact_checks(text: 'Foo Bar Alpha').count
assert_equal 0, t.filtered_fact_checks(text: 'Foo Bar Delta').count
diff --git a/test/models/team_test.rb b/test/models/team_test.rb
index fabbc09b52..2b0f0c176d 100644
--- a/test/models/team_test.rb
+++ b/test/models/team_test.rb
@@ -515,7 +515,6 @@ def setup
u = create_user
create_team_user team: t, user: u
assert_equal 0, p.reload.project_medias.count
- assert_equal 0, p.reload.medias_count
rules = []
rules << {
"name": random_string,
@@ -547,7 +546,6 @@ def setup
create_project_media team: t
create_project_media user: u
assert_equal 1, p.reload.project_medias.count
- assert_equal 1, p.reload.medias_count
end
test "should set default language when creating team" do
@@ -573,7 +571,6 @@ def setup
u2 = create_user
create_team_user team: t, user: u
assert_equal 0, p.reload.project_medias.count
- assert_equal 0, p.reload.medias_count
rules = []
rules << {
"name": random_string,
@@ -636,7 +633,6 @@ def setup
assert_equal 0, pm2.reload.archived
assert_equal 0, pm3.reload.archived
assert_equal 1, p.reload.project_medias.count
- assert_equal 1, p.reload.medias_count
end
test "should create default fieldsets when team is created" do
@@ -670,7 +666,6 @@ def setup
p = create_project team: t
pm = create_project_media team: t
assert_equal 0, p.reload.project_medias.count
- assert_equal 0, p.reload.medias_count
rules = []
rules << {
"name": random_string,
@@ -702,12 +697,10 @@ def setup
tk.response = { annotation_type: 'task_response_single_choice', set_fields: { response_single_choice: { selected: 'Bar' }.to_json }.to_json }.to_json
tk.save!
assert_equal 0, p.reload.project_medias.count
- assert_equal 0, p.reload.medias_count
tk = Task.find(tk.id)
tk.response = { annotation_type: 'task_response_single_choice', set_fields: { response_single_choice: { selected: 'Foo' }.to_json }.to_json }.to_json
tk.save!
assert_equal 1, p.reload.project_medias.count
- assert_equal 1, p.reload.medias_count
end
test "should match rule by assignment" do
@@ -719,7 +712,6 @@ def setup
p = create_project team: t
pm = create_project_media team: t
assert_equal 0, p.reload.project_medias.count
- assert_equal 0, p.reload.medias_count
rules = []
rules << {
"name": random_string,
@@ -749,7 +741,6 @@ def setup
t.save!
Assignment.create! assigned: pm.last_status_obj.becomes(Annotation), assigner: create_user, user: u
assert_equal 1, p.reload.project_medias.count
- assert_equal 1, p.reload.medias_count
end
test "should match rule by text task answer" do
@@ -760,7 +751,6 @@ def setup
p = create_project team: t
pm = create_project_media team: t
assert_equal 0, p.reload.project_medias.count
- assert_equal 0, p.reload.medias_count
rules = []
rules << {
"name": random_string,
@@ -792,12 +782,10 @@ def setup
tk.response = { annotation_type: 'task_response_free_text', set_fields: { response_free_text: 'test test' }.to_json }.to_json
tk.save!
assert_equal 0, p.reload.project_medias.count
- assert_equal 0, p.reload.medias_count
tk = Task.find(tk.id)
tk.response = { annotation_type: 'task_response_free_text', set_fields: { response_free_text: 'test foo test' }.to_json }.to_json
tk.save!
assert_equal 1, p.reload.project_medias.count
- assert_equal 1, p.reload.medias_count
end
test "should allow default BotUser to be added on creation" do
|