From f1f3b6cb7acee60daf7d2ef0b1fd357253a6a0cf Mon Sep 17 00:00:00 2001
From: Caio Almeida <117518+caiosba@users.noreply.github.com>
Date: Sat, 24 Aug 2024 08:43:55 -0300
Subject: [PATCH] Export lists (#1992)

Allow lists (core, custom, search results, articles, filtered or not) to be exported to a CSV file that is sent by e-mail.

- [x] Add an `exportList` GraphQL mutation
- [x] Implement a generic export class that supports media, articles and feeds
- [x] Validate maximum number of results (which is a global configuration key)
- [x] Validate permission
- [x] Create Sidekiq job to export results
- [x] Create a CSV for the export
- [x] Save CSV in S3 using a pre-signed URL that expires after X days ("X" is a global configuration key)
- [x] Add support for MailCatcher
- [x] Send CSV by e-mail
- [x] Automated tests
- [x] Make sure it works for articles as well
- [x] Make sure it works for shared feeds as well

References: CV2-5067 and CV2-4979.
---
 app/graph/mutations/export_mutations.rb | 24 ++++
 app/graph/types/mutation_type.rb | 2 +
 app/lib/check_config.rb | 1 +
 app/mailers/export_list_mailer.rb | 13 ++
 app/models/ability.rb | 2 +-
 app/models/explainer.rb | 8 ++
 app/models/fact_check.rb | 8 ++
 app/models/feed.rb | 8 ++
 .../export_list_mailer/send_csv.html.erb | 130 ++++++++++++++++++
 .../export_list_mailer/send_csv.text.erb | 14 ++
 config/config.yml.example | 11 +-
 config/environments/development.rb | 8 ++
 config/initializers/plugins.rb | 2 +-
 config/locales/en.yml | 6 +
 lib/check_s3.rb | 9 ++
 lib/check_search.rb | 45 +++++-
 lib/list_export.rb | 67 +++++++++
 lib/relay.idl | 29 ++++
 public/relay.json | 125 +++++++++++++++++
 .../controllers/graphql_controller_11_test.rb | 47 ++++++-
 test/lib/check_s3_test.rb | 6 +
 test/lib/list_export_test.rb | 86 ++++++++++++
 22 files changed, 643 insertions(+), 8 deletions(-)
 create mode 100644 app/graph/mutations/export_mutations.rb
 create mode 100644 app/mailers/export_list_mailer.rb
 create mode 100644 app/views/export_list_mailer/send_csv.html.erb
 create mode 100644 app/views/export_list_mailer/send_csv.text.erb
 create mode 100644 lib/list_export.rb
 create mode 100644 test/lib/list_export_test.rb

diff --git a/app/graph/mutations/export_mutations.rb b/app/graph/mutations/export_mutations.rb
new file mode 100644
index 0000000000..a3dde76a1b
--- /dev/null
+++ b/app/graph/mutations/export_mutations.rb
@@ -0,0 +1,24 @@
+module ExportMutations
+  class ExportList < Mutations::BaseMutation
+    argument :query, GraphQL::Types::String, required: true # JSON
+    argument :type, GraphQL::Types::String, required: true # 'media', 'feed', 'fact_check' or 'explainer'
+
+    field :success, GraphQL::Types::Boolean, null: true
+
+    def resolve(query:, type:)
+      ability = context[:ability]
+      team = Team.find_if_can(Team.current.id, ability)
+      if ability.cannot?(:export_list, team)
+        { success: false }
+      else
+        export = ListExport.new(type.to_sym, query, team.id)
+        if export.number_of_rows > CheckConfig.get(:export_csv_maximum_number_of_results, 10000, :integer)
+          { success: false }
+        else
+          export.generate_csv_and_send_email_in_background(User.current)
+          { success: true }
+        end
+      end
+    end
+  end
+end
diff --git a/app/graph/types/mutation_type.rb b/app/graph/types/mutation_type.rb
index a87ea5528c..fbf56730b4 100644
--- a/app/graph/types/mutation_type.rb
+++ b/app/graph/types/mutation_type.rb
@@ -152,4 +152,6 @@ class MutationType < BaseObject
   field :createExplainerItem, mutation: ExplainerItemMutations::Create
   field :destroyExplainerItem, mutation: ExplainerItemMutations::Destroy
+
+
field :exportList, mutation: ExportMutations::ExportList end diff --git a/app/lib/check_config.rb b/app/lib/check_config.rb index fe34a355c9..764f747063 100644 --- a/app/lib/check_config.rb +++ b/app/lib/check_config.rb @@ -3,6 +3,7 @@ class CheckConfig def self.get(key, default = nil, type = nil) + key = key.to_s value = ENV[key] value ||= CONFIG[key] if CONFIG.has_key?(key) return default if value.nil? diff --git a/app/mailers/export_list_mailer.rb b/app/mailers/export_list_mailer.rb new file mode 100644 index 0000000000..c9fed76b02 --- /dev/null +++ b/app/mailers/export_list_mailer.rb @@ -0,0 +1,13 @@ +class ExportListMailer < ApplicationMailer + layout nil + + def send_csv(csv_file_url, user) + @csv_file_url = csv_file_url + @user = user + expire_in = Time.now.to_i + CheckConfig.get('export_csv_expire', 7.days.to_i, :integer) + @expire_in = I18n.l(Time.at(expire_in), format: :email) + subject = I18n.t('mails_notifications.export_list.subject') + Rails.logger.info "Sending export e-mail to #{@user.email}" + mail(to: @user.email, email_type: 'export_list', subject: subject) + end +end diff --git a/app/models/ability.rb b/app/models/ability.rb index 42d45f7fa6..1db39e2c31 100644 --- a/app/models/ability.rb +++ b/app/models/ability.rb @@ -57,7 +57,7 @@ def admin_perms can :destroy, Team, :id => @context_team.id can :create, TeamUser, :team_id => @context_team.id, role: ['admin'] can [:update, :destroy], TeamUser, team_id: @context_team.id - can :duplicate, Team, :id => @context_team.id + can [:duplicate, :export_list], Team, :id => @context_team.id can :set_privacy, Project, :team_id => @context_team.id can :read_feed_invitations, Feed, :team_id => @context_team.id can :destroy, Feed, :team_id => @context_team.id diff --git a/app/models/explainer.rb b/app/models/explainer.rb index a4319e718a..5b55a57694 100644 --- a/app/models/explainer.rb +++ b/app/models/explainer.rb @@ -48,6 +48,14 @@ def update_paragraphs_in_alegre self.class.delay_for(5.seconds).update_paragraphs_in_alegre(self.id, previous_paragraphs_count, Time.now.to_f) end + def self.get_exported_data(query, team) + data = [['ID', 'Title', 'Description', 'URL', 'Language']] + team.filtered_explainers(query).find_each do |exp| + data << [exp.id, exp.title, exp.description, exp.url, exp.language] + end + data + end + def self.update_paragraphs_in_alegre(id, previous_paragraphs_count, timestamp) explainer = Explainer.find(id) diff --git a/app/models/fact_check.rb b/app/models/fact_check.rb index 5d42782496..5830494615 100644 --- a/app/models/fact_check.rb +++ b/app/models/fact_check.rb @@ -47,6 +47,14 @@ def update_item_status end end + def self.get_exported_data(query, team) + data = [['ID', 'Title', 'Summary', 'URL', 'Language', 'Report Status', 'Imported?']] + team.filtered_fact_checks(query).find_each do |fc| + data << [fc.id, fc.title, fc.summary, fc.url, fc.language, fc.report_status, fc.imported.to_s] + end + data + end + private def set_language diff --git a/app/models/feed.rb b/app/models/feed.rb index 05a652024f..e90c2c2d84 100755 --- a/app/models/feed.rb +++ b/app/models/feed.rb @@ -172,6 +172,14 @@ def saved_search_was SavedSearch.find_by_id(self.saved_search_id_before_last_save) end + def get_exported_data(filters) + data = [['Title', 'Number of media', 'Number of requests', 'Number of fact-checks']] + self.filtered_clusters(filters).find_each do |cluster| + data << [cluster.title, cluster.media_count, cluster.requests_count, cluster.fact_checks_count] + end + data + end + # This takes some time to run because it 
involves external HTTP requests and writes to the database: # 1) If the query contains a media URL, it will be downloaded... if it contains some other URL, it will be sent to Pender # 2) Requests will be made to Alegre in order to index the request media and to look for similar requests diff --git a/app/views/export_list_mailer/send_csv.html.erb b/app/views/export_list_mailer/send_csv.html.erb new file mode 100644 index 0000000000..081169fd8c --- /dev/null +++ b/app/views/export_list_mailer/send_csv.html.erb @@ -0,0 +1,130 @@ +<%= render "shared/header" %> + + + + + +
[The remainder of this hunk is the HTML table layout of the e-mail body; only its ERB content is preserved here:]
+  <%= I18n.t(:"mails_notifications.export_list.hello", name: @user.name) %>
+  <%= I18n.t("mails_notifications.export_list.subject") %>
+  <%= I18n.t(:"mails_notifications.export_list.body") %>
+  <%=
+    link_to(I18n.t('mails_notifications.export_list.button_label'),
+      @csv_file_url,
+      :style => "text-decoration: none !important;color: #fff !important;"
+    )
+  %>
+  <%= image_tag("https://images.ctfassets.net/g118h5yoccvd/#{@direction[:arrow]}", width: "7", alt: "arrow-icon", style: "-ms-interpolation-mode: bicubic; border: 0 none; height: auto; line-height: 100%; outline: none; text-decoration: none;") %>
+  <%= I18n.t(:"mails_notifications.export_list.footer", date: @expire_in) %>
+ + +<%= render "shared/footer" %> diff --git a/app/views/export_list_mailer/send_csv.text.erb b/app/views/export_list_mailer/send_csv.text.erb new file mode 100644 index 0000000000..428c639142 --- /dev/null +++ b/app/views/export_list_mailer/send_csv.text.erb @@ -0,0 +1,14 @@ +<%= I18n.t('mails_notifications.export_list.hello', name: @user.name) %> + +<%= I18n.t('mails_notifications.export_list.subject') %> + +<%= I18n.t('mails_notifications.export_list.body') %> + +<%= I18n.t('mails_notifications.export_list.button_label') %>: <%= @csv_file_url %> + +<%= I18n.t('mails_notifications.export_list.footer', date: @expire_in ) %> + +... + +<%= strip_tags I18n.t("mails_notifications.copyright_html", app_name: CheckConfig.get('app_name')) %> +https://meedan.com diff --git a/config/config.yml.example b/config/config.yml.example index e14ec0ee77..44749ca977 100644 --- a/config/config.yml.example +++ b/config/config.yml.example @@ -160,6 +160,7 @@ development: &default smtp_user: # '' smtp_pass: # '' smtp_default_url_host: 'http://localhost:3333' # Used to construct URLs for links in email + smtp_mailcatcher_host: # 'host.docker.internal' # Pusher notification service https://pusher.com/channels # @@ -262,7 +263,7 @@ development: &default otel_traces_sampler: otel_custom_sampling_rate: - # Rate limits for tiplines + # Limits # # OPTIONAL # When not set, default values are used. @@ -270,12 +271,18 @@ development: &default tipline_user_max_messages_per_day: 1500 nlu_global_rate_limit: 100 nlu_user_rate_limit: 30 - devise_maximum_attempts: 5 devise_unlock_accounts_after: 1 login_rate_limit: 10 api_rate_limit: 100 + export_csv_maximum_number_of_results: 10000 + export_csv_expire: 604800 # Seconds: Default is 7 days + # Session + # + # OPTIONAL + # When not set, default values are used. + # session_store_key: '_checkdesk_session_dev' session_store_domain: 'localhost' test: diff --git a/config/environments/development.rb b/config/environments/development.rb index b8924f73e8..3f54f6587f 100644 --- a/config/environments/development.rb +++ b/config/environments/development.rb @@ -83,4 +83,12 @@ else puts '[WARNING] config.hosts not provided. Only requests from localhost are allowed. To change, update `whitelisted_hosts` in config.yml' end + + mailcatcher_host = ENV['smtp_mailcatcher_host'] || cfg['smtp_mailcatcher_host'] + unless mailcatcher_host.blank? + config.action_mailer.smtp_settings = { + address: mailcatcher_host, + port: 1025 + } + end end diff --git a/config/initializers/plugins.rb b/config/initializers/plugins.rb index 056a5b61a5..b928f936ff 100644 --- a/config/initializers/plugins.rb +++ b/config/initializers/plugins.rb @@ -1,2 +1,2 @@ # Load classes on boot, in production, that otherwise wouldn't be auto-loaded by default -CcDeville && Bot::Keep && Workflow::Workflow.workflows && CheckS3 && Bot::Tagger && Bot::Fetch && Bot::Smooch && Bot::Slack && Bot::Alegre && CheckChannels && RssFeed && UrlRewriter && ClusterTeam +CcDeville && Bot::Keep && Workflow::Workflow.workflows && CheckS3 && Bot::Tagger && Bot::Fetch && Bot::Smooch && Bot::Slack && Bot::Alegre && CheckChannels && RssFeed && UrlRewriter && ClusterTeam && ListExport diff --git a/config/locales/en.yml b/config/locales/en.yml index aa441378fc..e947f62ab1 100644 --- a/config/locales/en.yml +++ b/config/locales/en.yml @@ -476,6 +476,12 @@ en: constitutes acceptance of our updated Terms of Service. 
term_button: Terms of Service more_info: This is a one-time required legal notice sent to all Check users, even those who have unsubscribed by optional announcements. + export_list: + hello: Hello %{name} + subject: Check Data Export + body: Your requested Check data export is available to download. + button_label: Download Export + footer: This download link will expire on %{date}. mail_security: device_subject: 'Security alert: New login to %{app_name} from %{browser} on %{platform}' ip_subject: 'Security alert: New or unusual %{app_name} login' diff --git a/lib/check_s3.rb b/lib/check_s3.rb index af6d3c8288..7989cefbb4 100644 --- a/lib/check_s3.rb +++ b/lib/check_s3.rb @@ -65,4 +65,13 @@ def self.delete(*paths) client = Aws::S3::Client.new client.delete_objects(bucket: CheckConfig.get('storage_bucket'), delete: { objects: objects }) end + + def self.write_presigned(path, content_type, content, expires_in) + self.write(path, content_type, content) + bucket = CheckConfig.get('storage_bucket') + client = Aws::S3::Client.new + s3 = Aws::S3::Resource.new(client: client) + obj = s3.bucket(bucket).object(path) + obj.presigned_url(:get, expires_in: expires_in) + end end diff --git a/lib/check_search.rb b/lib/check_search.rb index cbcd41cea6..d8707e31a2 100644 --- a/lib/check_search.rb +++ b/lib/check_search.rb @@ -60,6 +60,10 @@ def initialize(options, file = nil, team_id = Team.current&.id) 'fact_check_published_on' => 'fact_check_published_on' } + def set_option(key, value) + @options[key] = value + end + def team_condition(team_id = nil) if feed_query? feed_teams = @options['feed_team_ids'].is_a?(Array) ? (@feed.team_ids & @options['feed_team_ids']) : @feed.team_ids @@ -329,12 +333,51 @@ def medias_get_search_result(query) @options['es_id'] ? $repository.find([@options['es_id']]).compact : $repository.search(query: query, collapse: collapse, sort: sort, size: @options['eslimit'], from: @options['esoffset']).results end + def self.get_exported_data(query, team_id) + team = Team.find(team_id) + search = CheckSearch.new(query, nil, team_id) + + # Prepare the export + data = [] + header = ['Claim', 'Item page URL', 'Status', 'Created by', 'Submitted at', 'Published at', 'Number of media', 'Tags'] + fields = team.team_tasks.sort + fields.each { |tt| header << tt.label } + data << header + + # No pagination for the export + search.set_option('esoffset', 0) + search.set_option('eslimit', CheckConfig.get(:export_csv_maximum_number_of_results, 10000, :integer)) + + # Iterate through each result and generate an output row for the CSV + search.medias.find_each do |pm| + row = [ + pm.claim_description&.description, + pm.full_url, + pm.status_i18n, + pm.author_name.to_s.gsub(/ \[.*\]$/, ''), + pm.created_at.strftime("%Y-%m-%d %H:%M:%S"), + pm.published_at&.strftime("%Y-%m-%d %H:%M:%S"), + pm.linked_items_count(true), + pm.tags_as_sentence(true) + ] + annotations = pm.get_annotations('task').map(&:load) + fields.each do |field| + annotation = annotations.find { |a| a.team_task_id == field.id } + answer = (annotation ? 
(begin annotation.first_response_obj.file_data[:file_urls].join("\n") rescue annotation.first_response.to_s end) : '') + answer = begin JSON.parse(answer).collect{ |x| x['url'] }.join(', ') rescue answer end + row << answer + end + data << row + end + data + end + private def adjust_es_window_size window_size = 10000 current_size = @options['esoffset'].to_i + @options['eslimit'].to_i - @options['eslimit'] = window_size - @options['esoffset'].to_i if current_size > window_size + @options['eslimit'] = window_size - @options['esoffset'].to_i if current_size > window_size end def adjust_project_filter diff --git a/lib/list_export.rb b/lib/list_export.rb new file mode 100644 index 0000000000..51ec959d60 --- /dev/null +++ b/lib/list_export.rb @@ -0,0 +1,67 @@ +class ListExport + TYPES = [:media, :feed, :fact_check, :explainer] + + def initialize(type, query, team_id) + @type = type + @query = query + @parsed_query = JSON.parse(@query) + @team_id = team_id + @team = Team.find(team_id) + @feed = Feed.where(id: @parsed_query['feed_id'], team_id: @team_id).last if type == :feed + raise "Invalid export type '#{type}'. Should be one of: #{TYPES}" unless TYPES.include?(type) + end + + def number_of_rows + case @type + when :media + CheckSearch.new(@query, nil, @team_id).number_of_results + when :feed + @feed.clusters_count(@parsed_query) + when :fact_check + @team.filtered_fact_checks(@parsed_query).count + when :explainer + @team.filtered_explainers(@parsed_query).count + end + end + + def generate_csv_and_send_email_in_background(user) + ListExport.delay.generate_csv_and_send_email(self, user.id) + end + + def generate_csv_and_send_email(user) + # Convert to CSV + csv_string = CSV.generate do |csv| + self.export_data.each do |row| + csv << row + end + end + + # Save to S3 + csv_file_url = CheckS3.write_presigned("export/#{@type}/#{@team_id}/#{Time.now.to_i}/#{Digest::MD5.hexdigest(@query)}.csv", 'text/csv', csv_string, CheckConfig.get('export_csv_expire', 7.days.to_i, :integer)) + + # Send to e-mail + ExportListMailer.delay.send_csv(csv_file_url, user) + + # Return path to CSV + csv_file_url + end + + def self.generate_csv_and_send_email(export, user_id) + export.generate_csv_and_send_email(User.find(user_id)) + end + + private + + def export_data + case @type + when :media + CheckSearch.get_exported_data(@query, @team_id) + when :feed + @feed.get_exported_data(@parsed_query) + when :fact_check + FactCheck.get_exported_data(@parsed_query, @team) + when :explainer + Explainer.get_exported_data(@parsed_query, @team) + end + end +end diff --git a/lib/relay.idl b/lib/relay.idl index 4c7445b1ae..e0e7bc11e7 100644 --- a/lib/relay.idl +++ b/lib/relay.idl @@ -8357,6 +8357,29 @@ type ExplainerItemEdge { node: ExplainerItem } +""" +Autogenerated input type of ExportList +""" +input ExportListInput { + """ + A unique identifier for the client performing the mutation. + """ + clientMutationId: String + query: String! + type: String! +} + +""" +Autogenerated return type of ExportList +""" +type ExportListPayload { + """ + A unique identifier for the client performing the mutation. + """ + clientMutationId: String + success: Boolean +} + """ Autogenerated input type of ExtractText """ @@ -9917,6 +9940,12 @@ type MutationType { """ input: DuplicateTeamMutationInput! ): DuplicateTeamMutationPayload + exportList( + """ + Parameters for ExportList + """ + input: ExportListInput! 
+ ): ExportListPayload extractText( """ Parameters for ExtractText diff --git a/public/relay.json b/public/relay.json index 10d08ecdd4..3cc4a0e499 100644 --- a/public/relay.json +++ b/public/relay.json @@ -45264,6 +45264,102 @@ "enumValues": null, "possibleTypes": null }, + { + "kind": "INPUT_OBJECT", + "name": "ExportListInput", + "description": "Autogenerated input type of ExportList", + "fields": null, + "inputFields": [ + { + "name": "query", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "type", + "description": null, + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "SCALAR", + "name": "String", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "interfaces": null, + "enumValues": null, + "possibleTypes": null + }, + { + "kind": "OBJECT", + "name": "ExportListPayload", + "description": "Autogenerated return type of ExportList", + "fields": [ + { + "name": "clientMutationId", + "description": "A unique identifier for the client performing the mutation.", + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "String", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, + { + "name": "success", + "description": null, + "args": [ + + ], + "type": { + "kind": "SCALAR", + "name": "Boolean", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + } + ], + "inputFields": null, + "interfaces": [ + + ], + "enumValues": null, + "possibleTypes": null + }, { "kind": "INPUT_OBJECT", "name": "ExtractTextInput", @@ -53582,6 +53678,35 @@ "isDeprecated": false, "deprecationReason": null }, + { + "name": "exportList", + "description": null, + "args": [ + { + "name": "input", + "description": "Parameters for ExportList", + "type": { + "kind": "NON_NULL", + "name": null, + "ofType": { + "kind": "INPUT_OBJECT", + "name": "ExportListInput", + "ofType": null + } + }, + "defaultValue": null, + "isDeprecated": false, + "deprecationReason": null + } + ], + "type": { + "kind": "OBJECT", + "name": "ExportListPayload", + "ofType": null + }, + "isDeprecated": false, + "deprecationReason": null + }, { "name": "extractText", "description": null, diff --git a/test/controllers/graphql_controller_11_test.rb b/test/controllers/graphql_controller_11_test.rb index af6670ab50..2cad8e2620 100644 --- a/test/controllers/graphql_controller_11_test.rb +++ b/test/controllers/graphql_controller_11_test.rb @@ -132,10 +132,10 @@ def teardown post :create, params: { query: query } assert_response :success response = JSON.parse(@response.body)['data']['me'] - data = response['accessible_teams']['edges'] + data = response['accessible_teams']['edges'].collect{ |edge| edge['node']['dbid'] }.sort assert_equal 2, data.size - assert_equal team1.id, data[0]['node']['dbid'] - assert_equal team2.id, data[1]['node']['dbid'] + assert_equal team1.id, data[0] + assert_equal team2.id, data[1] assert_equal 2, response['accessible_teams_count'] end @@ -159,4 +159,45 @@ def teardown assert_equal team1.id, data[0]['node']['dbid'] assert_equal 1, 
response['accessible_teams_count'] end + + test "should export list if it's a workspace admin and number of results is not over the limit" do + Sidekiq::Testing.inline! + u = create_user + t = create_team + create_team_user team: t, user: u, role: 'admin' + authenticate_with_user(u) + + query = "mutation { exportList(input: { query: \"{}\", type: \"media\" }) { success } }" + post :create, params: { query: query, team: t.slug } + assert_response :success + assert JSON.parse(@response.body)['data']['exportList']['success'] + end + + test "should not export list if it's not a workspace admin" do + Sidekiq::Testing.inline! + u = create_user + t = create_team + create_team_user team: t, user: u, role: 'editor' + authenticate_with_user(u) + + query = "mutation { exportList(input: { query: \"{}\", type: \"media\" }) { success } }" + post :create, params: { query: query, team: t.slug } + assert_response :success + assert !JSON.parse(@response.body)['data']['exportList']['success'] + end + + test "should not export list if it's over the limit" do + Sidekiq::Testing.inline! + u = create_user + t = create_team + create_team_user team: t, user: u, role: 'admin' + authenticate_with_user(u) + + stub_configs({ 'export_csv_maximum_number_of_results' => -1 }) do + query = "mutation { exportList(input: { query: \"{}\", type: \"media\" }) { success } }" + post :create, params: { query: query, team: t.slug } + assert_response :success + assert !JSON.parse(@response.body)['data']['exportList']['success'] + end + end end diff --git a/test/lib/check_s3_test.rb b/test/lib/check_s3_test.rb index d313f1b3df..1dc71a5c02 100644 --- a/test/lib/check_s3_test.rb +++ b/test/lib/check_s3_test.rb @@ -1,6 +1,12 @@ require_relative '../test_helper' class CheckS3Test < ActiveSupport::TestCase + def setup + end + + def teardown + end + test "should return resource" do assert_kind_of Aws::S3::Resource, CheckS3.resource end diff --git a/test/lib/list_export_test.rb b/test/lib/list_export_test.rb new file mode 100644 index 0000000000..668797d6e6 --- /dev/null +++ b/test/lib/list_export_test.rb @@ -0,0 +1,86 @@ +require_relative '../test_helper' + +class ListExportTest < ActiveSupport::TestCase + def setup + end + + def teardown + end + + test "should expire the export" do + t = create_team + create_team_task team_id: t.id, fieldset: 'tasks' + pm = create_project_media team: t + + stub_configs({ 'export_csv_expire' => 2 }) do + # Generate a CSV with the two exported items + export = ListExport.new(:media, '{}', t.id) + csv_url = export.generate_csv_and_send_email(create_user) + response = Net::HTTP.get_response(URI(csv_url)) + assert_equal 200, response.code.to_i + + # Make sure it expires after 2 seconds + sleep 3 # Just to be safe + response = Net::HTTP.get_response(URI(csv_url)) + assert_equal 403, response.code.to_i + end + end + + test "should export media CSV" do + t = create_team + create_team_task team_id: t.id, fieldset: 'tasks' + 2.times { create_project_media team: t } + + export = ListExport.new(:media, '{}', t.id) + csv_url = export.generate_csv_and_send_email(create_user) + response = Net::HTTP.get_response(URI(csv_url)) + assert_equal 200, response.code.to_i + csv_content = CSV.parse(response.body, headers: true) + assert_equal 2, csv_content.size + assert_equal 2, export.number_of_rows + end + + test "should export feed CSV" do + t = create_team + f = create_feed team: t + 2.times { f.clusters << create_cluster } + + export = ListExport.new(:feed, { feed_id: f.id }.to_json, t.id) + csv_url = 
export.generate_csv_and_send_email(create_user) + response = Net::HTTP.get_response(URI(csv_url)) + assert_equal 200, response.code.to_i + csv_content = CSV.parse(response.body, headers: true) + assert_equal 2, csv_content.size + assert_equal 2, export.number_of_rows + end + + test "should export fact-checks CSV" do + t = create_team + 2.times do + pm = create_project_media team: t + cd = create_claim_description project_media: pm + create_fact_check claim_description: cd + end + + export = ListExport.new(:fact_check, '{}', t.id) + csv_url = export.generate_csv_and_send_email(create_user) + response = Net::HTTP.get_response(URI(csv_url)) + assert_equal 200, response.code.to_i + csv_content = CSV.parse(response.body, headers: true) + assert_equal 2, csv_content.size + assert_equal 2, export.number_of_rows + end + + test "should export explainers CSV" do + t = create_team + 2.times { create_explainer team: t } + + export = ListExport.new(:explainer, '{}', t.id) + csv_url = export.generate_csv_and_send_email(create_user) + response = Net::HTTP.get_response(URI(csv_url)) + assert_equal 200, response.code.to_i + csv_content = CSV.parse(response.body, headers: true) + assert_equal 2, csv_content.size + assert_equal 2, export.number_of_rows + end +end
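
For reference, a minimal sketch of how the exported feature is exercised end to end, assuming an authenticated workspace admin; the GraphQL document mirrors the mutation added above, while the Rails console snippet and the Team.last / User.last lookups are illustrative assumptions rather than part of this change:

  # GraphQL request sent by a workspace admin (team slug passed as the `team` parameter, as in the tests above):
  #   mutation { exportList(input: { query: "{}", type: "media" }) { success } }

  # Rails console sketch: build an export directly and generate the CSV synchronously.
  export = ListExport.new(:media, '{}', Team.last.id)
  export.number_of_rows                                    # rows that would be exported, checked against export_csv_maximum_number_of_results
  csv_url = export.generate_csv_and_send_email(User.last)  # writes the CSV to S3 and e-mails the pre-signed URL
  csv_url                                                  # pre-signed URL; expires after export_csv_expire seconds

The mutation itself calls generate_csv_and_send_email_in_background, which enqueues the same work as a Sidekiq job instead of running it inline.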