diff --git a/api/src/functions/cpfValidation/cpfValidation.scenarios.ts b/api/src/functions/cpfValidation/cpfValidation.scenarios.ts
new file mode 100644
index 00000000..d24ff747
--- /dev/null
+++ b/api/src/functions/cpfValidation/cpfValidation.scenarios.ts
@@ -0,0 +1,8 @@
+import type { ScenarioData } from '@redwoodjs/testing/api'
+
+export const standard = defineScenario({
+  // Define the "fixture" to write into your test database here
+  // See guide: https://redwoodjs.com/docs/testing#scenarios
+})
+
+export type StandardScenario = ScenarioData
diff --git a/api/src/functions/cpfValidation/cpfValidation.test.ts b/api/src/functions/cpfValidation/cpfValidation.test.ts
new file mode 100644
index 00000000..8153683c
--- /dev/null
+++ b/api/src/functions/cpfValidation/cpfValidation.test.ts
@@ -0,0 +1,57 @@
+// import { S3EventRecord } from 'aws-lambda'
+
+// import { handler } from './cpfValidation'
+
+// Improve this test with help from the Redwood Testing Doc:
+// https://redwoodjs.com/docs/testing#testing-functions
+
+describe('cpfValidation function', () => {
+  it('Dummy test', () => {
+    expect(1 + 1).toBe(2)
+  })
+  // it('Should respond with 200', async () => {
+  //   const record: S3EventRecord = {
+  //     eventVersion: '2.0',
+  //     eventSource: 'aws:s3',
+  //     eventName: 'ObjectCreated:Put',
+  //     eventTime: '1970-01-01T00:00:00.000Z',
+  //     userIdentity: { principalId: 'test-principalId' },
+  //     requestParameters: { sourceIPAddress: 'test-sourceIPAddress' },
+  //     responseElements: {
+  //       'x-amz-request-id': 'test-x-amz-request-id',
+  //       'x-amz-id-2': 'test-x-amz-id-2',
+  //     },
+  //     awsRegion: 'us-east-1',
+  //     s3: {
+  //       s3SchemaVersion: '1.0',
+  //       configurationId: 'test-configurationId',
+  //       bucket: {
+  //         name: 'test-bucket',
+  //         arn: 'test-arn',
+  //         ownerIdentity: {
+  //           principalId: 'test-principalId',
+  //         },
+  //       },
+  //       object: {
+  //         key: 'test-key',
+  //         size: 1234,
+  //         eTag: 'test-etag',
+  //         sequencer: 'test-sequencer',
+  //       },
+  //     },
+  //   }
+  //   const s3Event = {
+  //     Records: [record],
+  //   }
+  //   const response = await handler(s3Event, null, null)
+  //   const { data } = JSON.parse(response.body)
+  //   expect(response.statusCode).toBe(200)
+  //   expect(data).toBe('cpfValidation function')
+})
+
+// You can also use scenarios to test your api functions
+// See guide here: https://redwoodjs.com/docs/testing#scenarios
+//
+// scenario('Scenario test', async () => {
+//
+// })
diff --git a/api/src/functions/cpfValidation/cpfValidation.ts b/api/src/functions/cpfValidation/cpfValidation.ts
new file mode 100644
index 00000000..98be06fc
--- /dev/null
+++ b/api/src/functions/cpfValidation/cpfValidation.ts
@@ -0,0 +1,27 @@
+import https from 'https'
+
+import { S3Event, S3Handler } from 'aws-lambda'
+
+import { logger } from 'src/lib/logger'
+
+const apiEndpoint = 'https://example.com'
+
+export const handler: S3Handler = async (event: S3Event): Promise<void> => {
+  try {
+    const bucket = event.Records[0].s3.bucket.name
+    const key = event.Records[0].s3.object.key
+
+    const options = {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json',
+      },
+    }
+
+    // call API endpoint with S3 key
+    https.request(apiEndpoint, options, (res) => {})
+  } catch (error) {
+    logger.error('Error processing S3 event:', error)
+    throw error
+  }
+}
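Note on cpfValidation.ts above: the handler computes bucket and key and builds POST options, but the https.request(...) call is never written to, ended, or awaited, so the Lambda can return before any request goes out. A minimal sketch of one way to complete it, assuming the validation endpoint accepts a JSON body of { bucket, key } (the payload shape and endpoint behavior are not defined in this diff):

import https from 'https'

// Hypothetical helper (not part of this change): POST the S3 object reference to the
// validation API and resolve once the response ends, so the handler can await the call.
// The { bucket, key } payload is an assumed shape, not one specified by the diff.
function postForValidation(
  endpoint: string,
  bucket: string,
  key: string
): Promise<number | undefined> {
  return new Promise((resolve, reject) => {
    const req = https.request(
      endpoint,
      { method: 'POST', headers: { 'Content-Type': 'application/json' } },
      (res) => {
        res.on('data', () => {}) // drain the response so 'end' fires
        res.on('end', () => resolve(res.statusCode))
      }
    )
    req.on('error', reject)
    req.write(JSON.stringify({ bucket, key }))
    req.end()
  })
}

Inside the handler this would become await postForValidation(apiEndpoint, bucket, key), keeping the existing try/catch and logging.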
diff --git a/api/src/functions/excelToJson/excelToJson.scenarios.ts b/api/src/functions/excelToJson/excelToJson.scenarios.ts
new file mode 100644
index 00000000..d24ff747
--- /dev/null
+++ b/api/src/functions/excelToJson/excelToJson.scenarios.ts
@@ -0,0 +1,8 @@
+import type { ScenarioData } from '@redwoodjs/testing/api'
+
+export const standard = defineScenario({
+  // Define the "fixture" to write into your test database here
+  // See guide: https://redwoodjs.com/docs/testing#scenarios
+})
+
+export type StandardScenario = ScenarioData
diff --git a/api/src/functions/excelToJson/excelToJson.test.ts b/api/src/functions/excelToJson/excelToJson.test.ts
new file mode 100644
index 00000000..01d78839
--- /dev/null
+++ b/api/src/functions/excelToJson/excelToJson.test.ts
@@ -0,0 +1,57 @@
+// import { S3EventRecord } from 'aws-lambda'
+
+// import { handler } from './excelToJson'
+
+// Improve this test with help from the Redwood Testing Doc:
+// https://redwoodjs.com/docs/testing#testing-functions
+
+describe('excelToJson function', () => {
+  it('Dummy test', () => {
+    expect(1 + 1).toBe(2)
+  })
+  // it('Should respond with 200', async () => {
+  //   const record: S3EventRecord = {
+  //     eventVersion: '2.0',
+  //     eventSource: 'aws:s3',
+  //     eventName: 'ObjectCreated:Put',
+  //     eventTime: '1970-01-01T00:00:00.000Z',
+  //     userIdentity: { principalId: 'test-principalId' },
+  //     requestParameters: { sourceIPAddress: 'test-sourceIPAddress' },
+  //     responseElements: {
+  //       'x-amz-request-id': 'test-x-amz-request-id',
+  //       'x-amz-id-2': 'test-x-amz-id-2',
+  //     },
+  //     awsRegion: 'us-east-1',
+  //     s3: {
+  //       s3SchemaVersion: '1.0',
+  //       configurationId: 'test-configurationId',
+  //       bucket: {
+  //         name: 'test-bucket',
+  //         arn: 'test-arn',
+  //         ownerIdentity: {
+  //           principalId: 'test-principalId',
+  //         },
+  //       },
+  //       object: {
+  //         key: 'test-key',
+  //         size: 1234,
+  //         eTag: 'test-etag',
+  //         sequencer: 'test-sequencer',
+  //       },
+  //     },
+  //   }
+  //   const s3Event = {
+  //     Records: [record],
+  //   }
+  //   const response = await handler(s3Event, null, null)
+  //   const { data } = JSON.parse(response.body)
+  //   expect(response.statusCode).toBe(200)
+  //   expect(data).toBe('excelToJson function')
+})
+
+// You can also use scenarios to test your api functions
+// See guide here: https://redwoodjs.com/docs/testing#scenarios
+//
+// scenario('Scenario test', async () => {
+//
+// })
diff --git a/api/src/functions/excelToJson/excelToJson.ts b/api/src/functions/excelToJson/excelToJson.ts
new file mode 100644
index 00000000..337d4035
--- /dev/null
+++ b/api/src/functions/excelToJson/excelToJson.ts
@@ -0,0 +1,49 @@
+import {
+  S3Client,
+  GetObjectCommand,
+  PutObjectCommand,
+} from '@aws-sdk/client-s3'
+import { NodeJsClient } from '@smithy/types'
+import { S3Event, S3Handler } from 'aws-lambda'
+import { Workbook } from 'exceljs'
+
+import { logger } from 'src/lib/logger'
+
+const s3 = new S3Client({}) as NodeJsClient<S3Client>
+
+export const handler: S3Handler = async (event: S3Event): Promise<void> => {
+  try {
+    const bucket = event.Records[0].s3.bucket.name
+    const key = event.Records[0].s3.object.key
+
+    // Download the Excel file from S3
+    const getObjectResponse = await s3.send(
+      new GetObjectCommand({ Bucket: bucket, Key: key })
+    )
+
+    if (getObjectResponse.Body) {
+      const workbook = new Workbook()
+      await workbook.xlsx.read(getObjectResponse.Body)
+
+      const worksheet = workbook.worksheets[0]
+      const jsonData = worksheet.getSheetValues()
+
+      // Write JSON data to a file
+      const jsonFileName = `${key}.json` // Use the same key with .json extension
+      const jsonFileContent = JSON.stringify(jsonData)
+
+      // Upload the JSON file to the same bucket
+      await s3.send(
+        new PutObjectCommand({
+          Bucket: bucket,
+          Key: jsonFileName,
+          Body: jsonFileContent,
+          ContentType: 'application/json',
+        })
+      )
+    }
+  } catch (error) {
+    logger.error('Error processing S3 event:', error)
+    throw error
+  }
+}
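The test files above only carry a dummy assertion plus a commented-out template. One way to exercise the excelToJson handler without real AWS calls is to stub the SDK client; a sketch, assuming aws-sdk-client-mock is added as a dev dependency and @smithy/util-stream is available for building the mocked response stream (neither is introduced by this diff):

import { Readable } from 'stream'

import { GetObjectCommand, PutObjectCommand, S3Client } from '@aws-sdk/client-s3'
import { sdkStreamMixin } from '@smithy/util-stream'
import { Context, S3Event } from 'aws-lambda'
import { mockClient } from 'aws-sdk-client-mock'
import { Workbook } from 'exceljs'

import { handler } from './excelToJson'

// Stubbing the class also affects the client instance created inside excelToJson.ts
const s3Mock = mockClient(S3Client)

describe('excelToJson handler', () => {
  beforeEach(() => {
    s3Mock.reset()
  })

  it('writes a .json object back to the bucket', async () => {
    // Build a small workbook in memory and serve it as the GetObject response body
    const source = new Workbook()
    source.addWorksheet('Sheet1').addRow(['hello', 'world'])
    const xlsxBuffer = (await source.xlsx.writeBuffer()) as unknown as Buffer

    s3Mock.on(GetObjectCommand).resolves({
      Body: sdkStreamMixin(Readable.from(xlsxBuffer)),
    })
    s3Mock.on(PutObjectCommand).resolves({})

    // Only the fields the handler reads are filled in; the cast keeps the sketch short
    const event = {
      Records: [{ s3: { bucket: { name: 'test-bucket' }, object: { key: 'file.xlsm' } } }],
    } as unknown as S3Event

    await handler(event, {} as Context, () => {})

    const puts = s3Mock.commandCalls(PutObjectCommand)
    expect(puts).toHaveLength(1)
    expect(puts[0].args[0].input.Key).toBe('file.xlsm.json')
  })
})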
diff --git a/api/src/lib/aws.ts b/api/src/lib/aws.ts
index 95b12282..39201dd3 100644
--- a/api/src/lib/aws.ts
+++ b/api/src/lib/aws.ts
@@ -14,7 +14,7 @@ import {
 import { getSignedUrl as awsGetSignedUrl } from '@aws-sdk/s3-request-presigner'
 import { QueryResolvers, CreateUploadInput } from 'types/graphql'
 
-const CPF_REPORTER_BUCKET_NAME = 'cpf-reporter'
+const CPF_REPORTER_BUCKET_NAME = `cpf-reporter-${process.env.environment}`
 
 function getS3Client() {
   let s3: S3Client
diff --git a/terraform/functions.tf b/terraform/functions.tf
index ac685148..e05f44ce 100644
--- a/terraform/functions.tf
+++ b/terraform/functions.tf
@@ -117,6 +117,40 @@ module "lambda_artifacts_bucket" {
   ]
 }
 
+module "cpf_uploads_bucket" {
+  source  = "cloudposse/s3-bucket/aws"
+  version = "4.0.1"
+  context = module.s3_label.context
+  name    = "cpf-reporter-${var.environment}"
+
+  acl                          = "private"
+  versioning_enabled           = true
+  sse_algorithm                = "AES256"
+  allow_ssl_requests_only      = true
+  allow_encrypted_uploads_only = true
+  source_policy_documents      = []
+
+  lifecycle_configuration_rules = [
+    {
+      enabled                                = true
+      id                                     = "rule-1"
+      filter_and                             = null
+      abort_incomplete_multipart_upload_days = 7
+      transition                             = [{ days = null }]
+      expiration                             = { days = null }
+      noncurrent_version_transition = [
+        {
+          noncurrent_days = 30
+          storage_class   = "GLACIER"
+        },
+      ]
+      noncurrent_version_expiration = {
+        noncurrent_days = 90
+      }
+    }
+  ]
+}
+
 resource "aws_s3_object" "lambda_artifact-graphql" {
   bucket = module.lambda_artifacts_bucket.bucket_id
   key    = "graphql.${filemd5("${local.lambda_artifacts_base_path}/graphql.zip")}.zip"
@@ -126,6 +160,44 @@ resource "aws_s3_object" "lambda_artifact-graphql" {
   server_side_encryption = "AES256"
 }
 
+resource "aws_s3_object" "lambda_artifact-excelToJson" {
+  bucket                 = module.lambda_artifacts_bucket.bucket_id
+  key                    = "excelToJson.${filemd5("${local.lambda_artifacts_base_path}/excelToJson.zip")}.zip"
+  source                 = "${local.lambda_artifacts_base_path}/excelToJson.zip"
+  source_hash            = filemd5("${local.lambda_artifacts_base_path}/excelToJson.zip")
+  etag                   = filemd5("${local.lambda_artifacts_base_path}/excelToJson.zip")
+  server_side_encryption = "AES256"
+}
+
+resource "aws_s3_object" "lambda_artifact-cpfValidation" {
+  bucket                 = module.lambda_artifacts_bucket.bucket_id
+  key                    = "cpfValidation.${filemd5("${local.lambda_artifacts_base_path}/cpfValidation.zip")}.zip"
+  source                 = "${local.lambda_artifacts_base_path}/cpfValidation.zip"
+  source_hash            = filemd5("${local.lambda_artifacts_base_path}/cpfValidation.zip")
+  etag                   = filemd5("${local.lambda_artifacts_base_path}/cpfValidation.zip")
+  server_side_encryption = "AES256"
+}
+
+resource "aws_s3_bucket_notification" "json_notification" {
+  bucket = module.cpf_uploads_bucket.bucket_id
+
+  lambda_function {
+    lambda_function_arn = module.lambda_function-cpfValidation.lambda_function_arn
+    events              = ["s3:ObjectCreated:*"]
+    filter_suffix       = ".json"
+  }
+}
+
+resource "aws_s3_bucket_notification" "excel_notification" {
+  bucket = module.cpf_uploads_bucket.bucket_id
+
+  lambda_function {
+    lambda_function_arn = module.lambda_function-excelToJson.lambda_function_arn
+    events              = ["s3:ObjectCreated:*"]
+    filter_suffix       = ".xlsm"
+  }
+}
+
 module "lambda_function-graphql" {
   source  = "terraform-aws-modules/lambda/aws"
   version = "6.5.0"
@@ -209,3 +281,59 @@ module "lambda_function-graphql" {
     }
   }
 }
+
"terraform-aws-modules/lambda/aws" + version = "6.5.0" + + function_name = "excel-to-json" + description = "Reacts to S3 events and converts Excel files to JSON." + + vpc_subnet_ids = local.private_subnet_ids + vpc_security_group_ids = [ + module.lambda_security_group.id, + module.postgres.security_group_id, + ] + handler = "index.handler" + architectures = [var.lambda_arch] + runtime = var.lambda_runtime + publish = true + layers = local.lambda_layer_arns + create_package = false + s3_existing_package = { + bucket = aws_s3_object.lambda_artifact-excelToJson.bucket + key = aws_s3_object.lambda_artifact-excelToJson.key + } + + role_name = "lambda-role-excelToJson" + attach_policy = true + policy = "arn:aws:iam::aws:policy/AmazonS3FullAccess" +} + +module "lambda_function-cpfValidation" { + source = "terraform-aws-modules/lambda/aws" + version = "6.5.0" + function_name = "cpf-validation" + description = "Reacts to S3 events and validates CPF JSON files." + + vpc_subnet_ids = local.private_subnet_ids + vpc_security_group_ids = [ + module.lambda_security_group.id, + module.postgres.security_group_id, + ] + handler = "index.handler" + architectures = [var.lambda_arch] + runtime = var.lambda_runtime + publish = true + layers = local.lambda_layer_arns + create_package = false + s3_existing_package = { + bucket = aws_s3_object.lambda_artifact-cpfValidation.bucket + key = aws_s3_object.lambda_artifact-cpfValidation.key + } + + role_name = "lambda-role-cpfValidation" + attach_policy = true + policy = "arn:aws:iam::aws:policy/AmazonS3ReadOnlyAccess" + # TODO: we need a policy for calling an API endpoint on the application for validation +} diff --git a/web/src/Routes.tsx b/web/src/Routes.tsx index 9a73e69a..3c81d8cf 100644 --- a/web/src/Routes.tsx +++ b/web/src/Routes.tsx @@ -40,6 +40,29 @@ const Routes = () => { + {/* Uploads */} + + + + + + {/* Agencies */} + + + + + {/* Users */} + + + + + {/* Reporting Periods */} + + {/* Organizations */} + + + +