[treasury-report] Update Send Treasury Report by email button to only send the email and not trigger the step function #516

Closed
wants to merge 18 commits
Changes from 13 commits
3 changes: 3 additions & 0 deletions .env.defaults
@@ -28,6 +28,9 @@ AWS_DEFAULT_REGION=us-west-2
AWS_ACCESS_KEY_ID=test
AWS_SECRET_ACCESS_KEY=test

# Localstack environment defaults (needed for SQS routing)
LOCALSTACK_HOSTNAME=localhost

# Datadog environment defaults (note: RUM vars are not secret)
DD_ENABLED=false
DD_ENV=sandbox
4 changes: 1 addition & 3 deletions api/src/lib/aws.ts
@@ -196,9 +196,7 @@ function getSQSClient() {
let sqs: SQSClient
if (process.env.LOCALSTACK_HOSTNAME) {
console.log('------------ USING LOCALSTACK FOR SQS ------------')
const endpoint = `http://${process.env.LOCALSTACK_HOSTNAME}:${
process.env.EDGE_PORT || 4566
}`
const endpoint = `http://sqs.us-west-2.localhost.localstack.cloud:4566/000000000000/treasury-email-queue`
sqs = new SQSClient({ endpoint, region: process.env.AWS_DEFAULT_REGION })
} else {
sqs = new SQSClient()
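
The hunks above import sendSqsMessage from src/lib/aws, but its body is not part of the visible diff. A minimal sketch of what such a helper could look like, assuming the AWS SDK v3 SQS client returned by getSQSClient() above (everything here other than getSQSClient and the sendSqsMessage name is illustrative, not taken from this PR):

import { SendMessageCommand } from '@aws-sdk/client-sqs'

// Illustrative sketch only; the real helper lives elsewhere in api/src/lib/aws.ts.
export async function sendSqsMessage(queueUrl: string, message: unknown) {
  // Reuse the client selection shown above (LocalStack endpoint vs. default AWS endpoint).
  const sqs = getSQSClient()
  return sqs.send(
    new SendMessageCommand({
      QueueUrl: queueUrl,
      MessageBody: JSON.stringify(message),
    })
  )
}
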
110 changes: 7 additions & 103 deletions api/src/services/uploads/uploads.test.ts
@@ -4,7 +4,7 @@ import {
deleteUploadFile,
s3UploadFilePutSignedUrl,
getSignedUrl,
startStepFunctionExecution,
sendSqsMessage,
} from 'src/lib/aws'
import { db } from 'src/lib/db'
import { logger } from 'src/lib/logger'
@@ -23,9 +23,6 @@ import {
getUploadsByExpenditureCategory,
getValidUploadsInCurrentPeriod,
sendTreasuryReport,
SubrecipientLambdaPayload,
ProjectLambdaPayload,
CreateArchiveLambdaPayload,
EmailLambdaPayload,
} from './uploads'
import type { StandardScenario } from './uploads.scenarios'
@@ -42,7 +39,7 @@ jest.mock('src/lib/aws', () => ({
deleteUploadFile: jest.fn(),
s3UploadFilePutSignedUrl: jest.fn(),
getSignedUrl: jest.fn(),
startStepFunctionExecution: jest.fn(),
sendSqsMessage: jest.fn(),
}))
jest.mock('uuid', () => ({
v4: () => '00000000-0000-0000-0000-000000000000',
@@ -379,6 +376,8 @@ describe('treasury report', () => {
beforeEach(() => {
jest.resetAllMocks()
process.env.TREASURY_STEP_FUNCTION_ARN = 'test-arn'
process.env.TREASURY_EMAIL_SQS_URL =
'https://sqs.us-east-1.amazon.com/fake_aws_account_key/fake_queue'
})

scenario(
@@ -387,91 +386,8 @@
mockCurrentUser(scenario.user.one)
const mockOrganization = scenario.organization.one
const mockReportingPeriod = scenario.reportingPeriod.one
const mockUpload = scenario.upload.two
const mockUser = scenario.user.one

const projectPayload: ProjectLambdaPayload = {
'1A': {
organization: {
id: mockOrganization.id,
preferences: {
current_reporting_period_id: mockReportingPeriod.id,
},
},
user: {
email: mockUser.email,
id: mockUser.id,
},
outputTemplateId: mockReportingPeriod.outputTemplateId,
uploadsToAdd: {
[mockUpload.agencyId]: {
objectKey: `uploads/${mockOrganization.id}/${mockUpload.agencyId}/${mockReportingPeriod.id}/${mockUpload.id}/${mockUpload.filename}`,
createdAt: mockUpload.createdAt,
filename: mockUpload.filename,
},
},
uploadsToRemove: {},
ProjectType: '1A',
},
'1B': {
organization: {
id: mockOrganization.id,
preferences: {
current_reporting_period_id: mockReportingPeriod.id,
},
},
user: {
email: mockUser.email,
id: mockUser.id,
},
outputTemplateId: mockReportingPeriod.outputTemplateId,
uploadsToAdd: {},
uploadsToRemove: {},
ProjectType: '1B',
},
'1C': {
organization: {
id: mockOrganization.id,
preferences: {
current_reporting_period_id: mockReportingPeriod.id,
},
},
user: {
email: mockUser.email,
id: mockUser.id,
},
outputTemplateId: mockReportingPeriod.outputTemplateId,
uploadsToAdd: {},
uploadsToRemove: {},
ProjectType: '1C',
},
}
const subrecipientPayload: SubrecipientLambdaPayload = {
Subrecipient: {
organization: {
id: mockOrganization.id,
preferences: {
current_reporting_period_id: mockReportingPeriod.id,
},
},
user: {
email: mockUser.email,
id: mockUser.id,
},
outputTemplateId: mockReportingPeriod.outputTemplateId,
},
}

const zipPayload: CreateArchiveLambdaPayload = {
zip: {
organization: {
id: mockOrganization.id,
preferences: {
current_reporting_period_id: mockReportingPeriod.id,
},
},
},
}
const emailPayload: EmailLambdaPayload = {
email: {
organization: {
@@ -487,24 +403,12 @@
},
}

const input = JSON.stringify({
'1A': {},
'1B': {},
'1C': {},
Subrecipient: {},
zip: {},
email: {},
...projectPayload,
...subrecipientPayload,
...zipPayload,
...emailPayload,
})
const input = emailPayload
const result = await sendTreasuryReport()

expect(result).toBe(true)
expect(startStepFunctionExecution).toHaveBeenCalledWith(
'test-arn',
`Force-kick-off-00000000-0000-0000-0000-000000000000`,
expect(sendSqsMessage).toHaveBeenCalledWith(
'https://sqs.us-east-1.amazon.com/fake_aws_account_key/fake_queue',
input
)
}
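
The test above now passes emailPayload (a plain object, not a pre-serialized string) straight to the sendSqsMessage assertion. Based on the fields visible in this fixture and on getEmailLambdaPayload(organization, context.currentUser) in the service diff below, the payload type is presumably close to the sketch here; the canonical EmailLambdaPayload definition is not shown in these hunks, so any field not visible above is an assumption:

// Assumed shape, inferred from the test fixture; fields beyond those visible above are guesses.
export interface EmailLambdaPayload {
  email: {
    organization: {
      id: number
      preferences: { current_reporting_period_id: number }
    }
    user: { email: string; id: number }
  }
}
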
38 changes: 4 additions & 34 deletions api/src/services/uploads/uploads.ts
@@ -6,7 +6,6 @@ import type {
MutationResolvers,
UploadRelationResolvers,
} from 'types/graphql'
import { v4 as uuidv4 } from 'uuid'

import { RedwoodError } from '@redwoodjs/api'

@@ -16,7 +15,7 @@ import {
s3UploadFilePutSignedUrl,
getSignedUrl,
getS3UploadFileKey,
startStepFunctionExecution,
sendSqsMessage,
} from 'src/lib/aws'
import { ROLES } from 'src/lib/constants'
import { db } from 'src/lib/db'
@@ -370,41 +369,12 @@ export const sendTreasuryReport: MutationResolvers['sendTreasuryReport'] =
const organization = await db.organization.findFirst({
where: { id: context.currentUser.agency.organizationId },
})
const reportingPeriod = await db.reportingPeriod.findFirst({
where: { id: organization.preferences['current_reporting_period_id'] },
})
const projectLambdaPayload: ProjectLambdaPayload =
await getUploadsByExpenditureCategory(organization, reportingPeriod)
const subrecipientLambdaPayload: SubrecipientLambdaPayload =
await getSubrecipientLambdaPayload(
organization,
context.currentUser,
reportingPeriod
)
const createArchiveLambdaPayload: CreateArchiveLambdaPayload =
await getCreateArchiveLambdaPayload(organization)

const emailLambdaPayload: EmailLambdaPayload =
await getEmailLambdaPayload(organization, context.currentUser)

const input = {
'1A': {},
'1B': {},
'1C': {},
Subrecipient: {},
zip: {},
email: {},
...projectLambdaPayload,
...subrecipientLambdaPayload,
...createArchiveLambdaPayload,
...emailLambdaPayload,
}

await startStepFunctionExecution(
process.env.TREASURY_STEP_FUNCTION_ARN,
`Force-kick-off-${uuidv4()}`,
JSON.stringify(input)
)
const input = emailLambdaPayload

await sendSqsMessage(process.env.TREASURY_EMAIL_SQS_URL, input)
return true
} catch (error) {
logger.error(error, 'Error sending Treasury Report')
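
For context, this resolver backs the boolean sendTreasuryReport GraphQL mutation behind the "Send Treasury Report by email" button; the web-side changes are not included in these hunks, so the component and mutation names below are assumptions, shown only as a hypothetical sketch of the calling side:

import { useMutation } from '@redwoodjs/web'
import gql from 'graphql-tag'

// Hypothetical button component; not part of this diff.
const SEND_TREASURY_REPORT = gql`
  mutation SendTreasuryReportMutation {
    sendTreasuryReport
  }
`

const SendTreasuryReportByEmailButton = () => {
  const [sendTreasuryReport, { loading }] = useMutation(SEND_TREASURY_REPORT)
  return (
    <button disabled={loading} onClick={() => sendTreasuryReport()}>
      Send Treasury Report by email
    </button>
  )
}

export default SendTreasuryReportByEmailButton
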
14 changes: 13 additions & 1 deletion docker-compose.dev.yml
@@ -17,6 +17,10 @@ services:
- db
networks:
- redwood
- ls
dns:
# Set the DNS server to be the LocalStack container
- 10.0.2.20
environment:
- DATABASE_URL=postgresql://redwood:redwood@db:5432/redwood
- TEST_DATABASE_URL=postgresql://redwood:redwood@db:5432/redwood_test
@@ -91,7 +95,10 @@ services:
- DOCKER_HOST=unix:///var/run/docker.sock
- AWS_DEFAULT_REGION=${AWS_REGION:-us-west-2}
networks:
- redwood
redwood:
ls:
# Set the container IP address in the 10.0.2.0/24 subnet
ipv4_address: 10.0.2.20
volumes:
- "${LOCALSTACK_VOLUME_DIR:-./volume}:/var/lib/localstack"
- "/var/run/docker.sock:/var/run/docker.sock"
@@ -100,6 +107,11 @@
networks:
redwood:
driver: bridge
ls:
ipam:
config:
# Specify the subnet range for IP address allocation
- subnet: 10.0.2.0/24
Comment on lines +110 to +114 (Contributor Author):
Needed for proper SQS message routing in the local development environment: localstack/localstack#10590 (comment)

volumes:
node_modules:
1 change: 1 addition & 0 deletions localstack/entrypoint/init-aws.sh
@@ -13,3 +13,4 @@ for email in "${VALID_EMAILS[@]}"; do
done

awslocal s3api create-bucket --bucket cpf-reporter --region us-west-2 --create-bucket-configuration '{"LocationConstraint": "us-west-2"}'
awslocal sqs create-queue --queue-name treasury-email-queue
16 changes: 11 additions & 5 deletions python/src/functions/generate_presigned_url_and_send_email.py
@@ -1,5 +1,5 @@
import os
from typing import Any, Optional, Tuple
from typing import Any, Dict, Optional, Tuple

import boto3
import chevron
@@ -28,7 +28,7 @@ class SendTreasuryEmailLambdaPayload(BaseModel):


@reset_contextvars
def handle(event: SendTreasuryEmailLambdaPayload, context: Context) -> dict[str, Any]:
def handle(event: Dict[str, Any], context: Context) -> dict[str, Any]:
"""Lambda handler for emailing Treasury reports

Given a user and organization object- send an email to the user that
@@ -37,18 +37,24 @@ def handle(event: SendTreasuryEmailLambdaPayload, context: Context) -> dict[str,
If the object does not exist then raise an exception.

Args:
event: S3 Lambda event of type `s3:ObjectCreated:*`
event: S3 Lambda event of type `s3:ObjectCreated:*` or a single SQS message
with a `Records` field
context: Lambda context
"""
structlog.contextvars.bind_contextvars(lambda_event={"step_function": event})
logger = get_logger()
logger.info("received new invocation event from step function")

try:
# Lambda payload
payload = SendTreasuryEmailLambdaPayload.model_validate(event)
except Exception:
logger.exception("Exception parsing Send Treasury Email event payload")
return {"statusCode": 400, "body": "Bad Request"}
try:
# SQS event
payload = SendTreasuryEmailLambdaPayload.model_validate(event["Records"][0])
except Exception:
logger.exception("Exception parsing Send Treasury Email event payload")
return {"statusCode": 400, "body": "Bad Request"}

try:
process_event(payload, logger)
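
When this Lambda is invoked through the SQS event source mapping added in the Terraform below (rather than directly by the step function), the payload arrives wrapped in the standard SQS Records envelope, which is what the new event["Records"][0] branch targets. An illustrative sketch of that envelope follows (values are made up; note that AWS delivers each record's body as a JSON string):

// Illustrative SQS -> Lambda event envelope; not taken from this diff.
const exampleSqsEvent = {
  Records: [
    {
      messageId: '00000000-0000-0000-0000-000000000000',
      eventSource: 'aws:sqs',
      // The message sent to the queue arrives here as a serialized string.
      body: JSON.stringify({
        email: {
          organization: { id: 1, preferences: { current_reporting_period_id: 1 } },
          user: { id: 1, email: 'user@example.com' },
        },
      }),
    },
  ],
}
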
1 change: 1 addition & 0 deletions terraform/functions.tf
@@ -385,6 +385,7 @@ module "lambda_function-graphql" {
PASSAGE_API_KEY_SECRET_ARN = data.aws_ssm_parameter.passage_api_key_secret_arn.value
AUTH_PROVIDER = "passage"
TREASURY_STEP_FUNCTION_ARN = module.treasury_generation_step_function.state_machine_arn
TREASURY_EMAIL_SQS_URL = aws_sqs_queue.email_queue.id
PASSAGE_APP_ID = var.passage_app_id
})

15 changes: 15 additions & 0 deletions terraform/treasury_generation_lambda_functions.tf
@@ -443,5 +443,20 @@ module "lambda_function-email-presigned-url" {
principal = "states.amazonaws.com"
source_arn = module.treasury_generation_step_function.state_machine_arn
}

}
}

// SQS queue for email triggers
resource "aws_sqs_queue" "email_queue" {
name = "${var.namespace}-treasury-email-queue"
}


// Event source from SQS
resource "aws_lambda_event_source_mapping" "email_event" {
event_source_arn = aws_sqs_queue.email_queue.arn
enabled = true
function_name = module.lambda_function-email-presigned-url.lambda_function_arn
batch_size = 1
}