diff --git a/.github/workflows/deploy-development.yml b/.github/workflows/deploy-development.yml index 7a79f83ad1..09ab212baf 100644 --- a/.github/workflows/deploy-development.yml +++ b/.github/workflows/deploy-development.yml @@ -49,6 +49,15 @@ jobs: autoapprove: false secrets: inherit + new-relic-record: + name: Record deployment to New Relic + needs: + - deploy-infrastructure-dev + uses: ./.github/workflows/new-relic-deployment.yml + with: + environment: "dev" + secrets: inherit + deploy-dev: name: Deploy application needs: @@ -69,7 +78,7 @@ jobs: generate-e2e-test-data: needs: - deploy-dev - name: + name: uses: ./.github/workflows/end-to-end-test-data-generator.yml secrets: inherit with: diff --git a/.github/workflows/deploy-production.yml b/.github/workflows/deploy-production.yml index 921053bf31..9d4e985ad3 100644 --- a/.github/workflows/deploy-production.yml +++ b/.github/workflows/deploy-production.yml @@ -32,6 +32,15 @@ jobs: autoapprove: false secrets: inherit + new-relic-record: + name: Record deployment to New Relic + needs: + - deploy-infrastructure-production + uses: ./.github/workflows/new-relic-deployment.yml + with: + environment: "production" + secrets: inherit + deploy-production: name: Deploy application needs: diff --git a/.github/workflows/deploy-staging.yml b/.github/workflows/deploy-staging.yml index 3f426703ec..cdc59bf003 100644 --- a/.github/workflows/deploy-staging.yml +++ b/.github/workflows/deploy-staging.yml @@ -31,6 +31,15 @@ jobs: autoapprove: false secrets: inherit + new-relic-record: + name: Record deployment to New Relic + needs: + - deploy-infrastructure-staging + uses: ./.github/workflows/new-relic-deployment.yml + with: + environment: "staging" + secrets: inherit + deploy-staging: name: Deploy application needs: @@ -61,7 +70,7 @@ jobs: generate-e2e-test-data: needs: - deploy-staging - name: + name: uses: ./.github/workflows/end-to-end-test-data-generator.yml secrets: inherit with: diff --git 
a/.github/workflows/new-relic-deployment.yml b/.github/workflows/new-relic-deployment.yml index 01fc5a2e1e..61977f0ec0 100644 --- a/.github/workflows/new-relic-deployment.yml +++ b/.github/workflows/new-relic-deployment.yml @@ -1,26 +1,57 @@ name: Record Deployment And Add New Relic Monitor on: - push: - branches: - - main - - prod - tags: - - v1.* + workflow_call: + inputs: + environment: + required: true + type: string jobs: - newrelic: + newrelic-dev: + if: ${{ inputs.environment == 'dev' }} runs-on: ubuntu-latest name: New Relic Record Deployment steps: - # This step builds a var with the release tag value to use later - name: Set Release Version from Tag run: echo "RELEASE_VERSION=${{ github.ref_name }}" >> $GITHUB_ENV - # This step creates a new Change Tracking Marker + + - name: Add New Relic Application Deployment Marker + uses: newrelic/deployment-marker-action@v2.5.0 + with: + apiKey: ${{ secrets.NEW_RELIC_API_KEY }} + guid: ${{ secrets.NEW_RELIC_DEV_DEPLOYMENT_ENTITY_GUID }} + version: "${{ env.RELEASE_VERSION }}" + user: "${{ github.actor }}" + + newrelic-staging: + if: ${{ inputs.environment == 'staging' }} + runs-on: ubuntu-latest + name: New Relic Record Deployment + steps: + - name: Set Release Version from Tag + run: echo "RELEASE_VERSION=${{ github.ref_name }}" >> $GITHUB_ENV + + - name: Add New Relic Application Deployment Marker + uses: newrelic/deployment-marker-action@v2.5.0 + with: + apiKey: ${{ secrets.NEW_RELIC_API_KEY }} + guid: ${{ secrets.NEW_RELIC_STAGING_DEPLOYMENT_ENTITY_GUID }} + version: "${{ env.RELEASE_VERSION }}" + user: "${{ github.actor }}" + + newrelic-production: + if: ${{ inputs.environment == 'production' }} + runs-on: ubuntu-latest + name: New Relic Record Deployment + steps: + - name: Set Release Version from Tag + run: echo "RELEASE_VERSION=${{ github.ref_name }}" >> $GITHUB_ENV + - name: Add New Relic Application Deployment Marker uses: newrelic/deployment-marker-action@v2.5.0 with: apiKey: ${{ secrets.NEW_RELIC_API_KEY 
}} - guid: ${{ secrets.NEW_RELIC_DEPLOYMENT_ENTITY_GUID }} + guid: ${{ secrets.NEW_RELIC_PRODUCTION_DEPLOYMENT_ENTITY_GUID }} version: "${{ env.RELEASE_VERSION }}" user: "${{ github.actor }}" diff --git a/backend/audit/admin.py b/backend/audit/admin.py index 4c1265776a..c03d3ce766 100644 --- a/backend/audit/admin.py +++ b/backend/audit/admin.py @@ -20,6 +20,7 @@ def has_view_permission(self, request, obj=None): "cognizant_agency", "oversight_agency", ] + readonly_fields = ("submitted_by",) class AccessAdmin(admin.ModelAdmin): diff --git a/backend/census_historical_migration/README.md b/backend/census_historical_migration/README.md index f23a4c79a2..c07f15e3ec 100644 --- a/backend/census_historical_migration/README.md +++ b/backend/census_historical_migration/README.md @@ -46,17 +46,17 @@ NOTE: Never check in the census_historical_migration/data folder into GitHub. 2. In the FAC/backend folder, run the following to load CSV files from census_historical_migration/data folder into fac-census-to-gsafac-s3 bucket. ```bash -docker compose run web python manage.py fac_s3 fac-census-to-gsafac-s3 --upload --src census_historical_migration/data +docker compose run --rm web python manage.py fac_s3 fac-census-to-gsafac-s3 --upload --src census_historical_migration/data ``` 3. In the FAC/backend folder, run the following to read the CSV files from fac-census-to-gsafac-s3 bucket and load into Postgres. 
```bash -docker compose run web python manage.py csv_to_postgres --folder data --chunksize 10000 +docker compose run --rm web python manage.py csv_to_postgres --folder data --chunksize 10000 ``` ### How to run the historic data migrator: ``` -docker compose run web python manage.py historic_data_migrator --email any_email_in_the_system@woo.gov \ +docker compose run --rm web python manage.py historic_data_migrator --email any_email_in_the_system@woo.gov \ --years 22 \ --dbkeys 100010 ``` @@ -65,7 +65,7 @@ docker compose run web python manage.py historic_data_migrator --email any_email ### How to run the historic workbook generator: ``` -docker compose run web python manage.py historic_workbook_generator +docker compose run --rm web python manage.py historic_workbook_generator \ --year 22 \ --output \ --dbkey 100010 diff --git a/backend/census_historical_migration/management/commands/historic_workbook_generator.py b/backend/census_historical_migration/management/commands/historic_workbook_generator.py index 9916afb11e..58cabf37a2 100644 --- a/backend/census_historical_migration/management/commands/historic_workbook_generator.py +++ b/backend/census_historical_migration/management/commands/historic_workbook_generator.py @@ -10,11 +10,11 @@ import pprint from census_historical_migration.workbooklib.workbook_creation import ( - sections, - workbook_loader, + generate_workbook, +) +from census_historical_migration.workbooklib.workbook_section_handlers import ( + sections_to_handlers, ) - -import datetime from census_historical_migration.workbooklib.census_models.census import ( CensusGen22 as Gen, @@ -181,16 +181,11 @@ def handle(self, *args, **options): # noqa: C901 logger.info("could not create output directory. 
exiting.") sys.exit() - entity_id = "DBKEY {dbkey} {date:%Y_%m_%d_%H_%M_%S}".format( - dbkey=options["dbkey"], date=datetime.datetime.now() - ) - - loader = workbook_loader( - None, None, options["dbkey"], options["year"], entity_id - ) json_test_tables = [] - for section, fun in sections.items(): - (wb, api_json, filename) = loader(fun, section) + for section, fun in sections_to_handlers.items(): + (wb, api_json, _, filename) = generate_workbook( + fun, options["dbkey"], options["year"], section + ) if wb: wb_path = os.path.join(outdir, filename) wb.save(wb_path) diff --git a/backend/census_historical_migration/workbooklib/corrective_action_plan.py b/backend/census_historical_migration/workbooklib/corrective_action_plan.py index 10d7dd04b2..1177a245ec 100644 --- a/backend/census_historical_migration/workbooklib/corrective_action_plan.py +++ b/backend/census_historical_migration/workbooklib/corrective_action_plan.py @@ -4,7 +4,6 @@ set_uei, map_simple_columns, generate_dissemination_test_table, - test_pfix, ) from census_historical_migration.workbooklib.templates import sections_to_template_paths from census_historical_migration.workbooklib.census_models.census import dynamic_import @@ -27,7 +26,7 @@ def generate_corrective_action_plan(dbkey, year, outfile): ) mappings = [ FieldMap("reference_number", "findingrefnums", "finding_ref_number", None, str), - FieldMap("planned_action", "text", WorkbookFieldInDissem, None, test_pfix(3)), + FieldMap("planned_action", "text", WorkbookFieldInDissem, None, str), FieldMap( "contains_chart_or_table", "chartstables", WorkbookFieldInDissem, None, str ), diff --git a/backend/census_historical_migration/workbooklib/end_to_end_core.py b/backend/census_historical_migration/workbooklib/end_to_end_core.py index d61d05e670..7019179463 100644 --- a/backend/census_historical_migration/workbooklib/end_to_end_core.py +++ b/backend/census_historical_migration/workbooklib/end_to_end_core.py @@ -11,10 +11,12 @@ from datetime import datetime 
from census_historical_migration.workbooklib.workbook_creation import ( - sections, workbook_loader, setup_sac, ) +from census_historical_migration.workbooklib.workbook_section_handlers import ( + sections_to_handlers, +) from census_historical_migration.workbooklib.sac_creation import _post_upload_pdf from audit.intake_to_dissemination import IntakeToDissemination @@ -197,9 +199,9 @@ def generate_workbooks(user, email, dbkey, year): if sac.general_information["audit_type"] == "alternative-compliance-engagement": print(f"Skipping ACE audit: {dbkey}") else: - loader = workbook_loader(user, sac, dbkey, year, entity_id) + loader = workbook_loader(user, sac, dbkey, year) json_test_tables = [] - for section, fun in sections.items(): + for section, fun in sections_to_handlers.items(): # FIXME: Can we conditionally upload the addl' and secondary workbooks? (_, json, _) = loader(fun, section) json_test_tables.append(json) diff --git a/backend/census_historical_migration/workbooklib/excel_creation.py b/backend/census_historical_migration/workbooklib/excel_creation.py index 96cf329fb7..c59b62ad8f 100644 --- a/backend/census_historical_migration/workbooklib/excel_creation.py +++ b/backend/census_historical_migration/workbooklib/excel_creation.py @@ -17,14 +17,6 @@ WorkbookFieldInDissem = 1000 -def test_pfix(n): - def _test(o): - # return ' '.join(["TEST" for x in range(n)]) + " " + str(o) - return o - - return _test - - def set_single_cell_range(wb, range_name, value): the_range = wb.defined_names[range_name] # The above returns a generator. 
Turn it to a list, and grab diff --git a/backend/census_historical_migration/workbooklib/findings_text.py b/backend/census_historical_migration/workbooklib/findings_text.py index 610ea06b43..6c8820e012 100644 --- a/backend/census_historical_migration/workbooklib/findings_text.py +++ b/backend/census_historical_migration/workbooklib/findings_text.py @@ -4,7 +4,6 @@ set_uei, map_simple_columns, generate_dissemination_test_table, - test_pfix, ) from census_historical_migration.workbooklib.templates import sections_to_template_paths from census_historical_migration.workbooklib.census_models.census import dynamic_import @@ -18,7 +17,7 @@ mappings = [ FieldMap("reference_number", "findingrefnums", "finding_ref_number", None, str), - FieldMap("text_of_finding", "text", "finding_text", None, test_pfix(3)), + FieldMap("text_of_finding", "text", "finding_text", None, str), FieldMap( "contains_chart_or_table", "chartstables", WorkbookFieldInDissem, None, str ), diff --git a/backend/census_historical_migration/workbooklib/notes_to_sefa.py b/backend/census_historical_migration/workbooklib/notes_to_sefa.py index 105b1e29df..ec9b65b9a9 100644 --- a/backend/census_historical_migration/workbooklib/notes_to_sefa.py +++ b/backend/census_historical_migration/workbooklib/notes_to_sefa.py @@ -4,7 +4,6 @@ set_single_cell_range, map_simple_columns, generate_dissemination_test_table, - test_pfix, ) from census_historical_migration.workbooklib.templates import sections_to_template_paths from census_historical_migration.workbooklib.excel_creation import ( @@ -22,9 +21,8 @@ logger = logging.getLogger(__name__) mappings = [ - FieldMap("note_title", "title", "title", None, test_pfix(3)), - FieldMap("note_content", "content", "content", None, test_pfix(3)), - # FieldMap("seq_number", "seq_number", "note_seq_number", 0, int), + FieldMap("note_title", "title", "title", None, str), + FieldMap("note_content", "content", "content", None, str), ] diff --git 
a/backend/census_historical_migration/workbooklib/secondary_auditors.py b/backend/census_historical_migration/workbooklib/secondary_auditors.py index 6354b315fa..c88df82c26 100644 --- a/backend/census_historical_migration/workbooklib/secondary_auditors.py +++ b/backend/census_historical_migration/workbooklib/secondary_auditors.py @@ -3,7 +3,6 @@ set_uei, map_simple_columns, generate_dissemination_test_table, - test_pfix, ) from census_historical_migration.workbooklib.templates import sections_to_template_paths from census_historical_migration.workbooklib.census_models.census import dynamic_import @@ -29,14 +28,14 @@ "cpastreet1", "address_street", None, - test_pfix(3), + str, ), FieldMap( "secondary_auditor_contact_title", "cpatitle", "contact_title", None, - test_pfix(3), + str, ), FieldMap( "secondary_auditor_address_zipcode", diff --git a/backend/census_historical_migration/workbooklib/utils.py b/backend/census_historical_migration/workbooklib/utils.py new file mode 100644 index 0000000000..a76df9668b --- /dev/null +++ b/backend/census_historical_migration/workbooklib/utils.py @@ -0,0 +1,12 @@ +from census_historical_migration.workbooklib.templates import sections_to_template_paths + + +def get_template_name_for_section(section): + """ + Return a workbook template name corresponding to the given section + """ + if section in sections_to_template_paths: + template_name = sections_to_template_paths[section].name + return template_name + else: + raise ValueError(f"Unknown section {section}") diff --git a/backend/census_historical_migration/workbooklib/workbook_creation.py b/backend/census_historical_migration/workbooklib/workbook_creation.py index 70ee3b5e86..128efdbbd3 100644 --- a/backend/census_historical_migration/workbooklib/workbook_creation.py +++ b/backend/census_historical_migration/workbooklib/workbook_creation.py @@ -5,54 +5,13 @@ _make_excel_file, _create_test_sac, ) -from audit.fixtures.excel import FORM_SECTIONS -from django.apps import apps +from 
django.apps import apps -from census_historical_migration.workbooklib.notes_to_sefa import generate_notes_to_sefa -from census_historical_migration.workbooklib.federal_awards import ( - generate_federal_awards, -) -from census_historical_migration.workbooklib.findings import generate_findings -from census_historical_migration.workbooklib.findings_text import generate_findings_text -from census_historical_migration.workbooklib.corrective_action_plan import ( - generate_corrective_action_plan, -) -from census_historical_migration.workbooklib.additional_ueis import ( - generate_additional_ueis, -) -from census_historical_migration.workbooklib.additional_eins import ( - generate_additional_eins, -) -from census_historical_migration.workbooklib.secondary_auditors import ( - generate_secondary_auditors, -) +from .utils import get_template_name_for_section import logging -sections = { - FORM_SECTIONS.ADDITIONAL_EINS: generate_additional_eins, - FORM_SECTIONS.ADDITIONAL_UEIS: generate_additional_ueis, - FORM_SECTIONS.ADDITIONAL_UEIS: generate_additional_ueis, - FORM_SECTIONS.CORRECTIVE_ACTION_PLAN: generate_corrective_action_plan, - FORM_SECTIONS.FEDERAL_AWARDS_EXPENDED: generate_federal_awards, - FORM_SECTIONS.FINDINGS_TEXT: generate_findings_text, - FORM_SECTIONS.FINDINGS_UNIFORM_GUIDANCE: generate_findings, - FORM_SECTIONS.NOTES_TO_SEFA: generate_notes_to_sefa, - FORM_SECTIONS.SECONDARY_AUDITORS: generate_secondary_auditors, -} - -filenames = { - FORM_SECTIONS.ADDITIONAL_EINS: "additional-eins-{}.xlsx", - FORM_SECTIONS.ADDITIONAL_UEIS: "additional-ueis-{}.xlsx", - FORM_SECTIONS.CORRECTIVE_ACTION_PLAN: "corrective-action-plan-{}.xlsx", - FORM_SECTIONS.FEDERAL_AWARDS_EXPENDED: "federal-awards-{}.xlsx", - FORM_SECTIONS.FINDINGS_TEXT: "audit-findings-text-{}.xlsx", - FORM_SECTIONS.FINDINGS_UNIFORM_GUIDANCE: "audit-findings-{}.xlsx", - FORM_SECTIONS.NOTES_TO_SEFA: "notes-to-sefa-{}.xlsx", - FORM_SECTIONS.SECONDARY_AUDITORS: "secondary-auditors-{}.xlsx", -} - logger = 
logging.getLogger(__name__) @@ -73,19 +32,33 @@ def setup_sac(user, test_name, dbkey): return sac -# FIXME: Refactor workbook_loader to separate workbook creation from upload -def workbook_loader(user, sac, dbkey, year, entity_id): - def _loader(workbook_generator, section): - with MemoryFS() as mem_fs: - filename = filenames[section].format(dbkey) - outfile = mem_fs.openbin(filename, mode="w") - (wb, json) = workbook_generator(dbkey, year, outfile) - outfile.close() - outfile = mem_fs.openbin(filename, mode="r") +def generate_workbook(workbook_generator, dbkey, year, section): + with MemoryFS() as mem_fs: + filename = ( + get_template_name_for_section(section) + .replace(".xlsx", "-{}.xlsx") + .format(dbkey) + ) + with mem_fs.openbin(filename, mode="w") as outfile: + # Generate the workbook object along with the API JSON representation + wb, json_data = workbook_generator(dbkey, year, outfile) + + # Re-open the file in read mode to create an Excel file object + with mem_fs.openbin(filename, mode="r") as outfile: excel_file = _make_excel_file(filename, outfile) - if user: - _post_upload_workbook(sac, user, section, excel_file) - outfile.close() - return (wb, json, filename) + + return wb, json_data, excel_file, filename + + +def workbook_loader(user, sac, dbkey, year): + def _loader(workbook_generator, section): + wb, json_data, excel_file, filename = generate_workbook( + workbook_generator, dbkey, year, section + ) + + if user: + _post_upload_workbook(sac, user, section, excel_file) + + return wb, json_data, filename return _loader diff --git a/backend/census_historical_migration/workbooklib/workbook_section_handlers.py b/backend/census_historical_migration/workbooklib/workbook_section_handlers.py new file mode 100644 index 0000000000..3b8e4a730c --- /dev/null +++ b/backend/census_historical_migration/workbooklib/workbook_section_handlers.py @@ -0,0 +1,34 @@ +from audit.fixtures.excel import FORM_SECTIONS + + +from 
census_historical_migration.workbooklib.notes_to_sefa import generate_notes_to_sefa +from census_historical_migration.workbooklib.federal_awards import ( + generate_federal_awards, +) +from census_historical_migration.workbooklib.findings import generate_findings +from census_historical_migration.workbooklib.findings_text import generate_findings_text +from census_historical_migration.workbooklib.corrective_action_plan import ( + generate_corrective_action_plan, +) +from census_historical_migration.workbooklib.additional_ueis import ( + generate_additional_ueis, +) +from census_historical_migration.workbooklib.additional_eins import ( + generate_additional_eins, +) +from census_historical_migration.workbooklib.secondary_auditors import ( + generate_secondary_auditors, +) + + +sections_to_handlers = { + FORM_SECTIONS.ADDITIONAL_EINS: generate_additional_eins, + FORM_SECTIONS.ADDITIONAL_UEIS: generate_additional_ueis, + FORM_SECTIONS.ADDITIONAL_UEIS: generate_additional_ueis, + FORM_SECTIONS.CORRECTIVE_ACTION_PLAN: generate_corrective_action_plan, + FORM_SECTIONS.FEDERAL_AWARDS_EXPENDED: generate_federal_awards, + FORM_SECTIONS.FINDINGS_TEXT: generate_findings_text, + FORM_SECTIONS.FINDINGS_UNIFORM_GUIDANCE: generate_findings, + FORM_SECTIONS.NOTES_TO_SEFA: generate_notes_to_sefa, + FORM_SECTIONS.SECONDARY_AUDITORS: generate_secondary_auditors, +} diff --git a/backend/dissemination/search.py b/backend/dissemination/search.py index 98a5ae7380..6baad5c929 100644 --- a/backend/dissemination/search.py +++ b/backend/dissemination/search.py @@ -118,8 +118,24 @@ def _get_names_match_query(names): """ Given a list of (potential) names, return the query object that searches auditee and firm names. 
""" + name_fields = [ + "auditee_city", + "auditee_contact_name", + "auditee_email", + "auditee_name", + "auditee_state", + "auditor_city", + "auditor_contact_name", + "auditor_email", + "auditor_firm_name", + "auditor_state", + ] + names_match = Q() - for name in names: - names_match.add(Q(auditee_name__search=name), Q.OR) - names_match.add(Q(auditor_firm_name__search=name), Q.OR) + + # turn ["name1", "name2", "name3"] into "name1 name2 name3" + names = " ".join(names) + for field in name_fields: + names_match.add(Q(**{"%s__search" % field: names}), Q.OR) + return names_match diff --git a/backend/dissemination/test_search.py b/backend/dissemination/test_search.py index e17e9cc46d..ddc3788faf 100644 --- a/backend/dissemination/test_search.py +++ b/backend/dissemination/test_search.py @@ -60,20 +60,14 @@ def test_name_matches_auditor_firm_name(self): def test_name_multiple(self): """ - Given multiple names, search_general should return records that match either name + Given multiple name terms, search_general should only return records that contain all of the terms """ - names = [ - "auditee-01", - "auditor-firm-01", - "this-one-has-no-match", - ] + names = ["city", "bronze"] - baker.make(General, is_public=True, auditee_name=names[0]) - baker.make(General, is_public=True, auditor_firm_name=names[1]) - baker.make(General, is_public=True, auditee_name="not-looking-for-this-auditee") - baker.make( - General, is_public=True, auditor_firm_name="not-looking-for-this-auditor" - ) + baker.make(General, is_public=True, auditee_name="city of gold") + baker.make(General, is_public=True, auditee_name="city of silver") + baker.make(General, is_public=True, auditee_name="city of bronze") + baker.make(General, is_public=True, auditee_name="bronze city") results = search_general( names=names, @@ -89,24 +83,13 @@ def test_name_matches_inexact(self): auditee_match = baker.make( General, is_public=True, auditee_name="the university of somewhere" ) - auditor_match = baker.make( - 
General, is_public=True, auditor_firm_name="auditors unite, LLC" - ) - baker.make(General, is_public=True, auditee_name="not looking for this auditee") - baker.make( - General, - is_public=True, - auditor_firm_name="not looking for this auditor firm", - ) + baker.make(General, is_public=True, auditor_firm_name="not this one") - results = search_general( - names=["UNIVERSITY", "unitE", "there is not match for this one"] - ) + results = search_general(names=["UNIVERSITY"]) assert_all_results_public(self, results) - self.assertEqual(len(results), 2) + self.assertEqual(len(results), 1) self.assertEqual(results[0], auditee_match) - self.assertEqual(results[1], auditor_match) def test_uei_or_ein_matches_uei(self): """ diff --git a/docs/architecture/decisions/0031-user-access-management.md b/docs/architecture/decisions/0031-user-access-management.md new file mode 100644 index 0000000000..60e0e41d50 --- /dev/null +++ b/docs/architecture/decisions/0031-user-access-management.md @@ -0,0 +1,91 @@ +# 31. User access management + +Date: 2023-11-14 + +## Status + +Accepted + +## Areas of impact + +- [ ] Compliance +- [ ] Content +- [ ] CX +- [x] Design +- [x] Engineering +- [ ] Policy +- [ ] Product +- [ ] Process +- [x] UX + +## Related documents/links + +* [Epic: Modify user roles after audit creation](https://github.com/GSA-TTS/FAC/issues/2654). +* Many ZenDesk tickets from users who want to change the auditee and/or auditor certifying officials for a submission. +* [Deleting `Access` objects](https://github.com/GSA-TTS/FAC/issues/2739) +* An [issue from ancient times about access management](https://github.com/GSA-TTS/FAC/issues/333). + +## Context + +Access to submissions is controlled using Access entries that associate email addresses with roles. + +Currently, access is determined in the third pre-SAC step, the final step before a submission is actually created in our system. 
+ +The user creating the submission specifies, in that step, any number of auditee/auditor contacts; each of these, and the submitter, are given editor roles on the submission. In the same step, the user also specifies a single auditee certifying official and a single auditor certifying official; their email addresses are associated with the roles that allow them to perform specific certification steps that other users cannot do. + +There is no user-facing way to delete, add, or change access to a submission once it’s started. + +Many users are entering incorrect email addresses for the certifying roles and only discovering this once the submission is ready for certification. + +The only option for those submissions, aside from altering the database, is to abandon the in-progress submission and start a new one with the correct email addresses. + +This is causing significant difficulty for our users and in turn is generating a significant number of help desk tickets. + +## Decision + +Due to the need to address this problem sooner rather than later, we need to implement a bare-minimum version of user-facing access controls. An ideal version of this access control feature is planned for the future, but this document concerns what the bare minimum is. + +Rather than a unified access control page, we will implement three different pages, one each for the following: + +* Changing the auditor certifying official role. +* Changing the auditee certifying official role. +* Adding more users to a submission with the generic “editor” role. + +We will probably add them in that order. + +Some notable features we will not include in this version: + +* Pure deletion. It will not be possible to remove certifying officials without specifying new officials in their place. +* Removing other users. It will not be possible to remove users with the “editor” role. +* Mass addition/alteration. Each user added will have to be done through the interface one at a time. 
+ +Each of the auditor and auditee certifying official role interfaces will contain: + +* A warning notification that saving the page will remove the current user from the submission and replace them with the user entered. +* (Possibly a warning that if the user is removing themselves, they will lose access as soon as they submit this form.) (Once we have editor addition in, we can also advise them to add themselves as an editor before doing this.) +* A reminder that this page is for whichever of auditor/auditee it is, and that it’s not for the other roles, with links to those pages. +* Non-editable text indicating the name and email address that will be removed. +* Form fields for the name and email address of the new user. +* The same validation we have for individual fields on the pre-SAC page. +* A save button. + +The page for adding editors will have: + +* A list of the names and addresses of the users who currently have edit access. +* Form fields for the name and email address of the new user. +* The same validation we have for individual fields on the pre-SAC page. +* A save button. + +We will add links to these pages into the UI so that users can reach them. + +We will add support for [deleting `Access` objects](https://github.com/GSA-TTS/FAC/issues/2739) in order to support this feature; that change does not require a user-facing interface. + +## Consequences + +Users will be able to get past the blocker of having incorrect information in the certifying official roles for submissions. + +We will have a user management feature that has only some critical functions and lacks others; this may generate more help desk tickets. + +We will support the [deletion of `Access` objects](https://github.com/GSA-TTS/FAC/issues/2739) and track some of the details. + +We will have to support redirection from the URLs of the pages that we create now to the future better version of user management. 
diff --git a/docs/deploying.md b/docs/deploying.md index 72831e07ba..c88bff91bf 100644 --- a/docs/deploying.md +++ b/docs/deploying.md @@ -101,6 +101,7 @@ Select a space: cf logs gsa-fac ``` 9. Post the most recent dbbackup and mediabackup file names in https://github.com/GSA-TTS/FAC/issues/2221 + - *NOTE* the following is not necessary as of this time. django-dbbackup is not backing up the media due to the file sizes being too large, and we will be using another method for getting the media backed up (TBD) To see more about branching and the deployment steps, see the [Branching](branching.md) page.