diff --git a/.gitignore b/.gitignore index d685b5c..ca7e53d 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,4 @@ crudActions/ finalUserPolicies/ userPolicies*/ presentPolicies*/ -logs*/ \ No newline at end of file +logs*/ diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 36aeaee..091883b 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -1,4 +1,5 @@ # Contributing to AWSZeroTrustPolicy + We love your input! We want to make contributing to this project as easy and transparent as possible, whether it's: - Reporting a bug @@ -8,9 +9,11 @@ We love your input! We want to make contributing to this project as easy and tra - Becoming a maintainer ## We Develop with Github + We use github to host code, to track issues and feature requests, as well as accept pull requests. ## We Use [Github Flow](https://docs.github.com/en/get-started/quickstart/github-flow), So All Code Changes Happen Through Pull Requests + Pull requests are the best way to propose changes to the codebase (we use [Github Flow](https://docs.github.com/en/get-started/quickstart/github-flow)). We actively welcome your pull requests: 1. Fork the repo and create your branch from `master`. @@ -21,14 +24,15 @@ Pull requests are the best way to propose changes to the codebase (we use [Githu 6. Issue that pull request! ## Any contributions you make will be under the MIT Software License + In short, when you submit code changes, your submissions are understood to be under the same [MIT License](http://choosealicense.com/licenses/mit/) that covers the project. Feel free to contact the maintainers if that's a concern. ## Report bugs using Github's [issues](https://github.com/CloudDefenseAI/AWSZeroTrustPolicy/issues) + We use GitHub issues to track public bugs. Report a bug by [opening a new issue](https://github.com/CloudDefenseAI/AWSZeroTrustPolicy/issues/new); it's that easy! 
## Write bug reports with detail, background, and sample code - **Great Bug Reports** tend to have: - A quick summary and/or background @@ -39,14 +43,15 @@ We use GitHub issues to track public bugs. Report a bug by [opening a new issue] - What actually happens - Notes (possibly including why you think this might be happening, or stuff you tried that didn't work) -People *love* thorough bug reports. I'm not even kidding. - +People _love_ thorough bug reports. I'm not even kidding. -* 2 spaces for indentation rather than tabs -* You can try running `npm run lint` for style unification +- 2 spaces for indentation rather than tabs +- You can try running `npm run lint` for style unification ## License + By contributing, you agree that your contributions will be licensed under its MIT License. ## References + This document was adapted from the open-source contribution guidelines for [Facebook's Draft](https://github.com/facebook/draft-js/blob/a9316a723f9e918afde44dea68b5f9f39b7d9b00/CONTRIBUTING.md) diff --git a/Dockerfile b/Dockerfile index d0e77ad..1a69804 100644 --- a/Dockerfile +++ b/Dockerfile @@ -6,6 +6,7 @@ RUN apk add --no-cache python3-dev gcc musl-dev libffi-dev libpq-dev openssl-dev WORKDIR /app COPY requirements.txt ./ RUN pip install --no-cache-dir -r requirements.txt + COPY . . EXPOSE 8000 CMD ["sh", "-c", "redis-server & uvicorn app:app --reload --host 0.0.0.0"] diff --git a/LICENSE b/LICENSE index 517f702..1be647c 100644 --- a/LICENSE +++ b/LICENSE @@ -2,180 +2,180 @@ Version 2.0, January 2004 http://www.apache.org/licenses/ - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the 
following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" @@ -186,16 +186,16 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [2023] [CloudDefenseAI] +Copyright [2023] [CloudDefenseAI] - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/README.md b/README.md index 8b91611..3314003 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,7 @@ If you want to help and wish to contribute, please review our contribution guide ## License -This project is released under the [Apache-2.0 License]([url](https://github.com/CloudDefenseAI/AWSZeroTrustPolicy/blob/master/LICENSE)). +This project is released under the [Apache-2.0 License](https://github.com/CloudDefenseAI/AWSZeroTrustPolicy/blob/master/LICENSE). 
## Disclaimer: diff --git a/app.py b/app.py index 36f7c70..b2016f2 100644 --- a/app.py +++ b/app.py @@ -7,6 +7,7 @@ app = FastAPI() + @app.post("/run") def run_script(script: Script): print(f"Timestamp: {datetime.now().strftime('%Y-%m-%d %H:%M:%S')}") @@ -14,10 +15,22 @@ def run_script(script: Script): print(json.dumps(script.dict(), indent=4)) try: - resp = runner(script.accountType, script.accessKey, script.secretKey, script.accountId, script.days - , script.bucketData, script.roleArn, script.externalId) + resp = runner( + script.accountType, + script.accessKey, + script.secretKey, + script.accountId, + script.days, + script.bucketData, + script.roleArn, + script.externalId, + ) except Exception as e: raise HTTPException(status_code=500, detail=str(e)) - return {"accountId": resp['accountId'], "generatedPolicies": resp['generatedPolicies'], "consolidatedPolicies": resp['consolidatedPolicies'], "excessivePolicies": resp['excessivePolicies']} - + return { + "accountId": resp["accountId"], + "generatedPolicies": resp["generatedPolicies"], + "consolidatedPolicies": resp["consolidatedPolicies"], + "excessivePolicies": resp["excessivePolicies"], + } diff --git a/aws/awsOps.py b/aws/awsOps.py index 2bdee9a..7d9f6b9 100644 --- a/aws/awsOps.py +++ b/aws/awsOps.py @@ -4,100 +4,110 @@ from botocore.exceptions import ClientError, NoCredentialsError, BotoCoreError from fastapi import HTTPException + class AWSOperations: def connect_to_iam_with_assumed_role(self, aws_credentials): # Create a new session with the temporary credentials session = boto3.Session( aws_access_key_id=aws_credentials.access_key, aws_secret_access_key=aws_credentials.secret_key, - aws_session_token=aws_credentials.token + aws_session_token=aws_credentials.token, ) # Use the new session to connect to IAM - iam_client = session.client('iam') + iam_client = session.client("iam") return iam_client def get_iam_connection(self): try: with open("config.json", "r") as f: data = json.loads(f.read()) - if 
data['accountType'] == "CloudFormation": + if data["accountType"] == "CloudFormation": aws_credentials = self.get_assume_role_credentials(data) iam_client = self.connect_to_iam_with_assumed_role(aws_credentials) - elif data['accountType'] == "Credential": - iam_client = self.connect("iam", data['aws_access_key_id'], data['aws_secret_access_key']) + elif data["accountType"] == "Credential": + iam_client = self.connect( + "iam", data["aws_access_key_id"], data["aws_secret_access_key"] + ) return iam_client except (FileNotFoundError, json.JSONDecodeError) as e: - raise HTTPException(status_code=500, detail=f"Error reading or parsing config.json: {e}") + raise HTTPException( + status_code=500, detail=f"Error reading or parsing config.json: {e}" + ) except (ClientError, NoCredentialsError, BotoCoreError) as e: raise HTTPException(status_code=500, detail=f"Error connecting to IAM: {e}") - - def connect(self,serviceName,aws_access_key_id,aws_secret_access_key): - s3Client = boto3.client(serviceName,aws_access_key_id=aws_access_key_id,aws_secret_access_key=aws_secret_access_key) + def connect(self, serviceName, aws_access_key_id, aws_secret_access_key): + s3Client = boto3.client( + serviceName, + aws_access_key_id=aws_access_key_id, + aws_secret_access_key=aws_secret_access_key, + ) return s3Client - def connect_to_s3_with_assumed_role(self,aws_credentials): + def connect_to_s3_with_assumed_role(self, aws_credentials): # Create a new session with the temporary credentials session = boto3.Session( aws_access_key_id=aws_credentials.access_key, aws_secret_access_key=aws_credentials.secret_key, - aws_session_token=aws_credentials.token + aws_session_token=aws_credentials.token, ) # Use the new session to connect to S3 - s3_client = session.client('s3') + s3_client = session.client("s3") return s3_client def getConnection(self): try: with open("config.json", "r") as f: data = json.loads(f.read()) - if data['accountType'] == "CloudFormation": + if data["accountType"] == 
"CloudFormation": aws_credentials = self.get_assume_role_credentials(data) s3_client = self.connect_to_s3_with_assumed_role(aws_credentials) - elif data['accountType'] == "Credential": - s3_client = self.connect("s3", data['aws_access_key_id'], data['aws_secret_access_key']) + elif data["accountType"] == "Credential": + s3_client = self.connect( + "s3", data["aws_access_key_id"], data["aws_secret_access_key"] + ) return s3_client except (FileNotFoundError, json.JSONDecodeError) as e: - raise HTTPException(status_code=500, detail=f"Error reading or parsing config.json: {e}") + raise HTTPException( + status_code=500, detail=f"Error reading or parsing config.json: {e}" + ) except (ClientError, NoCredentialsError, BotoCoreError) as e: raise HTTPException(status_code=500, detail=f"Error connecting to S3: {e}") def get_assume_role_credentials(self, account): try: - # Create an STS client with the IAM user's access and secret keys + # Create an STS client with the IAM user's access and secret keys sts_client = boto3.client( - 'sts', - aws_access_key_id=account['aws_access_key_id'], - aws_secret_access_key=account['aws_secret_access_key'] + "sts", + aws_access_key_id=account["aws_access_key_id"], + aws_secret_access_key=account["aws_secret_access_key"], ) # Assume the IAM role response = sts_client.assume_role( - RoleArn=account['role_arn'], - RoleSessionName='Assume_Role_Session', + RoleArn=account["role_arn"], + RoleSessionName="Assume_Role_Session", DurationSeconds=43200, - ExternalId=account['externalid'] + ExternalId=account["externalid"], ) # Extract the temporary credentials - creds = response['Credentials'] + creds = response["Credentials"] session_credentials = boto3.Session( - aws_access_key_id=creds['AccessKeyId'], - aws_secret_access_key=creds['SecretAccessKey'], - aws_session_token=creds['SessionToken'] + aws_access_key_id=creds["AccessKeyId"], + aws_secret_access_key=creds["SecretAccessKey"], + aws_session_token=creds["SessionToken"], ).get_credentials() # 
Create an AwsCredentials object with the temporary credentials aws_credentials = Credentials( access_key=session_credentials.access_key, secret_key=session_credentials.secret_key, - token=session_credentials.token + token=session_credentials.token, ) return aws_credentials except (ClientError, NoCredentialsError, BotoCoreError) as e: raise HTTPException(status_code=500, detail=f"Error assuming role: {e}") - - diff --git a/aws/comparePolicies.py b/aws/comparePolicies.py index 99d96e2..59d7db8 100644 --- a/aws/comparePolicies.py +++ b/aws/comparePolicies.py @@ -14,6 +14,7 @@ # hdServices = ['a4b','account','amplify','apprunner','appsync','aps','billing','codebuild','codecommit','connect','databrew','eks','emrcontainers','forecast','frauddetector','fsx','gamelift','greengrassv2','health','iot','iotanalytics','iotevents','iotfleethub','iotthingsgraph','kafka','kendra','kinesisvideo','lakeformation','licensemanager','lookoutvision','macie','managedblockchain','marketplacecatalog','mediaconnect','mediaconvert','medialive','mediapackage','mediapackage-vod','mediastore','mediastore-data','mediatailor','meteringmarketplace','migrationhub-config','mobile','mq','neptune','networkmanager','outposts','personalize','pinpoint','pinpoint-email','pinpoint-sms-voice','polly','pricing','qldb','quicksight','ram','rds-data','robomaker','route53resolver','sagemaker','sagemaker-a2i-runtime','sagemaker-edge','sagemaker-featurestore-runtime','sagemaker-runtime','savingsplans','schemas','secretsmanager','securityhub','serverlessrepo','servicecatalog','servicecatalog-appregistry','servicequotas','sesv2','shield','signer','sms','snowball','snowball-edge','sso','sso-oidc','ssm','stepfunctions','storagegateway','synthetics','textract','transcribe','transfer','translate','waf-regional','wafv2','worklink','workmail','workmailmessageflow','workspaces','xray','autoscaling','iam','ec2','s3','rds','elasticache','elasticbeanstalk','elasticloadbalancing','elasticmapreduce','cloudfront','cloudtrail','clo
udwatch','cloudwatchevents','cloudwatchlogs','config','datapipeline','directconnect','dynamodb','ecr','ecs','elasticfilesystem','elastictranscoder','glacier','kinesis','kms','lambda','opsworks','redshift','route53','route53domains','sdb','ses','sns','sqs','storagegateway','sts','support','swf','waf','workspaces','xray'] # hdServices.extend(['acm','acm-pca','alexaforbusiness','amplifybackend','appconfig','appflow','appintegrations','appmesh','appstream','appsync','athena','auditmanager','autoscaling-plans','backup','batch','braket','budgets','ce','chime','cloud9','clouddirectory','cloudformation','cloudhsm','cloudhsmv2','cloudsearch','cloudsearchdomain','cloudtrail','cloudwatch','cloudwatchevents','cloudwatchlogs','codeartifact','codebuild','codecommit','codedeploy','codeguru-reviewer','codeguru-reviewer-runtime','codeguru-profiler','codeguru-profiler-runtime','codepipeline','codestar','codestar-connections','codestar-notifications','cognito-identity','cognito-idp','cognito-sync','comprehend','comprehendmedical','compute-optimizer','connect','connect-contact-lens','connectparticipant','cur','customer-profiles','dataexchange','datapipeline','datasync','dax','detective','devicefarm','devops-guru','directconnect','discovery','dlm','dms','docdb','ds','dynamodb','dynamodbstreams','ec2','ec2-instance-connect','ecr','ecr-public','ecs','eks','elastic-inference','elasticache','elasticbeanstalk','elasticfilesystem','elasticloadbalancing','elasticloadbalancingv2','elasticmapreduce','elastictranscoder','email','es','events','firehose','fms','forecast','forecastquery','frauddetector','fsx','gamelift','glacier','globalaccelerator','glue','greengrass','greengrassv2','groundstation','guardduty','health','healthlake','honeycode','iam','identitystore','imagebuilder','importexport','inspector','iot','iot-data','iot-jobs-data','iot1click-devices','iot1click-projects','iotanalytics','iotdeviceadvisor','iotevents','iotevents-data','iotfleethub','iotsecuretunneling','iotthingsgraph','iotwi
reless','ivs','kafka','kendra','kinesis','kinesis-video-archived-media','kinesis-video-media','kinesis-video-signaling','kinesisvideo','kinesisanalytics','kinesisanalyticsv2','kinesisvideoarchivedmedia','kinesis']) + def create_services_list(actions_data): for action in actions_data: action_data = json.loads(action) @@ -21,6 +22,7 @@ def create_services_list(actions_data): service = event_source.split(".")[0] services[service].add(service) + def create_service_actions_cache(services): service_actions_cache = {} @@ -37,19 +39,25 @@ def create_service_actions_cache(services): return service_actions_cache + def write_service_actions_cache_to_file(service_actions_cache, file_path): - with open(file_path, 'w') as f: + with open(file_path, "w") as f: json.dump(service_actions_cache, f, indent=2) + def load_policy(filepath): with open(filepath, "r") as f: policy = json.load(f) return policy + def is_valid_action(action): - return re.match(r'^[a-zA-Z0-9_]+:(\*|[a-zA-Z0-9_\*]+)$', action) + return re.match(r"^[a-zA-Z0-9_]+:(\*|[a-zA-Z0-9_\*]+)$", action) -def compare_policy_worker(present_policy_filepath, user_policy_filepath, output_filepath): + +def compare_policy_worker( + present_policy_filepath, user_policy_filepath, output_filepath +): print(f"Started thread for {user_policy_filepath}") current_policy = load_policy(present_policy_filepath) @@ -59,7 +67,10 @@ def compare_policy_worker(present_policy_filepath, user_policy_filepath, output_ with open(output_filepath, "w") as f_write: f_write.write(json.dumps(excessive_permissions, indent=2)) - print(f"Generated excessive policy for {os.path.basename(user_policy_filepath)}") + print( + f"Generated excessive policy for {os.path.basename(user_policy_filepath)}" + ) + # def expand_wildcard_actions(actions_list, service_actions_cache=None): # if service_actions_cache is None: @@ -84,6 +95,7 @@ def compare_policy_worker(present_policy_filepath, user_policy_filepath, output_ # return expanded_actions + def 
expand_wildcard_actions(actions_list, service_actions_cache=None): if service_actions_cache is None: with open("service_actions_cache.json", "r") as f: @@ -98,27 +110,33 @@ def expand_wildcard_actions(actions_list, service_actions_cache=None): if is_valid_action(action): service, action_name = action.split(":") if "*" in action_name: - expanded_actions.extend([f"{a}" for a in service_actions_cache.get(service, []) if action_name.replace("*", "") in a]) + expanded_actions.extend( + [ + f"{a}" + for a in service_actions_cache.get(service, []) + if action_name.replace("*", "") in a + ] + ) else: expanded_actions.append(action) - elif action == '*': + elif action == "*": for service in service_actions_cache: - expanded_actions.extend([f"{a}" for a in service_actions_cache[service]]) + expanded_actions.extend( + [f"{a}" for a in service_actions_cache[service]] + ) return expanded_actions + def compare_policy(current_policy, generated_policy): - excessive_permissions = { - "Version": "2012-10-17", - "Statement": [] - } + excessive_permissions = {"Version": "2012-10-17", "Statement": []} for current_statement in current_policy["Statement"]: excessive_statement = { "Effect": current_statement["Effect"], "Action": [], - "Resource": current_statement["Resource"] + "Resource": current_statement["Resource"], } current_actions_expanded = expand_wildcard_actions(current_statement["Action"]) @@ -126,7 +144,9 @@ def compare_policy(current_policy, generated_policy): for action in current_actions_expanded: action_in_generated = False for generated_statement in generated_policy["Statement"]: - generated_actions_expanded = expand_wildcard_actions(generated_statement["Action"]) + generated_actions_expanded = expand_wildcard_actions( + generated_statement["Action"] + ) # Check if the action and resource match in both policies if action in generated_actions_expanded: @@ -145,6 +165,7 @@ def compare_policy(current_policy, generated_policy): return excessive_permissions + def 
compare_policies(): crudKeys = crudConnection.get_all_keys() for user_arn in crudKeys: @@ -152,7 +173,9 @@ def compare_policies(): create_services_list(actions_data) service_actions_cache = create_service_actions_cache(services) - write_service_actions_cache_to_file(service_actions_cache, 'service_actions_cache.json') + write_service_actions_cache_to_file( + service_actions_cache, "service_actions_cache.json" + ) print("Service actions cache created successfully.") # present_policies_dir = "presentPolicies" diff --git a/aws/createServiceMap.py b/aws/createServiceMap.py index 9f49371..33136f0 100644 --- a/aws/createServiceMap.py +++ b/aws/createServiceMap.py @@ -6,6 +6,7 @@ services = defaultdict(set) + def create_services_list(actions_data): for action in actions_data: action_data = json.loads(action) @@ -13,6 +14,7 @@ def create_services_list(actions_data): service = event_source.split(".")[0] services[service].add(service) + def create_service_actions_cache(services): service_actions_cache = {} @@ -21,24 +23,30 @@ def create_service_actions_cache(services): service_actions_cache[service] = actions return service_actions_cache + def write_service_actions_cache_to_file(service_actions_cache, file_path): - with open(file_path, 'w') as f: + with open(file_path, "w") as f: json.dump(service_actions_cache, f, indent=2) + def load_policy(filepath): with open(filepath, "r") as f: policy = json.load(f) return policy + def is_valid_action(action): - return re.match(r'^[a-zA-Z0-9_]+:(\*|[a-zA-Z0-9_\*]+)$', action) + return re.match(r"^[a-zA-Z0-9_]+:(\*|[a-zA-Z0-9_\*]+)$", action) + def create_service_map(mergedData): print("Creating service map") for username, actions in mergedData.items(): - actions_list = [action for action in actions] - create_services_list(actions_list) + actions_list = [action for action in actions] + create_services_list(actions_list) service_actions_cache = create_service_actions_cache(services) - 
write_service_actions_cache_to_file(service_actions_cache, 'service_actions_cache.json') + write_service_actions_cache_to_file( + service_actions_cache, "service_actions_cache.json" + ) print("Service actions cache created successfully.") diff --git a/aws/dataCleanup.py b/aws/dataCleanup.py index 1a96e6a..d3031e8 100644 --- a/aws/dataCleanup.py +++ b/aws/dataCleanup.py @@ -1,5 +1,6 @@ import pendulum + class DataCleanup: def __init__(self, redis_connection): self.redis_connection = redis_connection diff --git a/aws/getPreviousPolicies.py b/aws/getPreviousPolicies.py index ca4b8fd..64e6eaa 100644 --- a/aws/getPreviousPolicies.py +++ b/aws/getPreviousPolicies.py @@ -8,19 +8,26 @@ from redisops.redisOps import RedisOperations arnStore = RedisOperations() -arnStore.connect("localhost",6379,2) - +arnStore.connect("localhost", 6379, 2) + + def is_valid_arn(arn): - return re.match(r'^arn:aws:iam::\d{12}:user/[\w+=,.@-]+$', arn.decode()) is not None + return re.match(r"^arn:aws:iam::\d{12}:user/[\w+=,.@-]+$", arn.decode()) is not None + def get_policies_for_users(path, merged_data): ops = AWSOperations() iam_client = ops.get_iam_connection() - valid_user_list = [user.encode() for user in merged_data.keys() if is_valid_arn(user.encode())] + valid_user_list = [ + user.encode() for user in merged_data.keys() if is_valid_arn(user.encode()) + ] with concurrent.futures.ThreadPoolExecutor() as executor: - futures = [executor.submit(get_previous_policies, iam_client, path, user.decode()) for user in valid_user_list] + futures = [ + executor.submit(get_previous_policies, iam_client, path, user.decode()) + for user in valid_user_list + ] for future in concurrent.futures.as_completed(futures): try: @@ -28,7 +35,8 @@ def get_policies_for_users(path, merged_data): except Exception as e: print(f"Error occurred during parallel processing: {e}") -def get_previous_policies(iam_client,path, current_user): + +def get_previous_policies(iam_client, path, current_user): user_arn = current_user 
current_user = current_user.split("/")[-1] arnStore.insertKeyVal(current_user, user_arn) @@ -38,39 +46,51 @@ def get_previous_policies(iam_client,path, current_user): if isinstance(current_user, bytes): current_user = current_user.decode() - policy_dict = {'Version': '2012-10-17', 'Statement': []} + policy_dict = {"Version": "2012-10-17", "Statement": []} if user_exists(iam_client, current_user): - policies = iam_client.list_attached_user_policies(UserName=current_user)['AttachedPolicies'] - inline_policies = iam_client.list_user_policies(UserName=current_user)['PolicyNames'] + policies = iam_client.list_attached_user_policies(UserName=current_user)[ + "AttachedPolicies" + ] + inline_policies = iam_client.list_user_policies(UserName=current_user)[ + "PolicyNames" + ] # Fetch managed policies for policy in policies: - policy_arn = policy['PolicyArn'] - policy_version = iam_client.get_policy(PolicyArn=policy_arn)['Policy']['DefaultVersionId'] - policy_doc = iam_client.get_policy_version(PolicyArn=policy_arn, VersionId=policy_version)['PolicyVersion']['Document'] - policy_dict['Statement'].extend(policy_doc['Statement']) + policy_arn = policy["PolicyArn"] + policy_version = iam_client.get_policy(PolicyArn=policy_arn)["Policy"][ + "DefaultVersionId" + ] + policy_doc = iam_client.get_policy_version( + PolicyArn=policy_arn, VersionId=policy_version + )["PolicyVersion"]["Document"] + policy_dict["Statement"].extend(policy_doc["Statement"]) # Fetch inline policies for policy_name in inline_policies: - policy_doc = iam_client.get_user_policy(UserName=current_user, PolicyName=policy_name)['PolicyDocument'] - policy_dict['Statement'].extend(policy_doc['Statement']) + policy_doc = iam_client.get_user_policy( + UserName=current_user, PolicyName=policy_name + )["PolicyDocument"] + policy_dict["Statement"].extend(policy_doc["Statement"]) else: print(f"User {current_user} does not exist. 
Creating an empty policy.") - outfileName = f'policy_{current_user}.json' - outfileName = outfileName.replace(":","-").replace("/","_").replace("\\","__") - with open(os.path.join(path, outfileName), 'w') as f: + outfileName = f"policy_{current_user}.json" + outfileName = ( + outfileName.replace(":", "-").replace("/", "_").replace("\\", "__") + ) + with open(os.path.join(path, outfileName), "w") as f: json.dump(policy_dict, f, indent=2) - print( - f"Fetched Present Policies for {current_user}") + print(f"Fetched Present Policies for {current_user}") except ClientError as e: print(f"Error occurred: {e}") + def user_exists(iam_client, user_name): try: iam_client.get_user(UserName=user_name) return True except iam_client.exceptions.NoSuchEntityException: - return False \ No newline at end of file + return False diff --git a/aws/policygenOps.py b/aws/policygenOps.py index 995463d..8ed5142 100644 --- a/aws/policygenOps.py +++ b/aws/policygenOps.py @@ -6,19 +6,29 @@ from utils.colors import colors from redisops.redisOps import RedisOperations import json + # from policy_sentry.writing.template import get_crud_template_dict # from policy_sentry.command.write_policy import write_policy_with_template from collections import defaultdict crudConnection = RedisOperations() -crudConnection.connect("localhost",6379,0) +crudConnection.connect("localhost", 6379, 0) arnStore = RedisOperations() -arnStore.connect("localhost",6379,2) +arnStore.connect("localhost", 6379, 2) + def get_event_type(event_name): read_events = ["^Get", "^Describe", "^Head"] - write_events = ["^Create", "^Put", "^Post", "^Copy", - "^Complete", "^Delete", "^Update", "^Modify"] + write_events = [ + "^Create", + "^Put", + "^Post", + "^Copy", + "^Complete", + "^Delete", + "^Update", + "^Modify", + ] tagging_events = ["^Tag", "^Untag"] list_events = ["^List"] for pattern in read_events: @@ -33,30 +43,32 @@ def get_event_type(event_name): if re.search(pattern, event_name): return "tagging" for pattern in 
list_events: - if re.search(pattern,event_name): + if re.search(pattern, event_name): return "list" return None + def load_service_actions_cache(file_path): - with open(file_path, 'r') as f: + with open(file_path, "r") as f: return json.load(f) + def load_service_replace_map(file_path): - with open(file_path, 'r') as f: + with open(file_path, "r") as f: return json.load(f) -def generate_least_privilege_policy(user_arn, actions_data, service_actions_cache, service_replace_map): - policy = { - "Version": "2012-10-17", - "Statement": [] - } + +def generate_least_privilege_policy( + user_arn, actions_data, service_actions_cache, service_replace_map +): + policy = {"Version": "2012-10-17", "Statement": []} resource_actions = defaultdict(lambda: defaultdict(set)) for action_data in actions_data: if action_data["username"] != user_arn: continue - service = action_data['eventSource'].split('.')[0] + service = action_data["eventSource"].split(".")[0] action = f"{service}:{action_data['eventName']}" if action in service_replace_map: @@ -65,16 +77,12 @@ def generate_least_privilege_policy(user_arn, actions_data, service_actions_cach if action not in service_actions_cache.get(service, []): continue - resource = action_data['arn'] - resource_prefix = resource.split(':')[5].split('/')[0] + resource = action_data["arn"] + resource_prefix = resource.split(":")[5].split("/")[0] resource_actions[resource_prefix][resource].add(action) for resource_prefix, resources in resource_actions.items(): - statement = { - "Effect": "Allow", - "Action": [], - "Resource": [] - } + statement = {"Effect": "Allow", "Action": [], "Resource": []} for resource, actions in resources.items(): statement["Action"].extend(actions) @@ -90,12 +98,15 @@ def generate_least_privilege_policy(user_arn, actions_data, service_actions_cach return policy + def generateLeastprivPolicies(user_arn, policy_output_dir, actions_list): service_actions_cache = load_service_actions_cache("service_actions_cache.json") 
service_replace_map = load_service_replace_map("service_replace_map.json") - policy = generate_least_privilege_policy(user_arn, actions_list, service_actions_cache, service_replace_map) + policy = generate_least_privilege_policy( + user_arn, actions_list, service_actions_cache, service_replace_map + ) username = user_arn.split("/")[-1] - filename = f'policy_{username}.json' + filename = f"policy_{username}.json" filename = filename.replace(":", "-").replace("/", "_").replace("\\", "__") with open(os.path.join(policy_output_dir, filename), "w") as f_write: @@ -103,8 +114,8 @@ def generateLeastprivPolicies(user_arn, policy_output_dir, actions_list): print(f"Generated policy for {username}") -def runPolicyGeneratorCRUD(filePath,date,region,bucketName, accountID): - with gzip.open(f"{filePath}", 'rt') as f: +def runPolicyGeneratorCRUD(filePath, date, region, bucketName, accountID): + with gzip.open(f"{filePath}", "rt") as f: parser = ijson.parse(f) startKey = "Records.item.eventVersion" startLoop = False @@ -116,20 +127,27 @@ def runPolicyGeneratorCRUD(filePath,date,region,bucketName, accountID): startLoop = True else: startLoop = False - if currentBlock.get("username") is not None and currentBlock.get("arn") is not None: + if ( + currentBlock.get("username") is not None + and currentBlock.get("arn") is not None + ): userNameCB = currentBlock.get("username") outerKey = f"{accountID}_{bucketName}_{date}_{region}" - crudConnection.push_back_nested_json(outerKey, userNameCB, currentBlock) + crudConnection.push_back_nested_json( + outerKey, userNameCB, currentBlock + ) currentBlock = {} if startLoop: if prefix == "Records.item.resources.item.ARN": currentBlock["arn"] = value if prefix == "Records.item.userIdentity.type": currentBlock["userIdentityType"] = value - if prefix == "Records.item.userIdentity.arn" and currentBlock.get("userIdentityType") == "IAMUser": + if ( + prefix == "Records.item.userIdentity.arn" + and currentBlock.get("userIdentityType") == "IAMUser" + ): 
currentBlock["username"] = value if prefix == "Records.item.eventSource": currentBlock["eventSource"] = value if prefix == "Records.item.eventName": currentBlock["eventName"] = value - diff --git a/aws/s3Ops.py b/aws/s3Ops.py index f7bb4a6..c3f73e8 100644 --- a/aws/s3Ops.py +++ b/aws/s3Ops.py @@ -2,13 +2,15 @@ import json from utils import helpers from utils.colors import colors + # from constants import default_regions from tqdm import tqdm import os import time + # from datetime import datetime import threading -from aws.policygenOps import runPolicyGeneratorCRUD , generateLeastprivPolicies +from aws.policygenOps import runPolicyGeneratorCRUD, generateLeastprivPolicies from redisops.redisOps import RedisOperations from aws.getPreviousPolicies import get_policies_for_users from aws.comparePolicies import compare_policies @@ -25,6 +27,7 @@ utc_timezone = timezone("UTC") pendulum.set_local_timezone(utc_timezone) + @contextmanager def measure_time_block(message: str = "Execution time"): start = time.time() @@ -35,10 +38,14 @@ def measure_time_block(message: str = "Execution time"): if duration >= 3600: hours = int(duration // 3600) duration %= 3600 - print(f"{message} completed in {hours} hour(s) {int(duration // 60)} minute(s) {duration % 60:.2f} second(s)") + print( + f"{message} completed in {hours} hour(s) {int(duration // 60)} minute(s) {duration % 60:.2f} second(s)" + ) elif duration >= 60: minutes = int(duration // 60) - print(f"{message} completed in {minutes} minute(s) {duration % 60:.2f} second(s)") + print( + f"{message} completed in {minutes} minute(s) {duration % 60:.2f} second(s)" + ) else: print(f"{message} completed in {duration:.2f} second(s)") @@ -58,12 +65,17 @@ def getConfig(self): def mergeData(self, accountId, num_days, bucketData): print(f"Merging data for {num_days}") now = pendulum.now() - previousData = [(now - pendulum.duration(days=i)).strftime("%Y/%m/%d") for i in range(num_days)] + previousData = [ + (now - 
pendulum.duration(days=i)).strftime("%Y/%m/%d") + for i in range(num_days) + ] merged_data = {} for bucketName, regions in bucketData.items(): for region, date in itertools.product(regions, previousData): - day_data_str = self.crudConnection.read_json(f"{bucketName}_{accountId}_{date}_{region}") + day_data_str = self.crudConnection.read_json( + f"{bucketName}_{accountId}_{date}_{region}" + ) if day_data_str: for username, actions in day_data_str.items(): if username not in merged_data: @@ -80,11 +92,11 @@ def is_request_processed(self, account_id, bucket_name, date, region): outer_key = f"{bucket_name}_{account_id}_{date}_{region}" return self.crudConnection.exists(outer_key) - def getObjects(self, completedBuckets, bucketData, num_days,unique_id): + def getObjects(self, completedBuckets, bucketData, num_days, unique_id): self.data_cleanup.cleanup() try: config = self.getConfig() - accountId = config['accountId'] + accountId = config["accountId"] today = pendulum.now() endDay = today.date() startDay = today.subtract(days=num_days).date() @@ -93,7 +105,19 @@ def getObjects(self, completedBuckets, bucketData, num_days,unique_id): for bucketName, regions in bucketData.items(): - thread = threading.Thread(target=bucketThreadFn, args=(completedBuckets, bucketName, regions, startDay, endDay, accountId, day_diff,unique_id)) + thread = threading.Thread( + target=bucketThreadFn, + args=( + completedBuckets, + bucketName, + regions, + startDay, + endDay, + accountId, + day_diff, + unique_id, + ), + ) thread.start() except KeyError as err: @@ -102,11 +126,11 @@ def getObjects(self, completedBuckets, bucketData, num_days,unique_id): except Exception as exp: helpers.logException(exp) - def getPolicies(self, account_id, num_days, bucketData,unique_id): + def getPolicies(self, account_id, num_days, bucketData, unique_id): user_policies_dir = f"userPolicies_{account_id}_{unique_id}" present_policies_dir = f"presentPolicies_{account_id}_{unique_id}" - mergedData = 
self.mergeData(account_id,num_days, bucketData) + mergedData = self.mergeData(account_id, num_days, bucketData) create_service_map(mergedData) with measure_time_block("Generating Policies"): @@ -123,7 +147,17 @@ def getPolicies(self, account_id, num_days, bucketData,unique_id): # print("Comparing the policies to get excessive policies") # compare_policies() -def bucketThreadFn(completedBuckets, bucketName, regions, startDay, endDay, accountId, day_diff,unique_id): + +def bucketThreadFn( + completedBuckets, + bucketName, + regions, + startDay, + endDay, + accountId, + day_diff, + unique_id, +): print("Started thread for Bucket : " + bucketName) s3Ops = s3Operations() @@ -134,7 +168,18 @@ def bucketThreadFn(completedBuckets, bucketName, regions, startDay, endDay, acco with ThreadPoolExecutor(max_workers=5) as executor: for idx, region in enumerate(regions): - executor.submit(regionThreadFn, startDay, endDay, accountId, region, s3Client, bucketName, completedRegions, region_events[idx],unique_id) + executor.submit( + regionThreadFn, + startDay, + endDay, + accountId, + region, + s3Client, + bucketName, + completedRegions, + region_events[idx], + unique_id, + ) for event in region_events: event.wait() @@ -144,7 +189,17 @@ def bucketThreadFn(completedBuckets, bucketName, regions, startDay, endDay, acco print(f"Completed thread for Bucket : {bucketName}") -def regionThreadFn(startDay, endDay, accountId, region, s3Client, bucketName ,completedRegions,region_event,unique_id): +def regionThreadFn( + startDay, + endDay, + accountId, + region, + s3Client, + bucketName, + completedRegions, + region_event, + unique_id, +): print(f"Starting thread for {region}") completedDays = [] @@ -152,7 +207,18 @@ def regionThreadFn(startDay, endDay, accountId, region, s3Client, bucketName ,co day = startDay while day <= endDay: - thread = threading.Thread(target=dayThreadFn, args=(day, accountId, region, s3Client, bucketName,completedDays,unique_id)) + thread = threading.Thread( + 
target=dayThreadFn, + args=( + day, + accountId, + region, + s3Client, + bucketName, + completedDays, + unique_id, + ), + ) day_threads.append(thread) thread.start() day = day.add(days=1) @@ -165,7 +231,7 @@ def regionThreadFn(startDay, endDay, accountId, region, s3Client, bucketName ,co region_event.set() -def dayThreadFn(day, accountId, region, s3Client, bucketName, completedDays,unique_id): +def dayThreadFn(day, accountId, region, s3Client, bucketName, completedDays, unique_id): date_str = day.strftime("%Y/%m/%d") s3obj = s3Operations() @@ -179,15 +245,17 @@ def dayThreadFn(day, accountId, region, s3Client, bucketName, completedDays,uniq print(f"Starting thread for {region} : {day.format('YYYY/MM/DD')}") prefix = f"AWSLogs/{accountId}/CloudTrail/{region}/{day.format('YYYY/MM/DD')}" response = s3Client.list_objects(Bucket=bucketName, Prefix=prefix) - contents = response.get('Contents', []) + contents = response.get("Contents", []) total_items = len(contents) processed_items = 0 for item in contents: - key = str(item['Key']) - filePath = key.split('/')[-1] - with open(os.path.join(f"logs_{accountId}_{unique_id}", filePath), "wb") as data: + key = str(item["Key"]) + filePath = key.split("/")[-1] + with open( + os.path.join(f"logs_{accountId}_{unique_id}", filePath), "wb" + ) as data: s3Client.download_fileobj(bucketName, key, data) downloaded = False retry_count = 0 @@ -195,7 +263,13 @@ def dayThreadFn(day, accountId, region, s3Client, bucketName, completedDays,uniq while not downloaded and retry_count < max_retries: try: - runPolicyGeneratorCRUD(f"logs_{accountId}_{unique_id}/{filePath}", f"{day.format('YYYY/MM/DD')}", region, accountId, bucketName) + runPolicyGeneratorCRUD( + f"logs_{accountId}_{unique_id}/{filePath}", + f"{day.format('YYYY/MM/DD')}", + region, + accountId, + bucketName, + ) os.remove(f"logs_{accountId}_{unique_id}/{filePath}") downloaded = True except OSError as e: @@ -205,17 +279,21 @@ def dayThreadFn(day, accountId, region, s3Client, 
bucketName, completedDays,uniq retry_count += 1 time.sleep(1) except Exception as e: - print(f"Error in runPolicyGeneratorCRUD for {region} : {day.format('YYYY/MM/DD')}: {str(e)}") + print( + f"Error in runPolicyGeneratorCRUD for {region} : {day.format('YYYY/MM/DD')}: {str(e)}" + ) retry_count += 1 time.sleep(1) processed_items += 1 - print(f"Processed {processed_items}/{total_items} items for {region} : {day.format('YYYY/MM/DD')}") + print( + f"Processed {processed_items}/{total_items} items for {region} : {day.format('YYYY/MM/DD')}" + ) print(f"Completed thread for {region} : {day.format('YYYY/MM/DD')}") completedDays.append(day) except Exception as e: - print(f"Error in dayThreadFn for {region} : {day.format('YYYY/MM/DD')}: {str(e)}") - - + print( + f"Error in dayThreadFn for {region} : {day.format('YYYY/MM/DD')}: {str(e)}" + ) diff --git a/constants.py b/constants.py index 24bf5a5..59553b6 100644 --- a/constants.py +++ b/constants.py @@ -1,7 +1,24 @@ aws_regions_testing = ["us-east-1"] -aws_regions = ["us-east-2", "us-east-1", "us-west-1", "us-west-2", "ap-south-1", "ap-northeast-3", "ap-northeast-2", "ap-southeast-1", - "ap-southeast-2", "ap-northeast-1", "ca-central-1", "eu-central-1", "eu-west-1", "eu-west-2", "eu-west-3", "eu-north-1", "sa-east-1"] +aws_regions = [ + "us-east-2", + "us-east-1", + "us-west-1", + "us-west-2", + "ap-south-1", + "ap-northeast-3", + "ap-northeast-2", + "ap-southeast-1", + "ap-southeast-2", + "ap-northeast-1", + "ca-central-1", + "eu-central-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "eu-north-1", + "sa-east-1", +] default_regions = aws_regions # default_regions = aws_regions_testing diff --git a/redisTest.py b/redisTest.py index db74b4d..fac05c7 100644 --- a/redisTest.py +++ b/redisTest.py @@ -5,6 +5,5 @@ redis_ops.connect("localhost", 6379, 0) redis_dump = redis_ops.get_redis_dump() -# Print the JSON dump in a pretty format print(json.dumps(redis_dump, indent=2)) diff --git a/redisops/redisOps.py b/redisops/redisOps.py 
index 6e83c5f..e5a5411 100644 --- a/redisops/redisOps.py +++ b/redisops/redisOps.py @@ -1,24 +1,25 @@ import redis import json + class RedisOperations: - def connect(self,host,port,dbNum): - self.r = redis.Redis(host=host,port=port,db=dbNum) + def connect(self, host, port, dbNum): + self.r = redis.Redis(host=host, port=port, db=dbNum) - def insertKeyVal(self,key,value): - self.r.set(key,value) + def insertKeyVal(self, key, value): + self.r.set(key, value) - def createList(self,key,value_list): + def createList(self, key, value_list): for item in value_list: - self.r.rpush(key,item) + self.r.rpush(key, item) # insert a value at the end of a list - def push_back(self,key,value): - self.r.rpush(key,value) + def push_back(self, key, value): + self.r.rpush(key, value) # insert a value at the front of a list - def push_front(self,key,value): - self.r.lpush(key,value) + def push_front(self, key, value): + self.r.lpush(key, value) def push_back_json(self, key, jsonObj): jsonStr = json.dumps(jsonObj) @@ -26,14 +27,14 @@ def push_back_json(self, key, jsonObj): if pos == None: self.r.rpush(key, jsonStr) - def pop_back(self,key): + def pop_back(self, key): self.r.rpop(key) - def pop_front(self,key): + def pop_front(self, key): self.r.lpop(key) - def insert_json(self,key,json_object): - self.r.set(key,json.dumps(json_object)) + def insert_json(self, key, json_object): + self.r.set(key, json.dumps(json_object)) def read_json(self, key): val = self.r.get(key) @@ -41,18 +42,18 @@ def read_json(self, key): return json.loads(val.decode()) return None - def insert_jsonobj_list(self,key,listOfJsonObjects): + def insert_jsonobj_list(self, key, listOfJsonObjects): for jsonObject in listOfJsonObjects: - self.push_back(key,json.dumps(jsonObject)) + self.push_back(key, json.dumps(jsonObject)) - def get_list_items(self,key): - items = self.r.lrange(key,0,-1) + def get_list_items(self, key): + items = self.r.lrange(key, 0, -1) return items def get_all_keys(self): return self.r.scan_iter() - 
# flush all keys from current DB + # flush all keys from current DB def flushdb(self): self.r.flushdb() diff --git a/requirements.txt b/requirements.txt index 2b4445c..6bb49b8 100644 --- a/requirements.txt +++ b/requirements.txt @@ -10,3 +10,4 @@ redis==4.5.4 tqdm==4.64.1 uvicorn==0.21.1 mangum==0.17.0 + diff --git a/runner.py b/runner.py index 37d33a7..b1bf47f 100644 --- a/runner.py +++ b/runner.py @@ -13,6 +13,7 @@ unique_id = uuid.uuid4().hex[:6] + @contextmanager def measure_time_block(message: str = "Execution time"): start = time.time() @@ -23,13 +24,18 @@ def measure_time_block(message: str = "Execution time"): if duration >= 3600: hours = int(duration // 3600) duration %= 3600 - print(f"{message} completed in {hours} hour(s) {int(duration // 60)} minute(s) {duration % 60:.2f} second(s)") + print( + f"{message} completed in {hours} hour(s) {int(duration // 60)} minute(s) {duration % 60:.2f} second(s)" + ) elif duration >= 60: minutes = int(duration // 60) - print(f"{message} completed in {minutes} minute(s) {duration % 60:.2f} second(s)") + print( + f"{message} completed in {minutes} minute(s) {duration % 60:.2f} second(s)" + ) else: print(f"{message} completed in {duration:.2f} second(s)") + def create_dirs(account_id): base_dir = os.path.dirname(os.path.abspath(__file__)) directories = [ @@ -47,6 +53,7 @@ def create_dirs(account_id): empty_directory(f"presentPolicies_{account_id}_{unique_id}") # empty_directory(f"excessivePolicies_{account_id}") + def empty_directory(directory_name): base_dir = os.path.dirname(os.path.abspath(__file__)) directory_path = os.path.join(base_dir, directory_name) @@ -71,7 +78,7 @@ def empty_directory(directory_name): def get_policy_from_file(folder, username): filename = f"{username}.json" filepath = os.path.join(folder, filename) - with open(filepath, 'r') as f: + with open(filepath, "r") as f: return json.load(f) @@ -90,10 +97,12 @@ def load_policies_from_directory(directory_name: str): policies = {} for file_path in 
policies_files: - with open(file_path, 'r') as policy_file: + with open(file_path, "r") as policy_file: policy = json.load(policy_file) - username = os.path.basename(file_path).replace('policy_', '').replace('.json', '') + username = ( + os.path.basename(file_path).replace("policy_", "").replace(".json", "") + ) user_arn = get_user_arn(username) policies[user_arn] = policy @@ -109,7 +118,17 @@ def reformat_bucket_data(bucket_data): reformatted_data[bucket_name] = [region] return reformatted_data -def runner(accountType,aws_access_key_id,aws_secret_access_key,accountId,num_days,bucketData,role_arn,externalid): + +def runner( + accountType, + aws_access_key_id, + aws_secret_access_key, + accountId, + num_days, + bucketData, + role_arn, + externalid, +): print(f"Running for {num_days} days") with measure_time_block("Data Population"): create_dirs(accountId) @@ -120,31 +139,37 @@ def runner(accountType,aws_access_key_id,aws_secret_access_key,accountId,num_day bucketData = reformat_bucket_data(bucketData) config_data = { - "accountType": accountType, - "bucketData": bucketData, - "aws_access_key_id": aws_access_key_id, - "aws_secret_access_key": aws_secret_access_key, - "externalid": externalid, - "role_arn": role_arn, - "accountId": accountId - } - - with open('config.json', 'w') as config_file: + "accountType": accountType, + "bucketData": bucketData, + "aws_access_key_id": aws_access_key_id, + "aws_secret_access_key": aws_secret_access_key, + "externalid": externalid, + "role_arn": role_arn, + "accountId": accountId, + } + + with open("config.json", "w") as config_file: json.dump(config_data, config_file) completedBuckets = [] - s3Ops.getObjects(completedBuckets,bucketData,num_days,unique_id) + s3Ops.getObjects(completedBuckets, bucketData, num_days, unique_id) while len(completedBuckets) < len(bucketData): time.sleep(10) print("Generating Policies") - s3Ops.getPolicies(accountId, num_days, bucketData,unique_id) - - generated_policies = 
load_policies_from_directory(f"userPolicies_{accountId}_{unique_id}") - consolidated_policies = load_policies_from_directory(f"presentPolicies_{accountId}_{unique_id}") - excessive_policies = load_policies_from_directory(f"excessivePolicies_{accountId}_{unique_id}") + s3Ops.getPolicies(accountId, num_days, bucketData, unique_id) + + generated_policies = load_policies_from_directory( + f"userPolicies_{accountId}_{unique_id}" + ) + consolidated_policies = load_policies_from_directory( + f"presentPolicies_{accountId}_{unique_id}" + ) + excessive_policies = load_policies_from_directory( + f"excessivePolicies_{accountId}_{unique_id}" + ) response = { "accountId": accountId, @@ -152,5 +177,5 @@ def runner(accountType,aws_access_key_id,aws_secret_access_key,accountId,num_day "consolidatedPolicies": consolidated_policies, "excessivePolicies": excessive_policies, } - + return response diff --git a/schemas.py b/schemas.py index 9cb7684..fa000f3 100644 --- a/schemas.py +++ b/schemas.py @@ -1,6 +1,7 @@ from pydantic import BaseModel from typing import Dict + class Script(BaseModel): accountType: str accessKey: str diff --git a/service_actions_cache.json b/service_actions_cache.json index 9faffcb..fbe9f4f 100644 --- a/service_actions_cache.json +++ b/service_actions_cache.json @@ -1,292 +1,292 @@ { - "sts": [ - "sts:AssumeRole", - "sts:AssumeRoleWithSAML", - "sts:AssumeRoleWithWebIdentity", - "sts:DecodeAuthorizationMessage", - "sts:GetAccessKeyInfo", - "sts:GetCallerIdentity", - "sts:GetFederationToken", - "sts:GetServiceBearerToken", - "sts:GetSessionToken", - "sts:SetSourceIdentity", - "sts:TagSession" - ], - "s3": [ - "s3:AbortMultipartUpload", - "s3:BypassGovernanceRetention", - "s3:CreateAccessPoint", - "s3:CreateAccessPointForObjectLambda", - "s3:CreateBucket", - "s3:CreateJob", - "s3:CreateMultiRegionAccessPoint", - "s3:DeleteAccessPoint", - "s3:DeleteAccessPointForObjectLambda", - "s3:DeleteAccessPointPolicy", - "s3:DeleteAccessPointPolicyForObjectLambda", - 
"s3:DeleteBucket", - "s3:DeleteBucketPolicy", - "s3:DeleteBucketWebsite", - "s3:DeleteJobTagging", - "s3:DeleteMultiRegionAccessPoint", - "s3:DeleteObject", - "s3:DeleteObjectTagging", - "s3:DeleteObjectVersion", - "s3:DeleteObjectVersionTagging", - "s3:DeleteStorageLensConfiguration", - "s3:DeleteStorageLensConfigurationTagging", - "s3:DescribeJob", - "s3:DescribeMultiRegionAccessPointOperation", - "s3:GetAccelerateConfiguration", - "s3:GetAccessPoint", - "s3:GetAccessPointConfigurationForObjectLambda", - "s3:GetAccessPointForObjectLambda", - "s3:GetAccessPointPolicy", - "s3:GetAccessPointPolicyForObjectLambda", - "s3:GetAccessPointPolicyStatus", - "s3:GetAccessPointPolicyStatusForObjectLambda", - "s3:GetAccountPublicAccessBlock", - "s3:GetAnalyticsConfiguration", - "s3:GetBucketAcl", - "s3:GetBucketCORS", - "s3:GetBucketLocation", - "s3:GetBucketLogging", - "s3:GetBucketNotification", - "s3:GetBucketObjectLockConfiguration", - "s3:GetBucketOwnershipControls", - "s3:GetBucketPolicy", - "s3:GetBucketPolicyStatus", - "s3:GetBucketPublicAccessBlock", - "s3:GetBucketRequestPayment", - "s3:GetBucketTagging", - "s3:GetBucketVersioning", - "s3:GetBucketWebsite", - "s3:GetEncryptionConfiguration", - "s3:GetIntelligentTieringConfiguration", - "s3:GetInventoryConfiguration", - "s3:GetJobTagging", - "s3:GetLifecycleConfiguration", - "s3:GetMetricsConfiguration", - "s3:GetMultiRegionAccessPoint", - "s3:GetMultiRegionAccessPointPolicy", - "s3:GetMultiRegionAccessPointPolicyStatus", - "s3:GetObject", - "s3:GetObjectAcl", - "s3:GetObjectAttributes", - "s3:GetObjectLegalHold", - "s3:GetObjectRetention", - "s3:GetObjectTagging", - "s3:GetObjectTorrent", - "s3:GetObjectVersion", - "s3:GetObjectVersionAcl", - "s3:GetObjectVersionAttributes", - "s3:GetObjectVersionForReplication", - "s3:GetObjectVersionTagging", - "s3:GetObjectVersionTorrent", - "s3:GetReplicationConfiguration", - "s3:GetStorageLensConfiguration", - "s3:GetStorageLensConfigurationTagging", - 
"s3:GetStorageLensDashboard", - "s3:InitiateReplication", - "s3:ListAccessPoints", - "s3:ListAccessPointsForObjectLambda", - "s3:ListAllMyBuckets", - "s3:ListBucket", - "s3:ListBucketMultipartUploads", - "s3:ListBucketVersions", - "s3:ListJobs", - "s3:ListMultiRegionAccessPoints", - "s3:ListMultipartUploadParts", - "s3:ListStorageLensConfigurations", - "s3:ObjectOwnerOverrideToBucketOwner", - "s3:PutAccelerateConfiguration", - "s3:PutAccessPointConfigurationForObjectLambda", - "s3:PutAccessPointPolicy", - "s3:PutAccessPointPolicyForObjectLambda", - "s3:PutAccessPointPublicAccessBlock", - "s3:PutAccountPublicAccessBlock", - "s3:PutAnalyticsConfiguration", - "s3:PutBucketAcl", - "s3:PutBucketCORS", - "s3:PutBucketLogging", - "s3:PutBucketNotification", - "s3:PutBucketObjectLockConfiguration", - "s3:PutBucketOwnershipControls", - "s3:PutBucketPolicy", - "s3:PutBucketPublicAccessBlock", - "s3:PutBucketRequestPayment", - "s3:PutBucketTagging", - "s3:PutBucketVersioning", - "s3:PutBucketWebsite", - "s3:PutEncryptionConfiguration", - "s3:PutIntelligentTieringConfiguration", - "s3:PutInventoryConfiguration", - "s3:PutJobTagging", - "s3:PutLifecycleConfiguration", - "s3:PutMetricsConfiguration", - "s3:PutMultiRegionAccessPointPolicy", - "s3:PutObject", - "s3:PutObjectAcl", - "s3:PutObjectLegalHold", - "s3:PutObjectRetention", - "s3:PutObjectTagging", - "s3:PutObjectVersionAcl", - "s3:PutObjectVersionTagging", - "s3:PutReplicationConfiguration", - "s3:PutStorageLensConfiguration", - "s3:PutStorageLensConfigurationTagging", - "s3:ReplicateDelete", - "s3:ReplicateObject", - "s3:ReplicateTags", - "s3:RestoreObject", - "s3:UpdateJobPriority", - "s3:UpdateJobStatus" - ], - "ssm": [ - "ssm:AddTagsToResource", - "ssm:AssociateOpsItemRelatedItem", - "ssm:CancelCommand", - "ssm:CancelMaintenanceWindowExecution", - "ssm:CreateActivation", - "ssm:CreateAssociation", - "ssm:CreateAssociationBatch", - "ssm:CreateDocument", - "ssm:CreateMaintenanceWindow", - "ssm:CreateOpsItem", - 
"ssm:CreateOpsMetadata", - "ssm:CreatePatchBaseline", - "ssm:CreateResourceDataSync", - "ssm:DeleteActivation", - "ssm:DeleteAssociation", - "ssm:DeleteDocument", - "ssm:DeleteInventory", - "ssm:DeleteMaintenanceWindow", - "ssm:DeleteOpsMetadata", - "ssm:DeleteParameter", - "ssm:DeleteParameters", - "ssm:DeletePatchBaseline", - "ssm:DeleteResourceDataSync", - "ssm:DeregisterManagedInstance", - "ssm:DeregisterPatchBaselineForPatchGroup", - "ssm:DeregisterTargetFromMaintenanceWindow", - "ssm:DeregisterTaskFromMaintenanceWindow", - "ssm:DescribeActivations", - "ssm:DescribeAssociation", - "ssm:DescribeAssociationExecutionTargets", - "ssm:DescribeAssociationExecutions", - "ssm:DescribeAutomationExecutions", - "ssm:DescribeAutomationStepExecutions", - "ssm:DescribeAvailablePatches", - "ssm:DescribeDocument", - "ssm:DescribeDocumentParameters", - "ssm:DescribeDocumentPermission", - "ssm:DescribeEffectiveInstanceAssociations", - "ssm:DescribeEffectivePatchesForPatchBaseline", - "ssm:DescribeInstanceAssociationsStatus", - "ssm:DescribeInstanceInformation", - "ssm:DescribeInstancePatchStates", - "ssm:DescribeInstancePatchStatesForPatchGroup", - "ssm:DescribeInstancePatches", - "ssm:DescribeInstanceProperties", - "ssm:DescribeInventoryDeletions", - "ssm:DescribeMaintenanceWindowExecutionTaskInvocations", - "ssm:DescribeMaintenanceWindowExecutionTasks", - "ssm:DescribeMaintenanceWindowExecutions", - "ssm:DescribeMaintenanceWindowSchedule", - "ssm:DescribeMaintenanceWindowTargets", - "ssm:DescribeMaintenanceWindowTasks", - "ssm:DescribeMaintenanceWindows", - "ssm:DescribeMaintenanceWindowsForTarget", - "ssm:DescribeOpsItems", - "ssm:DescribeParameters", - "ssm:DescribePatchBaselines", - "ssm:DescribePatchGroupState", - "ssm:DescribePatchGroups", - "ssm:DescribePatchProperties", - "ssm:DescribeSessions", - "ssm:DisassociateOpsItemRelatedItem", - "ssm:GetAutomationExecution", - "ssm:GetCalendar", - "ssm:GetCalendarState", - "ssm:GetCommandInvocation", - 
"ssm:GetConnectionStatus", - "ssm:GetDefaultPatchBaseline", - "ssm:GetDeployablePatchSnapshotForInstance", - "ssm:GetDocument", - "ssm:GetInventory", - "ssm:GetInventorySchema", - "ssm:GetMaintenanceWindow", - "ssm:GetMaintenanceWindowExecution", - "ssm:GetMaintenanceWindowExecutionTask", - "ssm:GetMaintenanceWindowExecutionTaskInvocation", - "ssm:GetMaintenanceWindowTask", - "ssm:GetManifest", - "ssm:GetOpsItem", - "ssm:GetOpsMetadata", - "ssm:GetOpsSummary", - "ssm:GetParameter", - "ssm:GetParameterHistory", - "ssm:GetParameters", - "ssm:GetParametersByPath", - "ssm:GetPatchBaseline", - "ssm:GetPatchBaselineForPatchGroup", - "ssm:GetServiceSetting", - "ssm:LabelParameterVersion", - "ssm:ListAssociationVersions", - "ssm:ListAssociations", - "ssm:ListCommandInvocations", - "ssm:ListCommands", - "ssm:ListComplianceItems", - "ssm:ListComplianceSummaries", - "ssm:ListDocumentMetadataHistory", - "ssm:ListDocumentVersions", - "ssm:ListDocuments", - "ssm:ListInstanceAssociations", - "ssm:ListInventoryEntries", - "ssm:ListOpsItemEvents", - "ssm:ListOpsItemRelatedItems", - "ssm:ListOpsMetadata", - "ssm:ListResourceComplianceSummaries", - "ssm:ListResourceDataSync", - "ssm:ListTagsForResource", - "ssm:ModifyDocumentPermission", - "ssm:PutCalendar", - "ssm:PutComplianceItems", - "ssm:PutConfigurePackageResult", - "ssm:PutInventory", - "ssm:PutParameter", - "ssm:RegisterDefaultPatchBaseline", - "ssm:RegisterManagedInstance", - "ssm:RegisterPatchBaselineForPatchGroup", - "ssm:RegisterTargetWithMaintenanceWindow", - "ssm:RegisterTaskWithMaintenanceWindow", - "ssm:RemoveTagsFromResource", - "ssm:ResetServiceSetting", - "ssm:ResumeSession", - "ssm:SendAutomationSignal", - "ssm:SendCommand", - "ssm:StartAssociationsOnce", - "ssm:StartAutomationExecution", - "ssm:StartChangeRequestExecution", - "ssm:StartSession", - "ssm:StopAutomationExecution", - "ssm:TerminateSession", - "ssm:UnlabelParameterVersion", - "ssm:UpdateAssociation", - "ssm:UpdateAssociationStatus", - 
"ssm:UpdateDocument", - "ssm:UpdateDocumentDefaultVersion", - "ssm:UpdateDocumentMetadata", - "ssm:UpdateInstanceAssociationStatus", - "ssm:UpdateInstanceInformation", - "ssm:UpdateMaintenanceWindow", - "ssm:UpdateMaintenanceWindowTarget", - "ssm:UpdateMaintenanceWindowTask", - "ssm:UpdateManagedInstanceRole", - "ssm:UpdateOpsItem", - "ssm:UpdateOpsMetadata", - "ssm:UpdatePatchBaseline", - "ssm:UpdateResourceDataSync", - "ssm:UpdateServiceSetting" - ] -} \ No newline at end of file + "sts": [ + "sts:AssumeRole", + "sts:AssumeRoleWithSAML", + "sts:AssumeRoleWithWebIdentity", + "sts:DecodeAuthorizationMessage", + "sts:GetAccessKeyInfo", + "sts:GetCallerIdentity", + "sts:GetFederationToken", + "sts:GetServiceBearerToken", + "sts:GetSessionToken", + "sts:SetSourceIdentity", + "sts:TagSession" + ], + "s3": [ + "s3:AbortMultipartUpload", + "s3:BypassGovernanceRetention", + "s3:CreateAccessPoint", + "s3:CreateAccessPointForObjectLambda", + "s3:CreateBucket", + "s3:CreateJob", + "s3:CreateMultiRegionAccessPoint", + "s3:DeleteAccessPoint", + "s3:DeleteAccessPointForObjectLambda", + "s3:DeleteAccessPointPolicy", + "s3:DeleteAccessPointPolicyForObjectLambda", + "s3:DeleteBucket", + "s3:DeleteBucketPolicy", + "s3:DeleteBucketWebsite", + "s3:DeleteJobTagging", + "s3:DeleteMultiRegionAccessPoint", + "s3:DeleteObject", + "s3:DeleteObjectTagging", + "s3:DeleteObjectVersion", + "s3:DeleteObjectVersionTagging", + "s3:DeleteStorageLensConfiguration", + "s3:DeleteStorageLensConfigurationTagging", + "s3:DescribeJob", + "s3:DescribeMultiRegionAccessPointOperation", + "s3:GetAccelerateConfiguration", + "s3:GetAccessPoint", + "s3:GetAccessPointConfigurationForObjectLambda", + "s3:GetAccessPointForObjectLambda", + "s3:GetAccessPointPolicy", + "s3:GetAccessPointPolicyForObjectLambda", + "s3:GetAccessPointPolicyStatus", + "s3:GetAccessPointPolicyStatusForObjectLambda", + "s3:GetAccountPublicAccessBlock", + "s3:GetAnalyticsConfiguration", + "s3:GetBucketAcl", + "s3:GetBucketCORS", + 
"s3:GetBucketLocation", + "s3:GetBucketLogging", + "s3:GetBucketNotification", + "s3:GetBucketObjectLockConfiguration", + "s3:GetBucketOwnershipControls", + "s3:GetBucketPolicy", + "s3:GetBucketPolicyStatus", + "s3:GetBucketPublicAccessBlock", + "s3:GetBucketRequestPayment", + "s3:GetBucketTagging", + "s3:GetBucketVersioning", + "s3:GetBucketWebsite", + "s3:GetEncryptionConfiguration", + "s3:GetIntelligentTieringConfiguration", + "s3:GetInventoryConfiguration", + "s3:GetJobTagging", + "s3:GetLifecycleConfiguration", + "s3:GetMetricsConfiguration", + "s3:GetMultiRegionAccessPoint", + "s3:GetMultiRegionAccessPointPolicy", + "s3:GetMultiRegionAccessPointPolicyStatus", + "s3:GetObject", + "s3:GetObjectAcl", + "s3:GetObjectAttributes", + "s3:GetObjectLegalHold", + "s3:GetObjectRetention", + "s3:GetObjectTagging", + "s3:GetObjectTorrent", + "s3:GetObjectVersion", + "s3:GetObjectVersionAcl", + "s3:GetObjectVersionAttributes", + "s3:GetObjectVersionForReplication", + "s3:GetObjectVersionTagging", + "s3:GetObjectVersionTorrent", + "s3:GetReplicationConfiguration", + "s3:GetStorageLensConfiguration", + "s3:GetStorageLensConfigurationTagging", + "s3:GetStorageLensDashboard", + "s3:InitiateReplication", + "s3:ListAccessPoints", + "s3:ListAccessPointsForObjectLambda", + "s3:ListAllMyBuckets", + "s3:ListBucket", + "s3:ListBucketMultipartUploads", + "s3:ListBucketVersions", + "s3:ListJobs", + "s3:ListMultiRegionAccessPoints", + "s3:ListMultipartUploadParts", + "s3:ListStorageLensConfigurations", + "s3:ObjectOwnerOverrideToBucketOwner", + "s3:PutAccelerateConfiguration", + "s3:PutAccessPointConfigurationForObjectLambda", + "s3:PutAccessPointPolicy", + "s3:PutAccessPointPolicyForObjectLambda", + "s3:PutAccessPointPublicAccessBlock", + "s3:PutAccountPublicAccessBlock", + "s3:PutAnalyticsConfiguration", + "s3:PutBucketAcl", + "s3:PutBucketCORS", + "s3:PutBucketLogging", + "s3:PutBucketNotification", + "s3:PutBucketObjectLockConfiguration", + "s3:PutBucketOwnershipControls", + 
"s3:PutBucketPolicy", + "s3:PutBucketPublicAccessBlock", + "s3:PutBucketRequestPayment", + "s3:PutBucketTagging", + "s3:PutBucketVersioning", + "s3:PutBucketWebsite", + "s3:PutEncryptionConfiguration", + "s3:PutIntelligentTieringConfiguration", + "s3:PutInventoryConfiguration", + "s3:PutJobTagging", + "s3:PutLifecycleConfiguration", + "s3:PutMetricsConfiguration", + "s3:PutMultiRegionAccessPointPolicy", + "s3:PutObject", + "s3:PutObjectAcl", + "s3:PutObjectLegalHold", + "s3:PutObjectRetention", + "s3:PutObjectTagging", + "s3:PutObjectVersionAcl", + "s3:PutObjectVersionTagging", + "s3:PutReplicationConfiguration", + "s3:PutStorageLensConfiguration", + "s3:PutStorageLensConfigurationTagging", + "s3:ReplicateDelete", + "s3:ReplicateObject", + "s3:ReplicateTags", + "s3:RestoreObject", + "s3:UpdateJobPriority", + "s3:UpdateJobStatus" + ], + "ssm": [ + "ssm:AddTagsToResource", + "ssm:AssociateOpsItemRelatedItem", + "ssm:CancelCommand", + "ssm:CancelMaintenanceWindowExecution", + "ssm:CreateActivation", + "ssm:CreateAssociation", + "ssm:CreateAssociationBatch", + "ssm:CreateDocument", + "ssm:CreateMaintenanceWindow", + "ssm:CreateOpsItem", + "ssm:CreateOpsMetadata", + "ssm:CreatePatchBaseline", + "ssm:CreateResourceDataSync", + "ssm:DeleteActivation", + "ssm:DeleteAssociation", + "ssm:DeleteDocument", + "ssm:DeleteInventory", + "ssm:DeleteMaintenanceWindow", + "ssm:DeleteOpsMetadata", + "ssm:DeleteParameter", + "ssm:DeleteParameters", + "ssm:DeletePatchBaseline", + "ssm:DeleteResourceDataSync", + "ssm:DeregisterManagedInstance", + "ssm:DeregisterPatchBaselineForPatchGroup", + "ssm:DeregisterTargetFromMaintenanceWindow", + "ssm:DeregisterTaskFromMaintenanceWindow", + "ssm:DescribeActivations", + "ssm:DescribeAssociation", + "ssm:DescribeAssociationExecutionTargets", + "ssm:DescribeAssociationExecutions", + "ssm:DescribeAutomationExecutions", + "ssm:DescribeAutomationStepExecutions", + "ssm:DescribeAvailablePatches", + "ssm:DescribeDocument", + 
"ssm:DescribeDocumentParameters", + "ssm:DescribeDocumentPermission", + "ssm:DescribeEffectiveInstanceAssociations", + "ssm:DescribeEffectivePatchesForPatchBaseline", + "ssm:DescribeInstanceAssociationsStatus", + "ssm:DescribeInstanceInformation", + "ssm:DescribeInstancePatchStates", + "ssm:DescribeInstancePatchStatesForPatchGroup", + "ssm:DescribeInstancePatches", + "ssm:DescribeInstanceProperties", + "ssm:DescribeInventoryDeletions", + "ssm:DescribeMaintenanceWindowExecutionTaskInvocations", + "ssm:DescribeMaintenanceWindowExecutionTasks", + "ssm:DescribeMaintenanceWindowExecutions", + "ssm:DescribeMaintenanceWindowSchedule", + "ssm:DescribeMaintenanceWindowTargets", + "ssm:DescribeMaintenanceWindowTasks", + "ssm:DescribeMaintenanceWindows", + "ssm:DescribeMaintenanceWindowsForTarget", + "ssm:DescribeOpsItems", + "ssm:DescribeParameters", + "ssm:DescribePatchBaselines", + "ssm:DescribePatchGroupState", + "ssm:DescribePatchGroups", + "ssm:DescribePatchProperties", + "ssm:DescribeSessions", + "ssm:DisassociateOpsItemRelatedItem", + "ssm:GetAutomationExecution", + "ssm:GetCalendar", + "ssm:GetCalendarState", + "ssm:GetCommandInvocation", + "ssm:GetConnectionStatus", + "ssm:GetDefaultPatchBaseline", + "ssm:GetDeployablePatchSnapshotForInstance", + "ssm:GetDocument", + "ssm:GetInventory", + "ssm:GetInventorySchema", + "ssm:GetMaintenanceWindow", + "ssm:GetMaintenanceWindowExecution", + "ssm:GetMaintenanceWindowExecutionTask", + "ssm:GetMaintenanceWindowExecutionTaskInvocation", + "ssm:GetMaintenanceWindowTask", + "ssm:GetManifest", + "ssm:GetOpsItem", + "ssm:GetOpsMetadata", + "ssm:GetOpsSummary", + "ssm:GetParameter", + "ssm:GetParameterHistory", + "ssm:GetParameters", + "ssm:GetParametersByPath", + "ssm:GetPatchBaseline", + "ssm:GetPatchBaselineForPatchGroup", + "ssm:GetServiceSetting", + "ssm:LabelParameterVersion", + "ssm:ListAssociationVersions", + "ssm:ListAssociations", + "ssm:ListCommandInvocations", + "ssm:ListCommands", + "ssm:ListComplianceItems", + 
"ssm:ListComplianceSummaries", + "ssm:ListDocumentMetadataHistory", + "ssm:ListDocumentVersions", + "ssm:ListDocuments", + "ssm:ListInstanceAssociations", + "ssm:ListInventoryEntries", + "ssm:ListOpsItemEvents", + "ssm:ListOpsItemRelatedItems", + "ssm:ListOpsMetadata", + "ssm:ListResourceComplianceSummaries", + "ssm:ListResourceDataSync", + "ssm:ListTagsForResource", + "ssm:ModifyDocumentPermission", + "ssm:PutCalendar", + "ssm:PutComplianceItems", + "ssm:PutConfigurePackageResult", + "ssm:PutInventory", + "ssm:PutParameter", + "ssm:RegisterDefaultPatchBaseline", + "ssm:RegisterManagedInstance", + "ssm:RegisterPatchBaselineForPatchGroup", + "ssm:RegisterTargetWithMaintenanceWindow", + "ssm:RegisterTaskWithMaintenanceWindow", + "ssm:RemoveTagsFromResource", + "ssm:ResetServiceSetting", + "ssm:ResumeSession", + "ssm:SendAutomationSignal", + "ssm:SendCommand", + "ssm:StartAssociationsOnce", + "ssm:StartAutomationExecution", + "ssm:StartChangeRequestExecution", + "ssm:StartSession", + "ssm:StopAutomationExecution", + "ssm:TerminateSession", + "ssm:UnlabelParameterVersion", + "ssm:UpdateAssociation", + "ssm:UpdateAssociationStatus", + "ssm:UpdateDocument", + "ssm:UpdateDocumentDefaultVersion", + "ssm:UpdateDocumentMetadata", + "ssm:UpdateInstanceAssociationStatus", + "ssm:UpdateInstanceInformation", + "ssm:UpdateMaintenanceWindow", + "ssm:UpdateMaintenanceWindowTarget", + "ssm:UpdateMaintenanceWindowTask", + "ssm:UpdateManagedInstanceRole", + "ssm:UpdateOpsItem", + "ssm:UpdateOpsMetadata", + "ssm:UpdatePatchBaseline", + "ssm:UpdateResourceDataSync", + "ssm:UpdateServiceSetting" + ] +} diff --git a/service_replace_map.json b/service_replace_map.json index 5aee592..f3302f7 100644 --- a/service_replace_map.json +++ b/service_replace_map.json @@ -1,6 +1,6 @@ { - "s3:HeadObject": "s3:GetObject", - "s3:HeadBucket": "s3:ListBucket", - "s3:ListObjects": "s3:ListBucket", - "s3:GetBucketReplication": "s3:GetReplicationConfiguration" + "s3:HeadObject": "s3:GetObject", + 
"s3:HeadBucket": "s3:ListBucket", + "s3:ListObjects": "s3:ListBucket", + "s3:GetBucketReplication": "s3:GetReplicationConfiguration" }