From 5d8ebdfb6202f2849ebe925a97a88f78d4c3ec73 Mon Sep 17 00:00:00 2001
From: initstring <26131150+initstring@users.noreply.github.com>
Date: Thu, 15 Oct 2020 21:46:03 +1100
Subject: [PATCH 1/2] big ol chunka updates

---
 README.md                  | 13 +++----
 cloud_enum.py              | 15 ++++--
 enum_tools/azure_checks.py |  2 +-
 enum_tools/fuzz.txt        |  2 ++
 enum_tools/gcp_checks.py   | 71 ++++++++++++++++++++++++++++++++++----
 enum_tools/utils.py        | 15 ++++---
 6 files changed, 95 insertions(+), 23 deletions(-)

diff --git a/README.md b/README.md
index 116d440..daedeb9 100644
--- a/README.md
+++ b/README.md
@@ -4,8 +4,7 @@ Multi-cloud OSINT tool. Enumerate public resources in AWS, Azure, and Google Clo
 Currently enumerates the following:
 
 **Amazon Web Services**:
-- Open S3 Buckets
-- Protected S3 Buckets
+- Open / Protected S3 Buckets
 - awsapps (WorkMail, WorkDocs, Connect, etc.)
 
 **Microsoft Azure**:
@@ -16,8 +15,8 @@ Currently enumerates the following:
 - Web Apps
 
 **Google Cloud Platform**
-- Open GCP Buckets
-- Protected GCP Buckets
+- Open / Protected GCP Buckets
+- Open / Protected Firebase Realtime Databases
 - Google App Engine sites
 - Cloud Functions (enumerates project/regions with existing functions, then brute forces actual function names)
 
@@ -71,10 +70,10 @@ optional arguments:
   -kf KEYFILE, --keyfile KEYFILE
                         Input file with a single keyword per line.
   -m MUTATIONS, --mutations MUTATIONS
-                        Mutations. Default: cloud_enum/mutations.txt.
+                        Mutations. Default: enum_tools/fuzz.txt
   -b BRUTE, --brute BRUTE
                         List to brute-force Azure container names. Default:
-                        cloud_enum/brute.txt.
+                        enum_tools/fuzz.txt
   -t THREADS, --threads THREADS
                         Threads for HTTP brute-force. Default = 5
   -ns NAMESERVER, --nameserver NAMESERVER
@@ -84,6 +83,8 @@ optional arguments:
   --disable-aws         Disable Amazon checks.
   --disable-azure       Disable Azure checks.
   --disable-gcp         Disable Google checks.
+  -qs, --quickscan      Disable all mutations and second-level scans
+
 ```
 
 # Thanks
diff --git a/cloud_enum.py b/cloud_enum.py
index df07aa3..5458277 100755
--- a/cloud_enum.py
+++ b/cloud_enum.py
@@ -79,6 +79,9 @@ def parse_arguments():
     parser.add_argument('--disable-gcp', action='store_true',
                         help='Disable Google checks.')
 
+    parser.add_argument('-qs', '--quickscan', action='store_true',
+                        help='Disable all mutations and second-level scans')
+
     args = parser.parse_args()
 
     # Ensure mutations file is readable
@@ -128,7 +131,10 @@ def print_status(args):
     Print a short pre-run status message
     """
     print("Keywords: {}".format(', '.join(args.keyword)))
-    print("Mutations: {}".format(args.mutations))
+    if args.quickscan:
+        print("Mutations: NONE! (Using quickscan)")
+    else:
+        print("Mutations: {}".format(args.mutations))
     print("Brute-list: {}".format(args.brute))
     print("")
 
@@ -209,8 +215,11 @@ def main():
     # Give our Windows friends a chance at pretty colors
     check_windows()
 
-    # First, build a sort base list of target names
-    mutations = read_mutations(args.mutations)
+    # First, build a sorted base list of target names
+    if args.quickscan:
+        mutations = []
+    else:
+        mutations = read_mutations(args.mutations)
     names = build_names(args.keyword, mutations)
 
     # All the work is done in the individual modules
diff --git a/enum_tools/azure_checks.py b/enum_tools/azure_checks.py
index 6ec8128..7f6062d 100644
--- a/enum_tools/azure_checks.py
+++ b/enum_tools/azure_checks.py
@@ -280,7 +280,7 @@ def run_all(names, args):
     valid_accounts = check_storage_accounts(names, args.threads,
                                             args.nameserver)
 
-    if valid_accounts:
+    if valid_accounts and not args.quickscan:
         brute_force_containers(valid_accounts, args.brute, args.threads)
 
     check_azure_websites(names, args.nameserver, args.threads)
diff --git a/enum_tools/fuzz.txt b/enum_tools/fuzz.txt
index c20d1ef..b854ca4 100644
--- a/enum_tools/fuzz.txt
+++ b/enum_tools/fuzz.txt
@@ -105,6 +105,7 @@ export
 files
 fileshare
 filestore
+firebase
 firestore
 functions
 gateway
@@ -182,6 +183,7 @@ qa
 repo
 reports
 resources
+rtdb
 s3
 saas
 screenshots
diff --git a/enum_tools/gcp_checks.py b/enum_tools/gcp_checks.py
index 69efe03..6f2c9b5 100644
--- a/enum_tools/gcp_checks.py
+++ b/enum_tools/gcp_checks.py
@@ -14,6 +14,7 @@
 
 # Known GCP domain names
 GCP_URL = 'storage.googleapis.com'
+FBRTDB_URL = 'firebaseio.com'
 APPSPOT_URL = 'appspot.com'
 FUNC_URL = 'cloudfunctions.net'
 
@@ -66,6 +67,57 @@ def check_gcp_buckets(names, threads):
     # Stop the time
     utils.stop_timer(start_time)
 
+def print_fbrtdb_response(reply):
+    """
+    Parses the HTTP reply of a brute-force attempt
+
+    This function is passed into the class object so we can view results
+    in real-time.
+    """
+    if reply.status_code == 404:
+        pass
+    elif reply.status_code == 200:
+        utils.printc("    OPEN GOOGLE FIREBASE RTDB: {}\n"
+                     .format(reply.url), 'green')
+    elif reply.status_code == 401:
+        utils.printc("    Protected Google Firebase RTDB: {}\n"
+                     .format(reply.url), 'orange')
+    elif reply.status_code == 402:
+        utils.printc("    Payment required on Google Firebase RTDB: {}\n"
+                     .format(reply.url), 'orange')
+    else:
+        print("    Unknown status codes being received from {}:\n"
+              "       {}: {}"
+              .format(reply.url, reply.status_code, reply.reason))
+
+def check_fbrtdb(names, threads):
+    """
+    Checks for Google Firebase RTDB
+    """
+    print("[+] Checking for Google Firebase Realtime Databases")
+
+    # Start a counter to report on elapsed time
+    start_time = utils.start_timer()
+
+    # Initialize the list of correctly formatted urls
+    candidates = []
+
+    # Take each mutated keyword and craft a url with the correct format
+    for name in names:
+        # Firebase RTDB names cannot include a period. We'll exclude
+        # those from the global candidates list
+        if '.' not in name:
+            candidates.append('{}.{}/.json'.format(name, FBRTDB_URL))
+
+    # Send the valid names to the batch HTTP processor
+    utils.get_url_batch(candidates, use_ssl=True,
+                        callback=print_fbrtdb_response,
+                        threads=threads,
+                        redir=False)
+
+    # Stop the timer
+    utils.stop_timer(start_time)
+
 def print_appspot_response(reply):
     """
     Parses the HTTP reply of a brute-force attempt
@@ -75,12 +127,12 @@ def print_appspot_response(reply):
     """
     if reply.status_code == 404:
         pass
-    elif (str(reply.status_code)[0] == 5):
+    elif str(reply.status_code)[0] == '5':
         utils.printc("    Google App Engine app with a 50x error: {}\n"
                      .format(reply.url), 'orange')
     elif (reply.status_code == 200
-          or reply.status_code == 302
-          or reply.status_code == 404):
+            or reply.status_code == 302
+            or reply.status_code == 404):
         utils.printc("    Google App Engine app: {}\n"
                      .format(reply.url), 'green')
     else:
@@ -156,7 +208,7 @@ def print_functions_response2(reply):
               "       {}: {}"
               .format(reply.url, reply.status_code, reply.reason))
 
-def check_functions(names, brute_list, threads):
+def check_functions(names, brute_list, quickscan, threads):
     """
     Checks for Google Cloud Functions running on cloudfunctions.net
 
@@ -197,6 +249,10 @@ def check_functions(names, brute_list, quickscan, threads):
         utils.stop_timer(start_time)
         return
 
+    # Also bail out if doing a quick scan
+    if quickscan:
+        return
+
     # If we did find something, we'll use the brute list. This will allow people
     # to provide a separate fuzzing list if they choose.
     print("[*] Brute-forcing function names in {} project/region combos"
@@ -233,6 +289,7 @@ def run_all(names, args):
     """
     print(BANNER)
 
-    check_gcp_buckets(names, args.threads)
-    check_appspot(names, args.threads)
-    check_functions(names, args.brute, args.threads)
+    #check_gcp_buckets(names, args.threads)
+    check_fbrtdb(names, args.threads)
+    #check_appspot(names, args.threads)
+    #check_functions(names, args.brute, args.quickscan, args.threads)
diff --git a/enum_tools/utils.py b/enum_tools/utils.py
index 4ebae98..95c3bdf 100644
--- a/enum_tools/utils.py
+++ b/enum_tools/utils.py
@@ -54,13 +54,16 @@ def get_url_batch(url_list, use_ssl=False, callback='', threads=5, redir=True):
     else:
         proto = 'http://'
 
-    # Start a requests object
-    session = FuturesSession(executor=ThreadPoolExecutor(max_workers=threads))
-
     # Using the async requests-futures module, work in batches based on
     # the 'queue' list created above. Call each URL, sending the results
     # back to the callback function.
     for batch in queue:
+        # I used to initialize the session object outside of this loop, BUT
+        # there were a lot of errors that looked related to pool cleanup not
+        # happening. Putting it in here fixes the issue.
+        # There is an unresolved discussion here:
+        # https://github.com/ross/requests-futures/issues/20
+        session = FuturesSession(executor=ThreadPoolExecutor(max_workers=threads+5))
         batch_pending = {}
         batch_results = {}
 
@@ -76,9 +79,9 @@ def get_url_batch(url_list, use_ssl=False, callback='', threads=5, redir=True):
                 # Timeout is set due to observation of some large jobs simply
                 # hanging forever with no exception raised.
                 batch_results[url] = batch_pending[url].result(timeout=30)
-            except requests.exceptions.ConnectionError:
-                print("    [!] Connection error on {}. Investigate if there"
-                      " are many of these.".format(url))
+            except requests.exceptions.ConnectionError as error_msg:
+                print("    [!] Connection error on {}:".format(url))
+                print(error_msg)
             except TimeoutError:
                 print("    [!] Timeout on {}. Investigate if there are"
                       " many of these".format(url))

From 60984244f8d57baefbb53fb5f5a261ecc43eb4ec Mon Sep 17 00:00:00 2001
From: initstring <26131150+initstring@users.noreply.github.com>
Date: Thu, 15 Oct 2020 21:50:22 +1100
Subject: [PATCH 2/2] removed test comments

---
 enum_tools/gcp_checks.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/enum_tools/gcp_checks.py b/enum_tools/gcp_checks.py
index 6f2c9b5..36e85cb 100644
--- a/enum_tools/gcp_checks.py
+++ b/enum_tools/gcp_checks.py
@@ -289,7 +289,7 @@ def run_all(names, args):
     """
     print(BANNER)
 
-    #check_gcp_buckets(names, args.threads)
+    check_gcp_buckets(names, args.threads)
     check_fbrtdb(names, args.threads)
-    #check_appspot(names, args.threads)
-    #check_functions(names, args.brute, args.quickscan, args.threads)
+    check_appspot(names, args.threads)
+    check_functions(names, args.brute, args.quickscan, args.threads)
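
For reviewers who want to poke at the new Firebase check in isolation, the sketch below mirrors the logic that `check_fbrtdb()` and `print_fbrtdb_response()` add in PATCH 1/2. It is not part of the patch: it uses plain `requests` instead of the batched `utils.get_url_batch()` helper, and `probe_firebase_rtdb()` plus the sample candidate names are illustrative only.

```
#!/usr/bin/env python3
"""Standalone sketch of the Firebase RTDB probe added in PATCH 1/2 (illustrative only)."""
import requests

FBRTDB_URL = 'firebaseio.com'

def probe_firebase_rtdb(name, timeout=30):
    """Check one candidate name against https://<name>.firebaseio.com/.json"""
    # Firebase RTDB names cannot include a period, so skip those candidates
    if '.' in name:
        return None

    url = 'https://{}.{}/.json'.format(name, FBRTDB_URL)
    reply = requests.get(url, allow_redirects=False, timeout=timeout)

    # Same status-code interpretation as print_fbrtdb_response()
    if reply.status_code == 200:
        return 'OPEN GOOGLE FIREBASE RTDB: {}'.format(url)
    if reply.status_code == 401:
        return 'Protected Google Firebase RTDB: {}'.format(url)
    if reply.status_code == 402:
        return 'Payment required on Google Firebase RTDB: {}'.format(url)
    if reply.status_code == 404:
        return None
    return 'Unknown status code {} from {}'.format(reply.status_code, url)

if __name__ == '__main__':
    # Hypothetical candidate names -- in cloud_enum these come from the
    # keyword/mutation logic in build_names()
    for candidate in ['somekeyword', 'somekeyword-dev']:
        result = probe_firebase_rtdb(candidate)
        if result:
            print(result)
```

Inside the tool itself the check runs as part of the GCP module, so an invocation along the lines of `./cloud_enum.py -k somekeyword -qs` exercises it while the new `--quickscan` flag skips mutations and the second-level brute-force passes.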