Skip to content

Commit

Permalink
Merge pull request #35 from initstring/updated-oct
Browse files Browse the repository at this point in the history
big ol chunka updates
  • Loading branch information
initstring authored Oct 24, 2020
2 parents bb1a4b6 + 6098424 commit dfc6dd1
Show file tree
Hide file tree
Showing 6 changed files with 93 additions and 21 deletions.
13 changes: 7 additions & 6 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,8 +4,7 @@ Multi-cloud OSINT tool. Enumerate public resources in AWS, Azure, and Google Clo
Currently enumerates the following:

**Amazon Web Services**:
- Open S3 Buckets
- Protected S3 Buckets
- Open / Protected S3 Buckets
- awsapps (WorkMail, WorkDocs, Connect, etc.)

**Microsoft Azure**:
Expand All @@ -16,8 +15,8 @@ Currently enumerates the following:
- Web Apps

**Google Cloud Platform**
- Open GCP Buckets
- Protected GCP Buckets
- Open / Protected GCP Buckets
- Open / Protected Firebase Realtime Databases
- Google App Engine sites
- Cloud Functions (enumerates project/regions with existing functions, then brute forces actual function names)

Expand Down Expand Up @@ -69,10 +68,10 @@ optional arguments:
-kf KEYFILE, --keyfile KEYFILE
Input file with a single keyword per line.
-m MUTATIONS, --mutations MUTATIONS
Mutations. Default: cloud_enum/mutations.txt.
Mutations. Default: enum_tools/fuzz.txt
-b BRUTE, --brute BRUTE
List to brute-force Azure container names. Default:
cloud_enum/brute.txt.
enum_tools/fuzz.txt
-t THREADS, --threads THREADS
Threads for HTTP brute-force. Default = 5
-ns NAMESERVER, --nameserver NAMESERVER
Expand All @@ -82,6 +81,8 @@ optional arguments:
--disable-aws Disable Amazon checks.
--disable-azure Disable Azure checks.
--disable-gcp Disable Google checks.
-qs, --quickscan Disable all mutations and second-level scans
```

# Thanks
Expand Down
15 changes: 12 additions & 3 deletions cloud_enum.py
Original file line number Diff line number Diff line change
Expand Up @@ -79,6 +79,9 @@ def parse_arguments():
parser.add_argument('--disable-gcp', action='store_true',
help='Disable Google checks.')

parser.add_argument('-qs', '--quickscan', action='store_true',
help='Disable all mutations and second-level scans')

args = parser.parse_args()

# Ensure mutations file is readable
Expand Down Expand Up @@ -128,7 +131,10 @@ def print_status(args):
Print a short pre-run status message
"""
print("Keywords: {}".format(', '.join(args.keyword)))
print("Mutations: {}".format(args.mutations))
if args.quickscan:
print("Mutations: NONE! (Using quickscan)")
else:
print("Mutations: {}".format(args.mutations))
print("Brute-list: {}".format(args.brute))
print("")

Expand Down Expand Up @@ -209,8 +215,11 @@ def main():
# Give our Windows friends a chance at pretty colors
check_windows()

# First, build a sort base list of target names
mutations = read_mutations(args.mutations)
# First, build a sorted base list of target names
if args.quickscan:
mutations = []
else:
mutations = read_mutations(args.mutations)
names = build_names(args.keyword, mutations)

# All the work is done in the individual modules
Expand Down
2 changes: 1 addition & 1 deletion enum_tools/azure_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -280,7 +280,7 @@ def run_all(names, args):

valid_accounts = check_storage_accounts(names, args.threads,
args.nameserver)
if valid_accounts:
if valid_accounts and not args.quickscan:
brute_force_containers(valid_accounts, args.brute, args.threads)

check_azure_websites(names, args.nameserver, args.threads)
Expand Down
2 changes: 2 additions & 0 deletions enum_tools/fuzz.txt
Original file line number Diff line number Diff line change
Expand Up @@ -105,6 +105,7 @@ export
files
fileshare
filestore
firebase
firestore
functions
gateway
Expand Down Expand Up @@ -182,6 +183,7 @@ qa
repo
reports
resources
rtdb
s3
saas
screenshots
Expand Down
67 changes: 62 additions & 5 deletions enum_tools/gcp_checks.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@

# Known GCP domain names
GCP_URL = 'storage.googleapis.com'
FBRTDB_URL = 'firebaseio.com'
APPSPOT_URL = 'appspot.com'
FUNC_URL = 'cloudfunctions.net'

Expand Down Expand Up @@ -66,6 +67,57 @@ def check_gcp_buckets(names, threads):
# Stop the time
utils.stop_timer(start_time)

def print_fbrtdb_response(reply):
    """
    Handles one HTTP reply from the Firebase RTDB brute-force batch.

    Supplied as a callback to the batch HTTP processor so that hits are
    reported in real-time as responses come back.
    """
    code = reply.status_code

    # 404 means the database name simply doesn't exist -- stay quiet.
    if code == 404:
        return

    if code == 200:
        utils.printc("    OPEN GOOGLE FIREBASE RTDB: {}\n"
                     .format(reply.url), 'green')
    elif code == 401:
        utils.printc("    Protected Google Firebase RTDB: {}\n"
                     .format(reply.url), 'orange')
    elif code == 402:
        utils.printc("    Payment required on Google Firebase RTDB: {}\n"
                     .format(reply.url), 'orange')
    else:
        # Anything else is unexpected -- surface it for investigation.
        print("    Unknown status codes being received from {}:\n"
              "       {}: {}"
              .format(reply.url, code, reply.reason))

def check_fbrtdb(names, threads):
    """
    Enumerates Google Firebase Realtime Databases for the mutated
    keyword list, reporting results via print_fbrtdb_response.
    """
    print("[+] Checking for Google Firebase Realtime Databases")

    # Counter used to report elapsed time when the batch finishes
    start_time = utils.start_timer()

    # Firebase RTDB names cannot include a period, so any mutated
    # keyword containing one is excluded while building the
    # correctly-formatted candidate URLs.
    candidates = ['{}.{}/.json'.format(name, FBRTDB_URL)
                  for name in names if '.' not in name]

    # Hand the valid names to the batch HTTP processor
    utils.get_url_batch(candidates, use_ssl=True,
                        callback=print_fbrtdb_response,
                        threads=threads,
                        redir=False)

    # Stop the time
    utils.stop_timer(start_time)

def print_appspot_response(reply):
"""
Parses the HTTP reply of a brute-force attempt
Expand All @@ -75,12 +127,12 @@ def print_appspot_response(reply):
"""
if reply.status_code == 404:
pass
elif (str(reply.status_code)[0] == 5):
elif str(reply.status_code)[0] == 5:
utils.printc(" Google App Engine app with a 50x error: {}\n"
.format(reply.url), 'orange')
elif (reply.status_code == 200
or reply.status_code == 302
or reply.status_code == 404):
or reply.status_code == 302
or reply.status_code == 404):
utils.printc(" Google App Engine app: {}\n"
.format(reply.url), 'green')
else:
Expand Down Expand Up @@ -156,7 +208,7 @@ def print_functions_response2(reply):
" {}: {}"
.format(reply.url, reply.status_code, reply.reason))

def check_functions(names, brute_list, threads):
def check_functions(names, brute_list, quickscan, threads):
"""
Checks for Google Cloud Functions running on cloudfunctions.net
Expand Down Expand Up @@ -197,6 +249,10 @@ def check_functions(names, brute_list, threads):
utils.stop_timer(start_time)
return

# Also bail out if doing a quick scan
if quickscan:
return

# If we did find something, we'll use the brute list. This will allow people
# to provide a separate fuzzing list if they choose.
print("[*] Brute-forcing function names in {} project/region combos"
Expand Down Expand Up @@ -234,5 +290,6 @@ def run_all(names, args):
print(BANNER)

check_gcp_buckets(names, args.threads)
check_fbrtdb(names, args.threads)
check_appspot(names, args.threads)
check_functions(names, args.brute, args.threads)
check_functions(names, args.brute, args.quickscan, args.threads)
15 changes: 9 additions & 6 deletions enum_tools/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,13 +54,16 @@ def get_url_batch(url_list, use_ssl=False, callback='', threads=5, redir=True):
else:
proto = 'http://'

# Start a requests object
session = FuturesSession(executor=ThreadPoolExecutor(max_workers=threads))

# Using the async requests-futures module, work in batches based on
# the 'queue' list created above. Call each URL, sending the results
# back to the callback function.
for batch in queue:
# I used to initialize the session object outside of this loop, BUT
# there were a lot of errors that looked related to pool cleanup not
# happening. Putting it in here fixes the issue.
# There is an unresolved discussion here:
# https://github.com/ross/requests-futures/issues/20
session = FuturesSession(executor=ThreadPoolExecutor(max_workers=threads+5))
batch_pending = {}
batch_results = {}

Expand All @@ -76,9 +79,9 @@ def get_url_batch(url_list, use_ssl=False, callback='', threads=5, redir=True):
# Timeout is set due to observation of some large jobs simply
# hanging forever with no exception raised.
batch_results[url] = batch_pending[url].result(timeout=30)
except requests.exceptions.ConnectionError:
print(" [!] Connection error on {}. Investigate if there"
" are many of these.".format(url))
except requests.exceptions.ConnectionError as error_msg:
print(" [!] Connection error on {}:".format(url))
print(error_msg)
except TimeoutError:
print(" [!] Timeout on {}. Investigate if there are"
" many of these".format(url))
Expand Down

0 comments on commit dfc6dd1

Please sign in to comment.