forked from sa7mon/S3Scanner
s3scanner.py
#########
#
# AWS S3scanner - Scans domain names for S3 buckets
#
# Author: Dan Salmon (twitter.com/bltjetpack, github.com/sa7mon)
# Created: 6/19/17
# License: Creative Commons (CC BY-NC-SA 4.0)
#
#########
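
# Note: this script assumes the bundled s3utils module sits alongside this file
# and that the third-party 'coloredlogs' package is installed (pip install coloredlogs).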

import argparse
import s3utils as s3
import logging
import coloredlogs
import sys

currentVersion = '1.0.0'


# We want to use both formatter classes, so a custom class it is
class CustomFormatter(argparse.RawTextHelpFormatter, argparse.RawDescriptionHelpFormatter):
    pass


# Instantiate the parser
parser = argparse.ArgumentParser(description='# s3scanner - Find S3 buckets and dump!\n'
                                             '#\n'
                                             '# Author: Dan Salmon - @bltjetpack, github.com/sa7mon\n',
                                 prog='s3scanner', formatter_class=CustomFormatter)

# Declare arguments
parser.add_argument('-o', '--out-file', required=False, dest='outFile',
                    help='Name of file to save the successfully checked buckets in (Default: buckets.txt)')
# parser.add_argument('-c', '--include-closed', required=False, dest='includeClosed', action='store_true',
#                     help='Include found but closed buckets in the out-file')
parser.add_argument('-d', '--dump', required=False, dest='dump', action='store_true',
                    help='Dump all found open buckets locally')
parser.add_argument('-l', '--list', required=False, dest='list', action='store_true',
                    help='List all found open buckets locally')
parser.add_argument('--version', required=False, dest='version', action='store_true',
                    help='Display the current version of this tool')
parser.add_argument('buckets', help='Name of text file containing buckets to check')

# parser.set_defaults(includeClosed=False)
parser.set_defaults(outFile='./buckets.txt')
parser.set_defaults(dump=False)

if len(sys.argv) == 1:  # No args supplied, print the full help text instead of the short usage text
    parser.print_help()
    sys.exit(0)
elif len(sys.argv) == 2:
    if sys.argv[1] == '--version':  # Only --version arg supplied. Print the version and exit.
        print(currentVersion)
        sys.exit(0)

# Parse the args
args = parser.parse_args()

# Create file logger
flog = logging.getLogger('s3scanner-file')
flog.setLevel(logging.DEBUG)  # Set log level for logger object

# Create file handler which logs even debug messages
fh = logging.FileHandler(args.outFile)
fh.setLevel(logging.DEBUG)

# Add the handler to logger
flog.addHandler(fh)

# Create secondary logger for logging to screen
slog = logging.getLogger('s3scanner-screen')
slog.setLevel(logging.INFO)

# Logging levels for the screen logger:
#   INFO  = found
#   ERROR = not found
# The levels serve no other purpose than to specify the output color
levelStyles = {
    'info': {'color': 'blue'},
    'warning': {'color': 'yellow'},
    'error': {'color': 'red'}
}

fieldStyles = {
    'asctime': {'color': 'white'}
}

# Use coloredlogs to add color to screen logger. Define format and styles.
coloredlogs.install(level='DEBUG', logger=slog, fmt='%(asctime)s %(message)s',
                    level_styles=levelStyles, field_styles=fieldStyles)

if not s3.checkAwsCreds():
    s3.awsCredsConfigured = False
    slog.error("Warning: AWS credentials not configured. Open buckets will be shown as closed. Run:"
               " `aws configure` to fix this.\n")

with open(args.buckets, 'r') as f:
    for line in f:
        line = line.rstrip()  # Remove any extra whitespace

        # Determine what kind of input we're given. Options:
        #   bucket name    i.e. mybucket
        #   domain name    i.e. flaws.cloud
        #   full S3 url    i.e. flaws.cloud.s3-us-west-2.amazonaws.com
        #   bucket:region  i.e. flaws.cloud:us-west-2

        if ".amazonaws.com" in line:  # We were given a full s3 url
            bucket = line[:line.rfind(".s3")]
        elif ":" in line:             # We were given a bucket in 'bucket:region' format
            bucket = line.split(":")[0]
        else:                         # We were either given a bucket name or domain name
            bucket = line

        valid = s3.checkBucketName(bucket)

        if not valid:
            message = "{0:>11} : {1}".format("[invalid]", bucket)
            slog.error(message)
            continue

        if s3.awsCredsConfigured:
            b = s3.checkAcl(bucket)
        else:
            a = s3.checkBucketWithoutCreds(bucket)
            b = {"found": a, "acls": "unknown - no aws creds"}

        if b["found"]:
            size = s3.getBucketSize(bucket)  # Try to get the size of the bucket

            message = "{0:>11} : {1}".format("[found]", bucket + " | " + size + " | ACLs: " + str(b["acls"]))
            slog.info(message)
            flog.debug(bucket)

            if args.dump:
                if size not in ["AccessDenied", "AllAccessDisabled"]:
                    slog.info("{0:>11} : {1} - {2}".format("[found]", bucket, "Attempting to dump...this may take a while."))
                    s3.dumpBucket(bucket)
            if args.list:
                if str(b["acls"]) not in ["AccessDenied", "AllAccessDisabled"]:
                    s3.listBucket(bucket)
        else:
            message = "{0:>11} : {1}".format("[not found]", bucket)
            slog.error(message)
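
# Example usage (a sketch; 'names.txt' is a hypothetical input file containing one
# bucket name, domain name, full S3 URL, or bucket:region entry per line):
#
#   python ./s3scanner.py names.txt                  # check each entry
#   python ./s3scanner.py --list --dump names.txt    # also list and download open buckets
#
# Found bucket names are appended to the out-file (./buckets.txt by default).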