diff --git a/gsec.py b/gsec.py index 197e1e3..4df4300 100644 --- a/gsec.py +++ b/gsec.py @@ -1,9 +1,9 @@ from colorama import Fore from modules import fetch_requests, scan, urltoip, sub_output from utils import path_traversal, portscanner, loginscanner, techscanner, cmsscanner, passive_recon, crawler, api_scanner -from plugins import phpcheck, optionscheck, shellshock, robots, favicon, auth_tokens, cookies_check +from plugins import phpcheck, optionscheck, shellshock, robots, favicon, auth_tokens, cookies_check, sitemap, securitytxt from exploits import f5bigip_scanner -from vuln_db import hostheader_injection, nuclei_vulns, corsmisconfig, crossdomain, head_vuln, cache_poisoning, webservers_vulns, xss, blind_sqli +from vuln_db import hostheader_injection, nuclei_vulns, corsmisconfig, crossdomain, head_vuln, cache_poisoning, webservers_vulns import argparse import os import asyncio @@ -17,7 +17,7 @@ # ################################################################################## -version = "v1.4" +version = "v1.5" banner = f""" .__________________________. 
@@ -55,6 +55,9 @@ help="Target to scan", metavar="https://www.domain.com") +parser.add_argument('-pl', '--pluginlist', action='store_true', + help="list of plugins") + parser.add_argument('-u', '--updatetemplates', action='store_true', help="Update nuclei templates") @@ -66,6 +69,35 @@ args = parser.parse_args() +if args.pluginlist: + filenames = os.listdir("plugins") + filenames.remove("__init__.py") + file_desc = {} + for filename in filenames: + if filename.endswith(".py"): + if "securitytxt.py" in filename: + file_desc["securitytxt.py"] = " - security.txt is a proposed standard which allows websites to define security policies and contact details.\n" + if "auth_tokens.py" in filename: + file_desc["auth_tokens.py"] = " - Find authentication token leaks\n" + if "optionscheck.py" in filename: + file_desc["optionscheck.py"] = " - OPTIONS method determines the communication options available for a specific resource\n" + if "sitemap.py" in filename: + file_desc["sitemap.py"] = " - A sitemap is a file where a developer or organization can provide information about the pages, videos, and other files offered by the site or application\n" + if "favicon.py" in filename: + file_desc["favicon.py"] = " - Fetches favicon.ico and calculates its hash value to find assets in shodan.\n" + if "phpcheck.py" in filename: + file_desc["phpcheck.py"] = " - Checks a domain for PHP\n" + if "shellshock.py" in filename: + file_desc["shellshock.py"] = " - Scan a domain to find the shellshock vulnerability\n" + if "agent_list.py" in filename: + file_desc["agent_list.py"] = " - A list of user agents\n" + if "robots.py" in filename: + file_desc["robots.py"] = " - Checks for the robots.txt file\n" + if "cookies_check.py" in filename: + file_desc["cookies_check.py"] = " - Prints the PHP SESSID cookies\n" + for k,v in file_desc.items(): + print(f"{k}{v}") + if args.version: print(f"{Fore.YELLOW}Gsec {Fore.MAGENTA}{version}") @@ -99,7 +131,7 @@ async def main(): if "http://" in args.target: 
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} PROTOCOL: {Fore.GREEN}http") optionscheck.Get_Options(args.target) - portscanner.main(args.target) + portscanner.portscanner(args.target) fetch_requests.get_headers(args.target) scan.commands(f"python3 {os.path.abspath(os.getcwd())}/utils/securityheaders.py --target {args.target} --headers X-XSS-Protection") scan.commands(f"python3 {os.path.abspath(os.getcwd())}/utils/securityheaders.py --target {args.target} --headers Content-Security-Policy") @@ -110,6 +142,8 @@ async def main(): phpcheck.php_ident(args.target) techscanner.Tech(args.target) robots.robots_scan(args.target) + sitemap.sitemap(args.target) + securitytxt.securitytxt(args.target) cookies_check.phpsessid_session(args.target) auth_tokens.auth_tokens(args.target) favicon.favicon_hash(args.target) @@ -121,13 +155,11 @@ async def main(): head_vuln.head_auth_bypass(args.target) cache_poisoning.cache_dos_scan(args.target) webservers_vulns.Servers_scan(args.target) - xss.xss_scan(args.target) sub_output.subpro_scan(f"python3 {os.path.abspath(os.getcwd())}/vuln_db/ssrf.py {args.target}") sub_output.subpro_scan(f"python3 {os.path.abspath(os.getcwd())}/vuln_db/openredirect.py {args.target}") path_traversal.path_traversal_scan(args.target) f5bigip_scanner.scan_vuln(args.target) crawler.scan(args.target) - blind_sqli.main(args.target) api_scanner.swagger_ui(args.target) #await loginscanner.main(args.target) print("\n") diff --git a/plugins/optionscheck.py b/plugins/optionscheck.py index ed56cee..c3f56ac 100644 --- a/plugins/optionscheck.py +++ b/plugins/optionscheck.py @@ -6,7 +6,8 @@ header = {"User-Agent": user_agent_} def Get_Options(url: str) -> str: - r = requests.options(f"{url}", verify=False, headers=header) + s = requests.Session() + r = s.options(f"{url}", verify=False, headers=header) allowed = [] for item, value in r.headers.items(): if "Allow" in item: @@ -16,5 +17,22 @@ def Get_Options(url: str) -> str: if allowed: allowed = ", ".join(allowed) 
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} OPTIONS: {Fore.GREEN}{allowed}") + if "PUT" not in allowed or "DELETE" not in allowed: + # Check for HTTP Method Override + http_method_delete = {"X-HTTP-Method": "DELETE"} + http_method_put = {"X-HTTP-Method": "PUT"} + r_method_override = s.get(f"{url}", verify=False, headers=http_method_delete) + if r_method_override.status_code == 200: + print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} OPTIONS: {Fore.GREEN}HTTP Method Override Possible for DELETE") + elif r_method_override.status_code == 405: + pass + r_method_put = s.get(f"{url}", verify=False, headers=http_method_put) + if r_method_put.status_code == 200: + print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} OPTIONS: {Fore.GREEN}HTTP Method Override Possible for PUT") + elif r_method_put.status_code == 405: + pass + + + else: pass diff --git a/plugins/phpcheck.py b/plugins/phpcheck.py index e3d6d4f..619c355 100644 --- a/plugins/phpcheck.py +++ b/plugins/phpcheck.py @@ -41,10 +41,13 @@ def php_ident(url: str) -> str: if php_header or php_language: print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Language: {Fore.GREEN}{php_language}") php_info = sessions.get(f"{url}/phpinfo.php", verify=False, headers=header) + php_admin = sessions.get(f"{url}/phpadmin", verify=False, headers=header) if php_info.status_code == 200 and "404" not in php_info.text and "PHP Version" in php_info.text: print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Found: {Fore.GREEN} {url}/phpinfo.php") elif php_info.status_code == 200: print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Found: {Fore.GREEN} {url}/phpinfo.php") + elif php_admin.status_code == 200: + print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Found: {Fore.GREEN} {url}/phpadmin") else: pass diff --git a/plugins/securitytxt.py b/plugins/securitytxt.py new file mode 100644 index 0000000..0d8f3bc --- /dev/null +++ b/plugins/securitytxt.py @@ -0,0 +1,16 @@ +from colorama import Fore +from plugins import agent_list +import 
requests + +user_a = agent_list.get_useragent() +header = {"User-Agent": user_a} + +def securitytxt(domain: str) -> str: + sec_loc = ["security.txt", ".well-known/security.txt"] + for sec_locs in sec_loc: + s = requests.Session() + r = s.get(f"{domain}/{sec_locs}", verify=False, headers=header) + if r.status_code == 200: + print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Security.txt: {Fore.GREEN}{domain}/{sec_locs}") + else: + pass \ No newline at end of file diff --git a/plugins/sitemap.py b/plugins/sitemap.py new file mode 100644 index 0000000..181f566 --- /dev/null +++ b/plugins/sitemap.py @@ -0,0 +1,16 @@ +from colorama import Fore +from plugins import agent_list +import requests + +user_a = agent_list.get_useragent() +header = {"User-Agent": user_a} + +def sitemap(domain: str) -> str: + sitemap_loc = ["sitemap.txt", "sitemap.xml", "sitemap-index.xml", "sitemap/sitemap.xml"] + for sitemap_locs in sitemap_loc: + s = requests.Session() + r = s.get(f"{domain}/{sitemap_locs}", verify=False, headers=header) + if r.status_code == 200: + print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Sitemap: {Fore.GREEN}{domain}/{sitemap_locs}") + else: + pass \ No newline at end of file diff --git a/utils/api_fuzzer.py b/utils/api_fuzzer.py new file mode 100644 index 0000000..3747b24 --- /dev/null +++ b/utils/api_fuzzer.py @@ -0,0 +1,64 @@ +from colorama import Fore +from time import perf_counter +import requests +import threading +import urllib3 +import sys + +urllib3.disable_warnings() + +user_agent = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.77 Safari/537.36" +header = {"User-Agent": user_agent} + +banner = f""" + + █████▒ █ ██ ▒███████▒ ▒███████▒ ▓██ ██▓ +▓██ ▒ ██ ▓██▒ ▒ ▒ ▒ ▄▀░ ▒ ▒ ▒ ▄▀░ ▒██ ██▒ +▒████ ░ ▓██ ▒██░ ░ ▒ ▄▀▒░ ░ ▒ ▄▀▒░ ▒██ ██░ +░▓█▒ ░ ▓▓█ ░██░ ▄▀▒ ░ ▄▀▒ ░ ░ ▐██▓░ +░▒█░ ▒▒█████▓ ▒███████▒ ▒███████▒ ░ ██▒▓░ + ▒ ░ ░▒▓▒ ▒ ▒ ░▒▒ ▓░▒░▒ ░▒▒ ▓░▒░▒ ██▒▒▒ + ░ ░░▒░ ░ ░ ░░▒ ▒ ░ ▒ ░░▒ ▒ ░ ▒ ▓██ ░▒░ + ░ ░ ░░░ ░ ░ ░ ░ 
░ ░ ░ ░ ░ ░ ░ ░ ▒ ▒ ░░ + ░ ░ ░ ░ ░ ░ ░ + ░ ░ ░ ░ +{Fore.WHITE}Author: {Fore.CYAN}c0d3ninja +{Fore.WHITE}Version: {Fore.CYAN}v1.0 +""" + +print(f"{Fore.RED}{banner}") + +with open("payloads/api.txt", "r") as f: + api_list = (x.strip() for x in f.readlines()) + +def api_fuzzer(domain: str, api: str) -> None: + try: + s = requests.Session() + url = f"{domain}{api}" + r = s.get(url, headers=header, verify=False) + if r.status_code == 200: + print(f"{Fore.GREEN}[+] {Fore.WHITE} - {Fore.MAGENTA}{url}") + else: + print(f"{Fore.RED}[-] {Fore.WHITE} - {Fore.MAGENTA}{url}") + except requests.exceptions.RequestException: + pass + +def main(domain: str) -> None: + threads = [] + for api in api_list: + t = threading.Thread(target=api_fuzzer, args=(domain, api)) + t.start() + threads.append(t) + for thread in threads: + thread.join() + +if __name__ == "__main__": + time_before = perf_counter() + try: + main(sys.argv[1]) + except (urllib3.exceptions.MaxRetryError, requests.exceptions.RequestException): + print(f"{Fore.YELLOW}[!] 
{Fore.WHITE} - Exception occurred during scanning.") + print(f"{Fore.MAGENTA}Time: {Fore.WHITE}{perf_counter() - time_before}") + + + diff --git a/utils/cmsscanner.py b/utils/cmsscanner.py index e8a3050..7c928d4 100644 --- a/utils/cmsscanner.py +++ b/utils/cmsscanner.py @@ -209,6 +209,7 @@ def Shopify(url: str) -> str: meta_tag.append("Shopify") if shopify_name or js_files or found_endpoints or meta_tags: CMS.append("Shopify") + vuln_scan.shopify_vuln_scan(url) def main(url: str) -> str: diff --git a/utils/crawler.py b/utils/crawler.py index 1122bd9..8619665 100644 --- a/utils/crawler.py +++ b/utils/crawler.py @@ -20,7 +20,7 @@ def scan(url: str) -> str: for link in links_l: try: with open("output/spider.txt", "w") as f: - f.writelines(link) + f.write(link) except PermissionError: pass \ No newline at end of file diff --git a/utils/loginscanner.py b/utils/loginscanner.py index 04ce60d..391af8f 100644 --- a/utils/loginscanner.py +++ b/utils/loginscanner.py @@ -1,7 +1,6 @@ from utils import logins from plugins import agent_list from colorama import Fore -import httpx import asyncio diff --git a/utils/portscanner.py b/utils/portscanner.py index 61df232..16c0daf 100644 --- a/utils/portscanner.py +++ b/utils/portscanner.py @@ -1,39 +1,34 @@ from colorama import Fore -from modules import urltoip -import socket import threading +import socket +from modules import urltoip +import ipaddress -open_ports = [] -closed_ports = [] -start_port = 1 -end_port = 65000 - +ports = [80, 8080, 443, 8443] -def scan_port(port): +def portscanner(domain: str): + ip = urltoip.get_ip(domain) + open_ports = [] try: - sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.settimeout(1) - result = sock.connect_ex((ip_address, port)) - - if result == 0: - open_ports.append(f"{port}") - print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} PORTS: {Fore.GREEN}{', '.join(map(str,open_ports))}") - else: - pass - - sock.close() - except: + for port in ports: + sck = socket.socket(socket.AF_INET, 
socket.SOCK_STREAM) + data = sck.connect_ex((ip, port)) + if data == 0: + open_ports.append(f"{port}") + sck.close() + else: + pass + print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} PORTS: {Fore.GREEN}{', '.join(map(str,open_ports))}") + except socket.error: + print (Fore.RED + "Could not connect to host") + pass + except KeyboardInterrupt: + print ("You pressed CTRL+C") + except ipaddress.AddressValueError: + print ("IP address not allowed") + except TypeError: pass -def main(domain: str): - global ip_address - ip_address = urltoip.get_ip(domain) - threads = [] - for port in range(start_port, end_port + 1): - thread = threading.Thread(target=scan_port, args=(port,)) - threads.append(thread) - thread.start() - - # wait for all threads to complete - for thread in threads: - thread.join() +if __name__=="__main__": + t1 = threading.Thread(target=portscanner, args=(ports,)) + t1.start() \ No newline at end of file diff --git a/vuln_db/nuclei_vulns.py b/vuln_db/nuclei_vulns.py index 8026369..4303967 100644 --- a/vuln_db/nuclei_vulns.py +++ b/vuln_db/nuclei_vulns.py @@ -4,26 +4,23 @@ def nuclei_cve_scan(domain: str) -> str: - sub_output.subpro_scan(f"nuclei -u {domain} -t cves/ -severity medium,high,critical -silent -c 100 -j -o vulnerable.json") - sub_output.subpro_scan(f"nuclei -u {domain} -t vulnerabilities/ -severity medium,high,critical -silent -c 100 -j -o vulnerable.json") + sub_output.subpro_scan(f"nuclei -u {domain} -t http/cves/ -severity medium,high,critical -silent -c 100 -j -o vulnerable.json") + sub_output.subpro_scan(f"nuclei -u {domain} -t http/vulnerabilities/ -severity medium,high,critical -silent -c 100 -j -o vulnerable.json") nuclei.parse() def nuclei_ultimate_scan(domain: str) -> str: print(f"{Fore.MAGENTA}[+] {Fore.CYAN}- {Fore.WHITE}Task{Fore.CYAN}:{Fore.LIGHTBLUE_EX} CVE{Fore.WHITE} Status: {Fore.GREEN}Running...") - sub_output.subpro_scan(f"nuclei -u {domain} -tags cve -severity medium,critical,high -silent -c 100 -j -o vulnerable.json") - 
sub_output.subpro_scan(f"nuclei -u {domain} -tags cve -silent -c 100 -j -o vulnerable.json") + sub_output.subpro_scan(f"nuclei -u {domain} -t http/cves/ -severity medium,critical,high -silent -c 100 -j -o vulnerable.json") nuclei.parse() print(f"{Fore.MAGENTA}[+] {Fore.CYAN}- {Fore.WHITE}Task{Fore.CYAN}:{Fore.LIGHTBLUE_EX} CVE{Fore.WHITE} Status: {Fore.GREEN}DONE!\n") print(f"{Fore.MAGENTA}[+] {Fore.CYAN}- {Fore.WHITE}Task{Fore.CYAN}:{Fore.LIGHTBLUE_EX} Vulnerabilities{Fore.WHITE} Status: {Fore.GREEN}Running...") - sub_output.subpro_scan(f"nuclei -u {domain} -t vulnerabilities/ -severity medium,critical,high -silent -c 100 -j -o vulnerable.json") - sub_output.subpro_scan(f"nuclei -u {domain} -t vulnerabilities/ -silent -c 100 -j -o vulnerable.json") + sub_output.subpro_scan(f"nuclei -u {domain} -t http/vulnerabilities/ -severity medium,critical,high -silent -c 100 -j -o vulnerable.json") nuclei.parse() print(f"{Fore.MAGENTA}[+] {Fore.CYAN}- {Fore.WHITE}Task{Fore.CYAN}:{Fore.LIGHTBLUE_EX} Vulnerabilities{Fore.WHITE} Status: {Fore.GREEN}DONE!\n") print(f"{Fore.MAGENTA}[+] {Fore.CYAN}- {Fore.WHITE}Task{Fore.CYAN}:{Fore.LIGHTBLUE_EX} Misconfigurations{Fore.WHITE} Status: {Fore.GREEN}Running...") - sub_output.subpro_scan(f"nuclei -u {domain} -t misconfiguration/ -severity medium,critical,high -silent -c 100 -j -o vulnerable.json") - sub_output.subpro_scan(f"nuclei -u {domain} -t misconfiguration/ -silent -c 100 -j -o vulnerable.json") + sub_output.subpro_scan(f"nuclei -u {domain} -t http/misconfiguration/ -severity medium,critical,high -silent -c 100 -j -o vulnerable.json") nuclei.parse() print(f"{Fore.MAGENTA}[+] {Fore.CYAN}- {Fore.WHITE}Task{Fore.CYAN}:{Fore.LIGHTBLUE_EX} Misconfiguration{Fore.WHITE} Status: {Fore.GREEN}DONE!\n") print(f"{Fore.MAGENTA}[+] {Fore.CYAN}- {Fore.WHITE}Status: {Fore.GREEN}All Tasks done!!") \ No newline at end of file diff --git a/vuln_db/xss.py b/vuln_db/xss.py deleted file mode 100644 index 8c0e935..0000000 --- a/vuln_db/xss.py +++ 
/dev/null @@ -1,57 +0,0 @@ -from bs4 import BeautifulSoup -from colorama import Fore -import requests -import urllib.parse -import re - -payloads = [ - "", - "", - "", - "javascript:alert('XSS')", - "data:text/html;base64,PHNjcmlwdD5hbGVydCgnWFNTJyk8L3NjcmlwdD4=" -] - -def xss_scan(domain: str) -> str: - # send a GET request to the URL - response = requests.get(domain) - - # parse the HTML content using BeautifulSoup - soup = BeautifulSoup(response.content, "html.parser") - - # find all input fields in the form tag with user input options - input_fields = soup.find_all("input", {"type": ["text", "password", "email", "number", "search", "user"]}) - - # iterate through the input fields and send an XSS payload to each field - parsed_payloads = [] - for field in input_fields: - # create a sample XSS payloads - - # set the value of the current input field to the XSS payload - for payload in payloads: - encoded_payload = urllib.parse.quote_plus(payload) - field["value"] = payload - - # submit the form data using a POST request - form_data = {} - for form_field in soup.find_all("input"): - form_data[form_field.get("name")] = form_field.get("value") - response = requests.post(domain, data=form_data) - - # check the response for signs of successful XSS exploitation - if payload in response.text: - print(f'{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} XSS FOUND: {Fore.MAGENTA}{field.get("name")}') - - #if the XSS exploitation was not successful, URL encode the payload and try again - field["value"] = encoded_payload - form_data[field.get("name")] = encoded_payload - response = requests.post(domain, data=form_data) - - if encoded_payload in response.text: - print(f'{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} XSS FOUND: {Fore.MAGENTA}{field.get("name")}') - - scripts = soup.find_all("script") - # Iterate over all script tags - for script in scripts: - if 
re.search(r"(location|document|window)\.(hash|search|referrer|pathname|name|title|cookie|getElementById|getElementsByClassName|getElementsByTagName|write|writeln|innerHTML|outerHTML|setAttribute|getAttribute)\(", str(script)): - print(f'{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Potential DOM XSS: {Fore.MAGENTA}{str(script)} {Fore.RESET}') \ No newline at end of file