Commit

New updates
Miguel Sanchez committed Feb 26, 2023
1 parent 9c47d72 commit 2304a8a
Showing 8 changed files with 210 additions and 44 deletions.
20 changes: 15 additions & 5 deletions gsec.py
@@ -1,12 +1,14 @@
from colorama import Fore
from modules import fetch_requests, scan, urltoip
from modules import fetch_requests, scan, urltoip, sub_output
from utils import portscanner, loginscanner, techscanner, cmsscanner, passive_recon, crawler
from plugins import phpcheck, optionscheck, shellshock, robots, favicon, auth_tokens, cookies_check
from vuln_db import hostheader_injection, nuclei_vulns, corsmisconfig, crossdomain, head_vuln, cache_poisoning, path_traversal, webservers_vulns
from exploits import f5bigip_scanner
from vuln_db import hostheader_injection, nuclei_vulns, corsmisconfig, crossdomain, head_vuln, cache_poisoning, path_traversal, webservers_vulns, xss
import argparse
import os
import asyncio


##################################################################################
# Good Security Scanner
##################################################################################
@@ -22,7 +24,7 @@
| .___________________. |==| {Fore.YELLOW}Web Security Scanner{Fore.RESET}
| | ................. | | |
| | :::GSec Running!::| | | {Fore.YELLOW}Author: {Fore.MAGENTA}c0d3ninja{Fore.RESET}
| | ::::::::::::::::: | | | {Fore.YELLOW}Version: {Fore.MAGENTA}beta-v0.27{Fore.RESET}
| | ::::::::::::::::: | | | {Fore.YELLOW}Version: {Fore.MAGENTA}beta-v1.0{Fore.RESET}
| | :1337 bugs found!:| | | {Fore.YELLOW}Instagram: {Fore.MAGENTA}gotr00t0day{Fore.RESET}
| | ::::::::::::::::: | | |
| | ::::::::::::::::: | | |
@@ -92,7 +94,7 @@ async def main():
if "http://" in args.target:
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} PROTOCOL: {Fore.GREEN}http")
optionscheck.Get_Options(args.target)
portscanner.portscanner(args.target)
portscanner.main(args.target)
fetch_requests.get_headers(args.target)
scan.commands(f"python3 {os.path.abspath(os.getcwd())}/utils/securityheaders.py --target {args.target} --headers X-XSS-Protection")
scan.commands(f"python3 {os.path.abspath(os.getcwd())}/utils/securityheaders.py --target {args.target} --headers Content-Security-Policy")
@@ -114,11 +116,19 @@
head_vuln.head_auth_bypass(args.target)
cache_poisoning.cache_dos_scan(args.target)
webservers_vulns.Servers_scan(args.target)
xss.xss_scan(args.target)
sub_output.subpro_scan(f"python3 {os.path.abspath(os.getcwd())}/vuln_db/ssrf.py {args.target}")
path_traversal.path_traversal_scan(args.target)
f5bigip_scanner.scan_vuln(args.target)
crawler.scan(args.target)
await loginscanner.main(args.target)
print("\n")
print(f"\t\t {Fore.MAGENTA} SCAN FINISHED{Fore.LIGHTMAGENTA_EX}!{Fore.MAGENTA}!{Fore.YELLOW}!{Fore.RESET}")

if __name__ == "__main__":
asyncio.run(main())
try:
asyncio.run(main())
except ConnectionError:
pass
except ConnectionRefusedError:
pass
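
Review note on the new entry-point guard: ConnectionRefusedError is already a subclass of ConnectionError in Python 3, so the second except clause is redundant. A minimal equivalent sketch with a single handler (same behaviour as the committed code):

if __name__ == "__main__":
    try:
        asyncio.run(main())
    except ConnectionError:
        # ConnectionRefusedError and ConnectionResetError are subclasses of
        # ConnectionError, so this single handler already covers them.
        pass
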
24 changes: 22 additions & 2 deletions modules/fetch_requests.py
@@ -1,10 +1,12 @@
from bs4 import BeautifulSoup
from colorama import Fore
from plugins import agent_list
from utils import webserver_scanner
import requests
import urllib3
import sys
import os
import ssl
import re

urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

@@ -43,6 +45,8 @@ def do_requests(url: str) -> str:
sys.exit()
except AttributeError:
pass
except ssl.SSLCertVerificationError:
pass

def get_headers(url: str) -> str:
sessions = requests.Session()
@@ -64,6 +68,20 @@ def get_headers(url: str) -> str:
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} SERVER: {Fore.GREEN}{', '.join(map(str,server_output))}")
else:
pass
if "Apache" in server_output:
apache_version = webserver_scanner.apache_version()
webpage_server = re.search(r'([\d.]+)', server_output).group(1)
if webpage_server < apache_version:
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Apache {webpage_server}: {Fore.GREEN} Is outdated, current version is {apache_version}")

if "nginx" in server_output:
try:
nginx_version = webserver_scanner.nginx_version()
webpage_server = re.search(r'([\d.]+)', server_output).group(1)
if webpage_server < nginx_version:
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} nginx {webpage_server}: {Fore.GREEN} Is outdated, current version is {nginx_version}")
except TypeError:
pass
if via_output:
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} VIA: {Fore.GREEN}{', '.join(map(str,via_output))}")
else:
@@ -78,4 +96,6 @@ def get_headers(url: str) -> str:
sys.exit()
except requests.exceptions.MissingSchema:
print("Invalid URL, please use http:// or https://")
sys.exit()
sys.exit()
except ssl.SSLCertVerificationError:
pass
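
Review note on the new Apache/nginx version checks: webpage_server and the scraped version are both strings, so the < comparison is lexicographic and, for example, "2.4.9" sorts after "2.4.10" and would not be flagged as outdated. A hedged sketch of a numeric comparison (is_outdated is an illustrative helper, not part of this commit):

def is_outdated(installed: str, latest: str) -> bool:
    # Compare dotted version strings numerically instead of lexicographically.
    def as_tuple(version):
        return tuple(int(part) for part in version.split("."))
    return as_tuple(installed) < as_tuple(latest)

# As strings, "2.4.9" < "2.4.10" is False; compared numerically it is True:
assert is_outdated("2.4.9", "2.4.10")
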
22 changes: 13 additions & 9 deletions plugins/phpcheck.py
@@ -2,6 +2,7 @@
from colorama import Fore
from plugins import agent_list
import requests
import ssl

user_agent_ = agent_list.get_useragent()
header = {"User-Agent": user_agent_}
@@ -10,15 +11,18 @@ def php_ident(url: str) -> str:
php_index = []
php_header = []
php_language = []
sessions = requests.Session()
res = sessions.get(url, verify=False, headers=header)
for value, key in res.headers.items():
if "X-Powered-By" in value and "PHP" in key:
php_header.append(f"PHP")
indexphp = sessions.get(f"{url}/index.php", verify=False, headers=header)
if indexphp.status_code == 200 and "404" not in indexphp.text:
php_index.append("index.php")
if indexphp.status_code == 429:
try:
sessions = requests.Session()
res = sessions.get(url, verify=False, headers=header)
for value, key in res.headers.items():
if "X-Powered-By" in value and "PHP" in key:
php_header.append(f"PHP")
indexphp = sessions.get(f"{url}/index.php", verify=False, headers=header)
if indexphp.status_code == 200 and "404" not in indexphp.text:
php_index.append("index.php")
if indexphp.status_code == 429:
pass
except ssl.SSLCertVerificationError:
pass
try:
info = builtwith(f"{url}")
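
Review note on the ssl.SSLCertVerificationError handlers added here and in fetch_requests.py: requests normally wraps TLS failures in requests.exceptions.SSLError (a subclass of requests.exceptions.ConnectionError), so the raw ssl exception is unlikely to reach these except clauses. A hedged sketch of the broader catch, assuming the goal is simply to skip hosts with broken TLS:

try:
    res = sessions.get(url, verify=False, headers=header)
except requests.exceptions.SSLError:
    # requests surfaces certificate and handshake problems as SSLError.
    pass
except ssl.SSLCertVerificationError:
    # Kept for completeness in case the raw ssl error ever propagates.
    pass
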
4 changes: 3 additions & 1 deletion plugins/robots.py
@@ -18,4 +18,6 @@ def robots_scan(domain: str) -> str:
f.writelines(data.read())
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Robots: {Fore.MAGENTA}Content of robots.txt saved to /output")
except urllib.error.HTTPError:
pass
pass
except urllib.error.URLError:
pass
63 changes: 36 additions & 27 deletions utils/portscanner.py
@@ -1,34 +1,43 @@
from colorama import Fore
import threading
import socket
from modules import urltoip
import ipaddress
import socket
import threading

ports = [80, 8080, 443]
open_ports = []
closed_ports = []
start_port = 1
end_port = 65000

def portscanner(domain: str):
ip = urltoip.get_ip(domain)
open_ports = []
# specify the range of ports to scan

# define a function to scan a single port and print the service banner
def scan_port(port):
try:
for port in ports:
sck = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
data = sck.connect_ex((ip, port))
if data == 0:
open_ports.append(f"{port}")
sck.close()
else:
pass
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} PORTS: {Fore.GREEN}{', '.join(map(str,open_ports))}")
except socket.error:
print (Fore.RED + "Could not connect to host")
pass
except KeyboardInterrupt:
print ("You pressed CTRL+C")
except ipaddress.AddressValueError:
print ("IP address not allowed")
except TypeError:
# create a TCP socket and attempt to connect to the specified port
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
result = sock.connect_ex((ip_address, port))

# if the connection is successful, attempt to receive the service banner
if result == 0:
open_ports.append(f"{port}")
print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} PORTS: {Fore.GREEN}{', '.join(map(str,open_ports))}")

# close the socket
sock.close()
except:
pass

if __name__=="__main__":
t1 = threading.Thread(target=portscanner, args=(ports,))
t1.start()
# create a list of threads to scan each port in the range
def main(domain: str):
global ip_address
ip_address = urltoip.get_ip(domain)
threads = []
for port in range(start_port, end_port + 1):
thread = threading.Thread(target=scan_port, args=(port,))
threads.append(thread)
thread.start()

# wait for all threads to complete
for thread in threads:
thread.join()
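
Review note on the rewritten scanner: it starts one thread per port across 1-65000, which can run into OS thread limits. A hedged sketch of the same connect-scan with a bounded pool (the 200-worker figure is illustrative, not taken from the commit):

import socket
from concurrent.futures import ThreadPoolExecutor

def scan_port(ip_address, port):
    # Return the port if a TCP connect succeeds, otherwise None.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.settimeout(1)
        if sock.connect_ex((ip_address, port)) == 0:
            return port
    return None

def scan_range(ip_address, start=1, end=65000, workers=200):
    # Bounded worker pool instead of one thread per port.
    with ThreadPoolExecutor(max_workers=workers) as pool:
        results = pool.map(lambda p: scan_port(ip_address, p), range(start, end + 1))
        return [port for port in results if port is not None]
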
21 changes: 21 additions & 0 deletions utils/webserver_scanner.py
@@ -0,0 +1,21 @@
import requests
import re
from bs4 import BeautifulSoup

def apache_version():
url = 'https://httpd.apache.org/download.cgi'

response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
version = soup.find("h1", attrs={'id':'apache24'}).get_text()
version_number = re.search(r'([\d.]+)', version).group(1)
return version_number

def nginx_version():
url = 'https://nginx.org/en/download.html'

response = requests.get(url)
soup = BeautifulSoup(response.text, 'html.parser')
version = soup.find('a', attrs={'href':'/download/nginx-1.22.1.tar.gz'}).get_text()
version_number = re.search(r'([\d.]+)', version).group(1)
return version_number
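
Review note on nginx_version(): it keys off the hard-coded link /download/nginx-1.22.1.tar.gz, so it can only ever report 1.22.1, and soup.find() returns None (making .get_text() raise AttributeError) once that link disappears from the page. A hedged sketch that matches any nginx tarball link instead (the page layout is an assumption about nginx.org, not something the commit guarantees):

import re
import requests
from bs4 import BeautifulSoup

def nginx_latest_version():
    # Return the highest nginx-x.y.z tarball version linked from the download page.
    response = requests.get("https://nginx.org/en/download.html", timeout=10)
    soup = BeautifulSoup(response.text, "html.parser")
    versions = []
    for link in soup.find_all("a", href=True):
        match = re.search(r"nginx-(\d+\.\d+\.\d+)\.tar\.gz", link["href"])
        if match:
            versions.append(tuple(int(part) for part in match.group(1).split(".")))
    return ".".join(str(part) for part in max(versions)) if versions else None
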
43 changes: 43 additions & 0 deletions vuln_db/ssrf.py
@@ -0,0 +1,43 @@
from colorama import Fore
import requests
import re
import threading
import sys

# specify the URL to test
url = sys.argv[1]

# create a list of domains to test for SSRF
domains = ["localhost", "127.0.0.1", "0.0.0.0", "localhost.localdomain", "localhost6.localdomain6", "0:0:0:0:0:0:0:1"]

# define a function to check a single parameter for SSRF
def check_parameter(parameter):
for domain in domains:
try:
# send a GET request with the current domain in the parameter
response = requests.get(url.replace(parameter, domain))

# check the response for signs of successful SSRF exploitation
if re.search(r"connection refused|network is unreachable", response.text, re.IGNORECASE):
print(f'{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} SSRF Found in: {Fore.MAGENTA}{parameter}')
break
except:
# handle any exceptions that occur during the request
pass

# send a GET request to the URL and extract all parameters
response = requests.get(url)
parameters = re.findall(r"\?(\w+)=", response.text)

# create a list of threads to check each parameter for SSRF
if __name__ == "__main__":
threads = []
for parameter in parameters:
thread = threading.Thread(target=check_parameter, args=(parameter,))
threads.append(thread)
thread.start()

# wait for all threads to complete
for thread in threads:
thread.join()
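
Review note on ssrf.py: url.replace(parameter, domain) swaps the parameter name itself, wherever it appears in the URL string, rather than the parameter's value. A hedged sketch of value substitution with urllib.parse (build_test_url is an illustrative helper, not part of the commit; the commit's error-string heuristic is left as is):

import urllib.parse

def build_test_url(url, parameter, test_domain):
    # Replace the value of `parameter` in the query string with `test_domain`,
    # leaving every other parameter untouched.
    parts = urllib.parse.urlsplit(url)
    query = urllib.parse.parse_qs(parts.query, keep_blank_values=True)
    if parameter not in query:
        return None
    query[parameter] = [test_domain]
    return urllib.parse.urlunsplit(
        (parts.scheme, parts.netloc, parts.path,
         urllib.parse.urlencode(query, doseq=True), parts.fragment)
    )

# build_test_url("http://example.com/page?next=/home", "next", "127.0.0.1")
# -> "http://example.com/page?next=127.0.0.1"
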

57 changes: 57 additions & 0 deletions vuln_db/xss.py
@@ -0,0 +1,57 @@
from bs4 import BeautifulSoup
from colorama import Fore
import requests
import urllib.parse
import re

payloads = [
"<script>alert('XSS')</script>",
"<img src=x onerror=alert('XSS')>",
"<svg onload=alert('XSS')>",
"javascript:alert('XSS')",
"data:text/html;base64,PHNjcmlwdD5hbGVydCgnWFNTJyk8L3NjcmlwdD4="
]

def xss_scan(domain: str) -> str:
# send a GET request to the URL
response = requests.get(domain)

# parse the HTML content using BeautifulSoup
soup = BeautifulSoup(response.content, "html.parser")

# find all input fields in the form tag with user input options
input_fields = soup.find_all("input", {"type": ["text", "password", "email", "number", "search", "user"]})

# iterate through the input fields and send an XSS payload to each field
parsed_payloads = []
for field in input_fields:
# create a sample XSS payloads

# set the value of the current input field to the XSS payload
for payload in payloads:
encoded_payload = urllib.parse.quote_plus(payload)
field["value"] = payload

# submit the form data using a POST request
form_data = {}
for form_field in soup.find_all("input"):
form_data[form_field.get("name")] = form_field.get("value")
response = requests.post(domain, data=form_data)

# check the response for signs of successful XSS exploitation
if payload in response.text:
print(f'{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} XSS FOUND: {Fore.MAGENTA}{field.get("name")}')

#if the XSS exploitation was not successful, URL encode the payload and try again
field["value"] = encoded_payload
form_data[field.get("name")] = encoded_payload
response = requests.post(domain, data=form_data)

if encoded_payload in response.text:
print(f'{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} XSS FOUND: {Fore.MAGENTA}{field.get("name")}')

scripts = soup.find_all("script")
# Iterate over all script tags
for script in scripts:
if re.search(r"(location|document|window)\.(hash|search|referrer|pathname|name|title|cookie|getElementById|getElementsByClassName|getElementsByTagName|write|writeln|innerHTML|outerHTML|setAttribute|getAttribute)\(", str(script)):
print(f'{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Potential DOM XSS: {Fore.MAGENTA}{str(script)}')
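
Review note on xss_scan(): every payload is POSTed back to the page URL itself, so forms that declare their own action attribute or submit via GET are never exercised. A hedged sketch of resolving each form's submit target (helper names are illustrative, not part of the commit; form is a BeautifulSoup <form> tag):

import urllib.parse
import requests

def submit_form(page_url, form, data):
    # Resolve the form's action relative to the page and honour its method.
    target = urllib.parse.urljoin(page_url, form.get("action") or page_url)
    method = (form.get("method") or "get").lower()
    if method == "post":
        return requests.post(target, data=data, timeout=10)
    return requests.get(target, params=data, timeout=10)

def payload_reflected(page_url, form, field_name, payload):
    # Fill one field with the payload, leave the rest blank, and check the echo.
    data = {i.get("name"): "" for i in form.find_all("input") if i.get("name")}
    data[field_name] = payload
    return payload in submit_form(page_url, form, data).text
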
