From 94ab79eca20b8eefa6532b644c57ab73b91718d2 Mon Sep 17 00:00:00 2001
From: Miguel Sanchez
Date: Tue, 13 Dec 2022 19:11:31 -0500
Subject: [PATCH] updates

---
 utils/crawler.py      | 7 +++++--
 utils/loginscanner.py | 5 +++--
 utils/osdetect.py     | 2 ++
 3 files changed, 10 insertions(+), 4 deletions(-)

diff --git a/utils/crawler.py b/utils/crawler.py
index 8b38b7e..1122bd9 100644
--- a/utils/crawler.py
+++ b/utils/crawler.py
@@ -18,6 +18,9 @@ def scan(url: str) -> str:
 
         if page_links not in duplicate_links:
             links_l.append(page_links)
     for link in links_l:
-        with open("output/spider.txt", "a") as f:
-            f.writelines(link)
+        try:
+            with open("output/spider.txt", "a") as f:
+                f.writelines(link)
+        except PermissionError:
+            pass
\ No newline at end of file
diff --git a/utils/loginscanner.py b/utils/loginscanner.py
index f0b39d9..04ce60d 100644
--- a/utils/loginscanner.py
+++ b/utils/loginscanner.py
@@ -16,8 +16,9 @@ async def get_responses(client, link_paths: str):
         found_adminlinks = []
         r = await client.get(link_paths)
         if r.status_code == 200 and "404" not in r.text and "Page Not Found" not in r.text:
-            found_adminlinks.append(link_paths)
-            print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} Login: {Fore.GREEN} {', '.join(map(str,found_adminlinks))}")
+            found_adminlinks.append(link_paths + "\n")
+            with open("output/loginpages.txt", "a") as f:
+                f.writelines(found_adminlinks)
     except RuntimeError:
         pass
     except ValueError:
diff --git a/utils/osdetect.py b/utils/osdetect.py
index 71e2839..9112c7d 100644
--- a/utils/osdetect.py
+++ b/utils/osdetect.py
@@ -35,4 +35,6 @@ def osdetection_scan(url: str):
         print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} OS: {Fore.RED} Not Detected!")
         print(f"{Fore.MAGENTA}[+] {Fore.CYAN}-{Fore.WHITE} OS: {Fore.GREEN} {os}")
     except scapy.error.Scapy_Exception:
+        pass
+    except PermissionError:
         pass
\ No newline at end of file