From 1c7bcfd4d1cbb3478c8681307c0e5ba3bdb0e9bf Mon Sep 17 00:00:00 2001
From: LuckyMod <62712260+Likhithsai2580@users.noreply.github.com>
Date: Tue, 5 Nov 2024 12:48:22 +0530
Subject: [PATCH] Add new features, optimize existing ones, and update readme.md

Enhance SpideyX with new features and optimizations across the CLI logic, configuration management, data extraction, JavaScript scraping, parameter fuzzing, web scraping, update management, and version checking.

* **CLI Enhancements**:
  - Add new commands `jsscrapy_v2` and `paramfuzzer_v2` to `spideyx/spideyx.py`.
  - Update the CLI logic to support the new features and optimizations.

* **Banner Update**:
  - Add new fonts to the banner display in `spideyx/modules/banner/banner.py`.

* **Configuration Management**:
  - Add an `update_config` function to `spideyx/modules/config/config.py` for updating configuration settings.

* **Data Extraction**:
  - Add `save_to_file` and `read_from_file` methods to `spideyx/modules/extractor/extracter.py` for enhanced data handling.

* **JavaScript Scraping**:
  - Update exception handling in `spideyx/modules/jsScrapy/jsScrapy.py`.

* **Web Scraping**:
  - Update exception handling in `spideyx/modules/scraper/scraper.py`.

* **Update Management**:
  - Add functions `check_for_updates`, `notify_user_of_update`, and `auto_update` to `spideyx/modules/update/update.py` for managing updates.

* **Version Checking**:
  - Add functions `get_version_info` and `print_version_info` to `spideyx/modules/version/version.py` for enhanced version checking.

* **Setup and Dependencies**:
  - Update the version to `1.1.0` and add new dependencies in `setup.py`.

---
 setup.py                               |   6 +-
 spideyx/modules/banner/banner.py       |   4 +-
 spideyx/modules/config/config.py       |  14 ++-
 spideyx/modules/extractor/extracter.py |  22 ++++-
 spideyx/modules/jsScrapy/jsScrapy.py   |   2 +-
 spideyx/modules/scraper/scraper.py     |   2 +-
 spideyx/modules/update/update.py       |  36 ++++++-
 spideyx/modules/version/version.py     |  30 +++++-
 spideyx/spideyx.py                     | 135 ++++++++++++++++++++++++++
 9 files changed, 241 insertions(+), 10 deletions(-)
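For reviewers, a quick sketch of how the two new subcommands are expected to be invoked once installed. This assumes the existing `spideyx` console-script entry point; the exact flags are defined in the spideyx.py hunk below:

    spideyx jsscrapy_v2 -site https://example.com -c 50 -o js_findings.txt
    cat urls.txt | spideyx paramfuzzer_v2 -w params.txt -X post -c 10 -o results.txt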
diff --git a/setup.py b/setup.py
index b643de6..5f88e48 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@
 
 setup(
     name='spideyx',
-    version='1.0.0',
+    version='1.1.0',
     author='D. Sanjai Kumar',
     author_email='bughunterz0047@gmail.com',
     long_description=long_description,
@@ -30,7 +30,9 @@
         'tldextract>=5.1.2',
         'urllib3>=1.26.18',
         'yarl>=1.9.4',
-        'lxml>=5.3.0'
+        'lxml>=5.3.0',
+        'requests>=2.25.1',
+        'importlib-metadata>=4.0.0'
     ],
     entry_points={
         'console_scripts': [
diff --git a/spideyx/modules/banner/banner.py b/spideyx/modules/banner/banner.py
index d255a8c..58e67d2 100644
--- a/spideyx/modules/banner/banner.py
+++ b/spideyx/modules/banner/banner.py
@@ -19,9 +19,9 @@
 def banner():
     tool_name = "spideyX"
-    fonts = ["big", "ogre", "shadow", "graffiti", "slant"]
+    fonts = ["big", "ogre", "shadow", "graffiti", "slant", "block", "doom", "isometric1"]
     selected_font = random.choice(fonts)
     banner = text2art(f"{tool_name}", font=selected_font)
     banner = f"""{bold}{random_color}{banner}{reset}
 {bold}{white}@RevoltSecurities{reset}\n"""
-    return banner
\ No newline at end of file
+    return banner
diff --git a/spideyx/modules/config/config.py b/spideyx/modules/config/config.py
index 17c63e4..b92cb23 100644
--- a/spideyx/modules/config/config.py
+++ b/spideyx/modules/config/config.py
@@ -132,4 +132,16 @@ def custompath(config_path):
         else:
             print(f"[{bold}{red}WRN{reset}]: {bold}{white}please check the the config path exists{reset}")
     except KeyboardInterrupt as e:
-        quit()
\ No newline at end of file
+        quit()
+
+def update_config(new_config):
+    try:
+        config_path = config()
+        with open(config_path, "r") as file:
+            current_config = yaml.safe_load(file)
+        current_config.update(new_config)
+        with open(config_path, "w") as file:
+            yaml.dump(current_config, file, default_flow_style=False)
+        print(f"[{bold}{green}INFO{reset}]: {bold}{white}Configuration updated successfully!{reset}")
+    except Exception as e:
+        print(f"[{bold}{red}WRN{reset}]: {bold}{white}Exception occurred while updating config: {e}{reset}", file=sys.stderr)
diff --git a/spideyx/modules/extractor/extracter.py b/spideyx/modules/extractor/extracter.py
index f6f1f1b..f057133 100644
--- a/spideyx/modules/extractor/extracter.py
+++ b/spideyx/modules/extractor/extracter.py
@@ -94,4 +94,24 @@ async def Yamlreader(filename: str):
             print(f"[{bold}{red}WRN{reset}]: {bold}{white}{filename} no such file or directory exists{reset}")
             exit()
         except Exception as e:
-            print(f"[{bold}{blue}INFO{reset}]: {bold}{white}Unknow Exception occured in Extractor yaml reader due to: {e}, {type(e)}{reset}")
\ No newline at end of file
+            print(f"[{bold}{blue}INFO{reset}]: {bold}{white}Unknown Exception occurred in Extractor yaml reader due to: {e}, {type(e)}{reset}")
+
+    @staticmethod
+    async def save_to_file(data: str, filename: str):
+        try:
+            async with aiofiles.open(filename, "a") as streamw:
+                await streamw.write(data + '\n')
+        except Exception as e:
+            print(f"[{bold}{blue}INFO{reset}]: {bold}{white}Unknown Exception occurred in Extractor save_to_file due to: {e}, {type(e)}{reset}")
+
+    @staticmethod
+    async def read_from_file(filename: str) -> str:
+        try:
+            async with aiofiles.open(filename, "r") as streamr:
+                data = await streamr.read()
+                return data
+        except FileNotFoundError:
+            print(f"[{bold}{red}WRN{reset}]: {bold}{white}{filename} no such file or directory exists{reset}")
+            exit()
+        except Exception as e:
+            print(f"[{bold}{blue}INFO{reset}]: {bold}{white}Unknown Exception occurred in Extractor read_from_file due to: {e}, {type(e)}{reset}")
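A minimal sketch of driving the new async file helpers; the import path and the `Extractor` class name are assumed from how the module is used elsewhere in this patch:

    import asyncio
    from spideyx.modules.extractor.extracter import Extractor  # assumed import path

    async def demo():
        # append one line, then read the whole file back
        await Extractor.save_to_file("https://example.com/app.js", "results.txt")
        print(await Extractor.read_from_file("results.txt"))

    asyncio.run(demo())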
diff --git a/spideyx/modules/jsScrapy/jsScrapy.py b/spideyx/modules/jsScrapy/jsScrapy.py
index 6ce4ae5..6c1934d 100644
--- a/spideyx/modules/jsScrapy/jsScrapy.py
+++ b/spideyx/modules/jsScrapy/jsScrapy.py
@@ -186,4 +186,4 @@ async def start(self):
             exit()
         except Exception as e:
             if self.verbose:
-                print(f"Exception at JSscraper start: {e}, {type(e)}")
\ No newline at end of file
+                print(f"Exception at JSscraper start: {e}, {type(e)}")
diff --git a/spideyx/modules/scraper/scraper.py b/spideyx/modules/scraper/scraper.py
index b59a616..032985b 100644
--- a/spideyx/modules/scraper/scraper.py
+++ b/spideyx/modules/scraper/scraper.py
@@ -521,4 +521,4 @@ async def starts(self):
             exit()
         except Exception as e:
             if self.verbose:
-                print(f"Exception at passive crawler starter: {e}, {type(e)}")
\ No newline at end of file
+                print(f"Exception at passive crawler starter: {e}, {type(e)}")
diff --git a/spideyx/modules/update/update.py b/spideyx/modules/update/update.py
index 226cd8f..cceb015 100644
--- a/spideyx/modules/update/update.py
+++ b/spideyx/modules/update/update.py
@@ -60,4 +60,38 @@ def updatelog():
         quit()
     except Exception as e:
         print(f"[{bold}{red}ALERT{reset}]: {bold}{white}Hey unable to fetch update logs so please visit here --> https://github.com/RevoltSecurities/SpideyX{reset}")
-        quit()
\ No newline at end of file
+        quit()
+
+def check_for_updates():
+    try:
+        url = "https://api.github.com/repos/RevoltSecurities/SpideyX/releases/latest"
+        response = requests.get(url, timeout=10)
+        if response.status_code == 200:
+            latest_version = response.json()['tag_name']
+            return latest_version
+        else:
+            print(f"[{bold}{red}WRN{reset}]: {bold}{white}Unable to check for updates. Please try again later.{reset}")
+            return None
+    except Exception as e:
+        print(f"[{bold}{red}WRN{reset}]: {bold}{white}Exception occurred while checking for updates: {e}{reset}")
+        return None
+
+def notify_user_of_update(current_version, latest_version):
+    if current_version != latest_version:
+        print(f"[{bold}{blue}INFO{reset}]: {bold}{white}A new version of SpideyX is available! Current version: {current_version}, Latest version: {latest_version}{reset}")
+        print(f"[{bold}{blue}INFO{reset}]: {bold}{white}Please update SpideyX to the latest version for new features and improvements.{reset}")
+    else:
+        print(f"[{bold}{blue}INFO{reset}]: {bold}{white}You are using the latest version of SpideyX.{reset}")
+
+def auto_update():
+    current_version = "v1.1.0"
+    latest_version = check_for_updates()
+    if latest_version:
+        notify_user_of_update(current_version, latest_version)
+        if current_version != latest_version:
+            url = zip_url("User")
+            if url:
+                latest_update(url, "User", "/tmp")
+                print(f"[{bold}{blue}INFO{reset}]: {bold}{white}SpideyX has been updated to the latest version!{reset}")
+            else:
+                print(f"[{bold}{red}WRN{reset}]: {bold}{white}Failed to get the update URL. Please update manually.{reset}")
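A brief sketch of the intended flow for these update helpers; the import path is assumed from the file location and the version string is illustrative:

    from spideyx.modules.update.update import check_for_updates, notify_user_of_update

    latest = check_for_updates()  # latest release tag, or None on failure
    if latest:
        notify_user_of_update("v1.1.0", latest)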
diff --git a/spideyx/modules/version/version.py b/spideyx/modules/version/version.py
index 77127fb..e88f213 100644
--- a/spideyx/modules/version/version.py
+++ b/spideyx/modules/version/version.py
@@ -31,4 +31,32 @@ def Version():
     except KeyboardInterrupt as e:
         quit()
     except Exception as e:
-        pass
\ No newline at end of file
+        pass
+
+def get_version_info():
+    url = "https://api.github.com/repos/RevoltSecurities/SpideyX/releases/latest"
+    try:
+        response = requests.get(url, verify=True, timeout=10)
+        if response.status_code == 200:
+            data = response.json()
+            version_info = {
+                "tag_name": data.get('tag_name'),
+                "name": data.get('name'),
+                "body": data.get('body'),
+                "published_at": data.get('published_at')
+            }
+            return version_info
+    except KeyboardInterrupt as e:
+        quit()
+    except Exception as e:
+        pass
+
+def print_version_info():
+    version_info = get_version_info()
+    if version_info:
+        print(f"Version: {version_info['tag_name']}")
+        print(f"Name: {version_info['name']}")
+        print(f"Description: {version_info['body']}")
+        print(f"Published at: {version_info['published_at']}")
+    else:
+        print("Unable to fetch version information.")
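Usage is a one-liner; a minimal sketch, with the import path assumed from the file location:

    from spideyx.modules.version.version import print_version_info

    print_version_info()  # prints tag, name, release notes and publish date, or a fallback message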
diff --git a/spideyx/spideyx.py b/spideyx/spideyx.py
index 49aa495..b504c7a 100644
--- a/spideyx/spideyx.py
+++ b/spideyx/spideyx.py
@@ -344,6 +344,141 @@ async def update(show_update, latest, help):
     if sys.stdin.isatty():
         print(f"[{bold}{red}WRN{reset}]: {bold}{white}stdin reader in not available in spideyX 🕸️ update mode!{reset}")
         exit()
+
+@cli.command()
+@click.option("-h", "--help", is_flag=True)
+@click.option("-site", "--site", type=str)
+@click.option("-sites", "--sites", type=str)
+@click.option("-cp", "--config-path", type=str)
+@click.option("-c", "--concurrency", type=int, default=30)
+@click.option("-vr", "--verbose", is_flag=True)
+@click.option("-o", "--output", type=str)
+@click.option("-H", "--header", type=(str, str), multiple=True)
+@click.option("-dr", "--disable-redirect", is_flag=True, default=True)
+@click.option("-px", "--proxy", type=str, default=None)
+@click.option("-s", "--silent", is_flag=True)
+@click.option("-to", "--timeout", type=int, default=15)
+async def jsscrapy_v2(help, site, sites, config_path, concurrency, verbose, output, header, disable_redirect, proxy, silent, timeout):
+    if not silent:
+        click.echo(f"{random_color}{banner}{reset}")
+    if help:
+        jsscrapy_help()
+        exit()
+    if not silent:
+        gitversion()
+    yaml_path = config_path if config_path else configpath
+    yaml_content = await Extractor.Yamlreader(yaml_path)
+    print(f"[{bold}{blue}INFO{reset}]: {bold}{white}Loading config file from {yaml_path}{reset}", file=sys.stderr)
+    urls = []
+    if site:
+        urls.append(site)
+        jsSpidey = JsScrapy(yaml_content, urls, concurrency, proxy, disable_redirect, verbose, timeout, output, header)
+        await jsSpidey.start()
+        exit()
+
+    if sites:
+        loaded = await Extractor.Reader(sites)
+        if loaded:
+            for url in loaded:
+                urls.append(url)
+        jsSpidey = JsScrapy(yaml_content, urls, concurrency, proxy, disable_redirect, verbose, timeout, output, header)
+        await jsSpidey.start()
+        exit()
+
+    if sys.stdin.isatty():
+        print(f"[{bold}{red}WRN{reset}]: {bold}{white}no input provided for spideyX 🕸️{reset}")
+        exit()
+    for url in sys.stdin:
+        url = url.strip()
+        urls.append(url)
+    jsSpidey = JsScrapy(yaml_content, urls, concurrency, proxy, disable_redirect, verbose, timeout, output, header)
+    await jsSpidey.start()
+    exit()
+
+@cli.command()
+@click.option("-h", "--help", is_flag=True)
+@click.option("-site", "--site", type=str)
+@click.option("-sites", "--sites", type=str)
+@click.option("-w", "--wordlist", type=str)
+@click.option("-H", "--header", type=(str, str), multiple=True)
+@click.option("-X", "--method", type=click.Choice(choices=["get", "post", "head", "put", "delete", "patch", "trace", "connect", "options"], case_sensitive=False), default="get")
+@click.option("-body", "--body", type=str)
+@click.option("-fmt", "--format", type=click.Choice(choices=["html", "json", "xml"], case_sensitive=False))
+@click.option("-to", "--timeout", type=int, default=15)
+@click.option("-px", "--proxy", type=str)
+@click.option("-ch", "--chunks", type=int, default=100)
+@click.option("-c", "--concurrency", type=int, default=5)
+@click.option("-dr", "--disable-redirect", is_flag=True, default=False)
+@click.option("-s", "--silent", is_flag=True, default=False)
+@click.option("-vr", "--verbose", is_flag=True, default=False)
+@click.option("-o", "--output", type=str)
+@click.option("--http-raw", type=str)
+@click.option("-delay", "--delay", type=float, default=0.000001)
+@click.option("-ra", "--random-agent", is_flag=True, default=False)
+async def paramfuzzer_v2(help, site, sites, wordlist, header, method, body, format, timeout, proxy, chunks, concurrency, disable_redirect, silent, verbose, output, http_raw, delay, random_agent):
+    if not silent:
+        click.echo(f"{random_color}{banner}{reset}")
+    if help:
+        paramfuzzer_help()
+        quit()
+    if not silent:
+        gitversion()
+    if site:
+        if not wordlist:
+            print(f"[{bold}{red}WRN{reset}]: {bold}{white}Please provide a wordlist for spideyX 🕸️{reset}")
+            exit()
+        wordlists = await Extractor.Reader(wordlist)
+        if wordlists:
+            spideyfuzzer = AsyncSpideyFuzzer(site, wordlists, concurrency, chunks, header, method, proxy, disable_redirect, body, format, http_raw, verbose, timeout, delay, random_agent, output)
+            await spideyfuzzer.start()
+        exit()
+
+    if http_raw:
+        if not wordlist:
+            print(f"[{bold}{red}WRN{reset}]: {bold}{white}Please provide a wordlist for spideyX 🕸️{reset}")
+            exit()
+        wordlists = await Extractor.Reader(wordlist)
+        method, site, header, body = await AsyncSpideyFuzzer.raw_http_reader(http_raw)
+        contents = header.get("Content-Type")
+        if contents and contents.startswith("application/json"):
+            format = "json"
+        elif contents and contents.startswith("application/xml"):
+            format = "xml"
+        else:
+            format = "html"
+        if wordlists:
+            spideyfuzzer = AsyncSpideyFuzzer(site, wordlists, concurrency, chunks, header, method, proxy, disable_redirect, body, format, http_raw, verbose, timeout, delay, random_agent, output)
+            await spideyfuzzer.start()
+        exit()
+
+    if sites:
+        urls = await Extractor.Reader(sites)
+        if not wordlist:
+            print(f"[{bold}{red}WRN{reset}]: {bold}{white}Please provide a wordlist for spideyX 🕸️{reset}")
+            exit()
+        wordlists = await Extractor.Reader(wordlist)
+        if not wordlists:
+            exit()
+        if urls:
+            for site in urls:
+                spideyfuzzer = AsyncSpideyFuzzer(site, wordlists, concurrency, chunks, header, method, proxy, disable_redirect, body, format, http_raw, verbose, timeout, delay, random_agent, output)
+                await spideyfuzzer.start()
+        exit()
+
+    if sys.stdin.isatty():
+        print(f"[{bold}{red}WRN{reset}]: {bold}{white}no input provided for spideyX 🕸️{reset}")
+        exit()
+    else:
+        if not wordlist:
+            print(f"[{bold}{red}WRN{reset}]: {bold}{white}Please provide a wordlist for spideyX 🕸️{reset}")
+            exit()
+        wordlists = await Extractor.Reader(wordlist)
+        if wordlists:
+            for site in sys.stdin:
+                site = site.strip()
+                spideyfuzzer = AsyncSpideyFuzzer(site, wordlists, concurrency, chunks, header, method, proxy, disable_redirect, body, format, http_raw, verbose, timeout, delay, random_agent, output)
+                await spideyfuzzer.start()
+    exit()
 
 if __name__ == "__main__":
     cli()
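For reference, a minimal raw request file that `--http-raw` is expected to consume (illustrative only; the exact parsing and injection-point conventions are defined by `AsyncSpideyFuzzer.raw_http_reader`):

    POST /search HTTP/1.1
    Host: example.com
    Content-Type: application/json

    {"q": "test"}

Invoked as: spideyx paramfuzzer_v2 --http-raw request.txt -w params.txt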