diff --git a/.gitignore b/.gitignore
index bf3b2c8..2684be0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,6 @@
+# SSTImap custom plugins
+plugins/custom/*
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/README.md b/README.md
index 52f135f..56f682c 100644
--- a/README.md
+++ b/README.md
@@ -262,6 +262,7 @@ If you plan to contribute something big from this list, inform me to avoid worki
 - [ ] Make template and base language evaluation functionality more uniform
 - [ ] Add more payloads for different engines
+- [ ] Parse raw HTTP request from file
 - [ ] Variable dumping functionality
 - [ ] Blind value extraction
 - [ ] Better documentation (or at least any documentation)
diff --git a/core/channel.py b/core/channel.py
index 9035a68..ac7786a 100644
--- a/core/channel.py
+++ b/core/channel.py
@@ -1,3 +1,5 @@
+import time
+
 import requests
 import urllib3
 from utils.loggers import log
@@ -97,12 +99,6 @@ def _parse_header(self, all_injectable=False):
                 self.injs.append({'field': 'Header', 'part': 'param', 'param': param})
             if self.tag in value or all_injectable:
                 self.injs.append({'field': 'Header', 'part': 'value', 'value': value, 'param': param})
-        if self.args.get('random_agent'):
-            user_agent = get_agent()
-        else:
-            user_agent = self.args.get('user_agent')
-        if 'user-agent' not in [p.lower() for p in self.header_params.keys()]:
-            self.header_params['User-Agent'] = user_agent
 
     def _parse_post(self, all_injectable=False):
         if self.args.get('data'):
@@ -203,6 +199,14 @@ def req(self, injection):
             log.debug(f'[HEDR] {header_params}')
         if len(cookie_params) > 1:
             log.debug(f'[COOK] {cookie_params}')
+        if self.args.get('random_agent'):
+            user_agent = get_agent()
+        else:
+            user_agent = self.args.get('user_agent')
+        if 'user-agent' not in [p.lower() for p in header_params.keys()]:
+            header_params['User-Agent'] = user_agent
+        if self.args['delay']:
+            time.sleep(self.args['delay'])
         try:
             result = requests.request(method=self.http_method, url=url_params, params=get_params, data=post_params,
                                       headers=header_params, cookies=cookie_params, proxies=self.proxies,
diff --git a/core/checks.py b/core/checks.py
index a778ec9..163fafb 100644
--- a/core/checks.py
+++ b/core/checks.py
@@ -1,9 +1,14 @@
-from utils.loggers import log
-from core.clis import Shell, MultilineShell
-from core.tcpserver import TcpServer
+import json
+import os
 import telnetlib
+import urllib
 from urllib import parse
 import socket
+from utils.loggers import log
+from core.clis import Shell, MultilineShell
+from core.tcpserver import TcpServer
+from utils.crawler import crawl, find_forms
+from core.channel import Channel
 
 
 def plugins(legacy=False):
@@ -211,3 +216,94 @@ def check_template_injection(channel):
     else:
         log.log(22, 'No reverse TCP shell capabilities have been detected on the target')
     return current_plugin
+
+
+def scan_website(args):
+    urls = set()
+    forms = set()
+    single_url = args.get('url', None)
+    if single_url:
+        urls.add(single_url)
+    preloaded_urls = args.get('loaded_urls', None)
+    if preloaded_urls:
+        urls.update(preloaded_urls)
+    preloaded_forms = args.get('loaded_forms', None)
+    if preloaded_forms:
+        forms.update(preloaded_forms)
+    if args['load_forms']:
+        if os.path.isdir(args['load_forms']):
+            args['load_forms'] = f"{args['load_forms']}/forms.json"
+        if os.path.exists(args['load_forms']):
+            try:
+                with open(args['load_forms'], 'r') as stream:
+                    loaded_forms = set([tuple(x) for x in json.load(stream)])
+                forms.update(loaded_forms)
+                log.log(21, f"Loaded {len(loaded_forms)} forms from file: {args['load_forms']}")
+            except Exception as e:
+                log.log(22, f"Error occurred while loading forms from file:\n{repr(e)}")
+    if not forms or args['forms']:
+        if args['load_urls']:
+            if os.path.isdir(args['load_urls']):
+                args['load_urls'] = f"{args['load_urls']}/urls.txt"
+            if os.path.exists(args['load_urls']):
+                try:
+                    with open(args['load_urls'], 'r') as stream:
+                        loaded_urls = set([x.strip() for x in stream.readlines()])
+                    urls.update(loaded_urls)
+                    log.log(21, f"Loaded {len(loaded_urls)} URL(s) from file: {args['load_urls']}")
+                except Exception as e:
+                    log.log(22, f"Error occurred while loading URLs from file:\n{repr(e)}")
+        if args['crawl_depth']:
+            crawled_urls = crawl(urls, args)
+            urls.update(crawled_urls)
+            args['crawled_urls'] = crawled_urls
+            if args['save_urls']:
+                if os.path.isdir(args['save_urls']):
+                    args['save_urls'] = f"{args['save_urls']}/sstimap_urls.txt"
+                try:
+                    with open(args['save_urls'], 'w') as stream:
+                        stream.write("\n".join(crawled_urls))
+                    log.log(21, f"Saved URLs to file: {args['save_urls']}")
+                except Exception as e:
+                    log.log(22, f"Error occurred while saving URLs to file:\n{repr(e)}")
+    else:
+        log.log(25, "Skipping URL loading and crawling as forms are already supplied")
+    args['target_urls'] = urls
+    if args['forms']:
+        crawled_forms = find_forms(urls, args)
+        forms.update(crawled_forms)
+        args['crawled_forms'] = crawled_forms
+        if args['save_forms'] and crawled_forms:
+            if os.path.isdir(args['save_forms']):
+                args['save_forms'] = f"{args['save_forms']}/sstimap_forms.json"
+            try:
+                with open(args['save_forms'], 'w') as stream:
+                    json.dump([x for x in crawled_forms], stream, indent=4)
+                log.log(21, f"Saved forms to file: {args['save_forms']}")
+            except Exception as e:
+                log.log(22, f"Error occurred while saving forms to file:\n{repr(e)}")
+    args['target_forms'] = forms
+    if not urls and not forms:
+        log.log(22, 'No targets found')
+        return
+    elif not forms:
+        for url in urls:
+            log.log(27, f'Scanning url: {url}')
+            url_args = args.copy()
+            url_args['url'] = url
+            channel = Channel(url_args)
+            result = check_template_injection(channel)
+            if channel.data.get('engine'):
+                return result  # TODO: save vulnerabilities
+    else:
+        for form in forms:
+            log.log(27, f'Scanning form with url: {form[0]}')
+            url_args = args.copy()
+            url_args['url'] = form[0]
+            url_args['method'] = form[1]
+            url_args['data'] = urllib.parse.parse_qs(form[2], keep_blank_values=True)
+            channel = Channel(url_args)
+            result = check_template_injection(channel)
+            if channel.data.get('engine'):
+                return result  # TODO: save vulnerabilities
+    return
\ No newline at end of file
diff --git a/core/interactive.py b/core/interactive.py
index 70a889f..4a0b04e 100644
--- a/core/interactive.py
+++ b/core/interactive.py
@@ -1,5 +1,8 @@
 import cmd
-from utils.crawler import crawl, find_page_forms
+import json
+import os
+
+from utils import config
 from utils.loggers import log
 from urllib import parse
 from core import checks
@@ -15,13 +18,18 @@ class InteractiveShell(cmd.Cmd):
     def __init__(self, args):
         cmd.Cmd.__init__(self)
         self.prompt = f"SSTImap> "
-        self.sstimap_options = args
+        self.sstimap_options = args.copy()
         self.sstimap_options.update({"tpl_shell": False, "tpl_cmd": None, "os_shell": False, "os_cmd": None,
                                      "bind_shell": None, "reverse_shell": None, "upload": None, "download": None,
-                                     "eval_shell": False, "eval_cmd": None})
+                                     "eval_shell": False, "eval_cmd": None, "load_urls": None, "load_forms": None,
+                                     "save_urls": None, "save_forms": None, "loaded_urls": set(), "loaded_forms": set()})
         if self.sstimap_options["url"]:
             self.do_url(args.get("url"))
         self.channel = Channel(self.sstimap_options)
+        if args["load_urls"]:
+            self.do_load_urls(args["load_urls"])
+        if args["load_forms"]:
+            self.do_load_forms(args["load_forms"])
         self.current_plugin = None
         self.checked = False
 
@@ -48,8 +56,8 @@ def do_help(self, line):
 
 Target:
     url, target [URL]                   Set target URL (e.g. 'https://example.com/?name=test')
-    crawl [DEPTH]                       Crawl up to depth (0 - do not crawl)
-    forms                               Search page(s) for forms
+    load_urls [PATH]                    Load URLs from txt file or directory
+    load_forms [PATH]                   Load forms from json file or directory
     run, test, check                    Run SSTI detection on the target
 
 Request:
@@ -59,18 +67,27 @@ def do_help(self, line):
     cookie, cookies {rm} [COOKIE]       Cookie to send (e.g. 'Field=Value'). To remove by prefix, use "data rm PREFIX". Whithout arguments, shows cookies list
     method, http_method [METHOD]        Set HTTP method to use (default 'GET')
     agent, user_agent [AGENT]           Set User-Agent header value to use
-    random, random_agent                Toggle using random User-Agent header value from a list of desktop browsers on every attempt
+    random, random_agent                Toggle using random User-Agent header value from a list of desktop browsers on every request
+    delay [DELAY]                       Delay between requests (Default/0: no delay)
     proxy [PROXY]                       Use a proxy to connect to the target URL
     ssl, verify_ssl                     Toggle verifying SSL certificates (not verified by default)
+    log_response                        Toggle including HTTP responses into ~/.sstimap/sstimap.log
+
+Crawler:
+    crawl [DEPTH]                       Crawl up to depth (0 - do not crawl)
+    forms                               Search page(s) for forms
+    exclude [PATTERN]                   Regex pattern to exclude from crawler
+    domains [DOMAINS]                   Crawl other domains: Y(es) / S(ubdomains) / N(o). Default: S
+    save_urls [PATH]                    Save crawled URLs to txt file or directory (run or no PATH: reset)
+    save_forms [PATH]                   Save crawled forms to json file or directory (run or no PATH: reset)
 
 Detection:
     lvl, level [LEVEL]                  Set level of escaping to perform (1-5, Default: 1)
     force, force_level [LEVEL] [CLEVEL] Force a LEVEL and CLEVEL to test
     engine [ENGINE]                     Check only this backend template engine. For all, use '*'
     technique [TECHNIQUE]               Use techniques R(endered) T(ime-based blind). Default: RT
+    blind_delay [DELAY]                 Delay to detect time-based blind injection (Default: 4 seconds)
     legacy                              Toggle including old payloads, that no longer work with newer versions of the engines
-    exclude [PATTERN]                   Regex pattern to exclude from crawler
-    domains [DOMAINS]                   Crawl other domains: Y(es) / S(ubdomains) / N(o). Default: S
 
 Exploitation:
     tpl, tpl_shell                      Prompt for an interactive shell on the template engine
@@ -86,18 +103,46 @@ def do_help(self, line):
     down, download [REMOTE] [LOCAL]     Download REMOTE to LOCAL files
 
 SSTImap:
-    reload, reload_plugins              Reload all SSTImap plugins""")
+    reload, reload_plugins              Reload all SSTImap plugins
+    config [PATH]                       Update settings from config file or directory""")
 
     def do_version(self, line):
        """Show current SSTImap version"""
        log.log(23, f'Current SSTImap version: {self.sstimap_options["version"]}')
 
+    def do_config(self, line):
+        if line:
+            if os.path.isdir(line):
+                line = f"{line}/config.json"
+            if os.path.exists(line):
+                custom_config = {}
+                with open(line, 'r') as stream:
+                    try:
+                        custom_config = json.load(stream)
+                    except json.JSONDecodeError as e:
+                        log.log(25, f'Error while loading config: {repr(e)}')
+                config.config_update(self.sstimap_options, custom_config)
+                log.log(24, f'Config updated from file: {line}')
+                return
+        log.log(25, 'Provide file or directory to read config from.')
+
     def do_options(self, line):
        """Show current SSTImap options"""
        crawl_domains = {"Y": "Yes", "S": "Subdomains only", "N": "No"}
        log.log(23, f'Current SSTImap {self.sstimap_options["version"]} interactive mode options:')
-        if not self.sstimap_options["url"]:
+        if not self.sstimap_options["url"] and not self.sstimap_options["loaded_urls"] \
+                and not self.sstimap_options["loaded_forms"]:
            log.log(25, f'URL is not set.')
+        elif self.sstimap_options["loaded_forms"]:
+            log.log(26, f'Forms to scan: {len(self.sstimap_options["loaded_forms"])}')
+            if self.sstimap_options["forms"]:
+                ulen = 1 if self.sstimap_options["url"] else 0
+                if self.sstimap_options["loaded_urls"]:
+                    ulen += len(self.sstimap_options["loaded_urls"])
+                log.log(26, f'URLs to scan: {ulen}')
+        elif self.sstimap_options["loaded_urls"]:
+            log.log(26, f'URLs to scan: '
+                        f'{len(self.sstimap_options["loaded_urls"]) + (1 if self.sstimap_options["url"] else 0)}')
         else:
            log.log(26, f'URL: {self.sstimap_options["url"]}')
        log.log(26, f'Injection marker: {self.sstimap_options["marker"]}')
@@ -110,11 +155,14 @@ def do_options(self, line):
         if self.sstimap_options["cookies"]:
             cookies = "\n    ".join(self.sstimap_options["cookies"])
             log.log(26, f'Cookies:\n    {cookies}')
-        log.log(26, f'HTTP method: {self.sstimap_options["method"]}')
+        log.log(26, f'HTTP method: '
+                    f'{self.sstimap_options["method"] if self.sstimap_options["method"] else "Detect automatically"}')
         if self.sstimap_options["random_agent"]:
             log.log(26, 'User-Agent is randomised')
         else:
             log.log(26, f'User-Agent: {self.sstimap_options["user_agent"]}')
+        if self.sstimap_options["delay"]:
+            log.log(26, f'Delay between requests: {self.sstimap_options["delay"]}s')
         if self.sstimap_options["proxy"]:
             log.log(26, f'Proxy: {self.sstimap_options["proxy"]}')
         log.log(26, f'Verify SSL: {self.sstimap_options["verify_ssl"]}')
@@ -127,14 +175,18 @@ def do_options(self, line):
                     f'{"+" if not self.sstimap_options["engine"] and self.sstimap_options["legacy"] else ""}')
         if self.sstimap_options["crawl_depth"] > 0:
             log.log(26, f'Crawler depth: {self.sstimap_options["crawl_depth"]}')
+            if self.sstimap_options["crawl_exclude"]:
+                log.log(26, f'Crawler exclude RE: "{self.sstimap_options["crawl_exclude"]}"')
+            log.log(26, f'Crawl other domains: {crawl_domains.get(self.sstimap_options["crawl_domains"].upper())}')
         else:
-            log.log(26, 'Crawler depth: no crawl')
-        if self.sstimap_options["crawl_exclude"]:
-            log.log(26, f'Crawler exclude RE: "{self.sstimap_options["crawl_exclude"]}"')
-        log.log(26, f'Crawl other domains: {crawl_domains.get(self.sstimap_options["crawl_exclude"].upper())}')
+            log.log(26, 'Crawler: no crawl')
         log.log(26, f'Form detection: {self.sstimap_options["forms"]}')
         log.log(26, f'Attack technique: {self.sstimap_options["technique"]}')
+        if "T" in self.sstimap_options["technique"]:
+            log.log(26, f'Time-based blind detection delay: {self.sstimap_options["time_based_blind_delay"]}')
         log.log(26, f'Force overwrite files: {self.sstimap_options["force_overwrite"]}')
+        if self.sstimap_options["log_response"]:
+            log.log(26, 'HTTP responses will be included into ~/.sstimap/sstimap.log')
 
     do_opt = do_options
 
@@ -158,11 +210,89 @@ def do_url(self, line):
             return
         log.log(24, f'Target URL is set to {line}')
         self.sstimap_options["url"] = line
-        self.set_module(f'\033[31m{url.netloc}\033[0m')
+        if not (self.sstimap_options['loaded_forms'] or self.sstimap_options['loaded_urls']):
+            self.set_module(f'\033[31m{url.netloc}\033[0m')
         self.checked = False
 
     do_target = do_url
 
+    def do_load_urls(self, line):
+        if line:
+            if os.path.isdir(line):
+                line = f"{line}/urls.txt"
+            if os.path.exists(line):
+                try:
+                    with open(line, 'r') as stream:
+                        self.sstimap_options["loaded_urls"] = set([x.strip() for x in stream.readlines()])
+                    log.log(21, f"Loaded {len(self.sstimap_options['loaded_urls'])} URL(s) from file: {line}")
+                    if not self.sstimap_options['loaded_forms']:
+                        self.set_module(f"\033[31m{len(self.sstimap_options['loaded_urls'])} URLs\033[0m")
+                    self.checked = False
+                except Exception as e:
+                    log.log(22, f"Error occurred while loading URLs from file:\n{repr(e)}")
+                return
+            log.log(25, 'Provide valid file or directory to read URLs from.')
+        else:
+            self.sstimap_options["loaded_urls"] = None
+            if not self.sstimap_options['loaded_forms']:
+                self.set_module(f'\033[31m{parse.urlparse(self.sstimap_options["url"]).netloc}'
+                                f'\033[0m' if self.sstimap_options["url"] else "")
+
+    def do_load_forms(self, line):
+        if line:
+            if os.path.isdir(line):
+                line = f"{line}/forms.json"
+            if os.path.exists(line):
+                try:
+                    with open(line, 'r') as stream:
+                        self.sstimap_options["loaded_forms"] = set([tuple(x) for x in json.load(stream)])
+                    log.log(21, f"Loaded {len(self.sstimap_options['loaded_forms'])} forms from file: {line}")
+                    self.set_module(f"\033[31m{len(self.sstimap_options['loaded_forms'])} forms\033[0m")
+                    self.checked = False
+                except Exception as e:
+                    log.log(22, f"Error occurred while loading forms from file:\n{repr(e)}")
+                return
+            log.log(25, 'Provide valid file or directory to read forms from.')
+        else:
+            self.sstimap_options["loaded_forms"] = None
+            if self.sstimap_options['loaded_urls']:
+                self.set_module(f"\033[31m{len(self.sstimap_options['loaded_urls'])} URLs\033[0m")
+            else:
+                self.set_module(f'\033[31m{parse.urlparse(self.sstimap_options["url"]).netloc}'
+                                f'\033[0m' if self.sstimap_options["url"] else "")
+
+    def do_save_urls(self, line):
+        if line:
+            if self.sstimap_options.get('crawled_urls', None):
+                if os.path.isdir(line):
+                    line = f"{line}/sstimap_urls.txt"
+                try:
+                    with open(line, 'w') as stream:
+                        stream.write("\n".join(self.sstimap_options['crawled_urls']))
+                    log.log(21, f"Saved URLs to file: {line}")
+                except Exception as e:
+                    log.log(22, f"Error occurred while saving URLs to file:\n{repr(e)}")
+            else:
+                log.log(25, 'No URLs crawled to save.')
+            return
+        log.log(25, 'Provide valid file or directory to save URLs to.')
+
+    def do_save_forms(self, line):
+        if line:
+            if self.sstimap_options.get('crawled_forms', None):
+                if os.path.isdir(line):
+                    line = f"{line}/sstimap_forms.json"
+                try:
+                    with open(line, 'w') as stream:
+                        json.dump([x for x in self.sstimap_options['crawled_forms']], stream, indent=4)
+                    log.log(21, f"Saved forms to file: {line}")
+                except Exception as e:
+                    log.log(22, f"Error occurred while saving forms to file:\n{repr(e)}")
+            else:
+                log.log(25, 'No forms detected to save.')
+            return
+        log.log(25, 'Provide valid file or directory to save forms to.')
+
     def do_crawl(self, line):
         if not line.isnumeric():
             line = "0"
@@ -189,51 +319,19 @@ def do_forms(self, line):
 
     def do_run(self, line):
         """Check target URL for SSTI vulnerabilities"""
-        if not self.sstimap_options["url"]:
+        if not (self.sstimap_options["url"] or self.sstimap_options["loaded_urls"] or self.sstimap_options["loaded_forms"]):
             log.log(22, 'Target URL cannot be empty.')
             return
         try:
-            if self.sstimap_options['crawl_depth'] or self.sstimap_options['forms']:
-                # crawler mode
-                urls = set([self.sstimap_options['url']])
-                if self.sstimap_options['crawl_depth']:
-                    crawled_urls = set()
-                    for url in urls:
-                        crawled_urls.update(crawl(url, self.sstimap_options))
-                    urls.update(crawled_urls)
-                if not self.sstimap_options['forms']:
-                    for url in urls:
-                        log.log(27, f'Scanning url: {url}')
-                        url_options = self.sstimap_options.copy()
-                        url_options['url'] = url
-                        self.channel = Channel(url_options)
-                        self.current_plugin = checks.check_template_injection(self.channel)
-                        if self.channel.data.get('engine'):
-                            break  # TODO: save vulnerabilities
-                else:
-                    forms = set()
-                    log.log(23, 'Starting form detection...')
-                    for url in urls:
-                        forms.update(find_page_forms(url, self.sstimap_options))
-                    for form in forms:
-                        log.log(27, f'Scanning form with url: {form[0]}')
-                        url_options = self.sstimap_options.copy()
-                        url_options['url'] = form[0]
-                        url_options['method'] = form[1]
-                        url_options['data'] = parse.parse_qs(form[2], keep_blank_values=True)
-                        self.channel = Channel(url_options)
-                        self.current_plugin = checks.check_template_injection(self.channel)
-                        if self.channel.data.get('engine'):
-                            break  # TODO: save vulnerabilities
-                    if not forms:
-                        log.log(22, f'No forms were detected to scan')
-            else:
-                # predetermined mode
-                self.channel = Channel(self.sstimap_options)
-                self.current_plugin = checks.check_template_injection(self.channel)
+            self.current_plugin = checks.scan_website(self.sstimap_options)
         except (KeyboardInterrupt, EOFError):
             log.log(26, 'Exiting SSTI detection')
-        self.checked = True
+        if self.current_plugin:
+            self.checked = True
+        self.sstimap_options["loaded_urls"] = None
+        self.sstimap_options["loaded_forms"] = None
+        self.set_module(f'\033[31m{parse.urlparse(self.sstimap_options["url"]).netloc}'
+                        f'\033[0m' if self.sstimap_options["url"] else "")
 
     do_check = do_run
     do_test = do_run
@@ -336,6 +434,17 @@ def do_random_agent(self, line):
 
     do_random = do_random_agent
 
+    def do_delay(self, line):
+        """Set DELAY between requests"""
+        try:
+            self.sstimap_options["delay"] = max(float(line), 0)
+        except:
+            log.log(22, 'Invalid delay time.')
+            return
+        log.log(24, f'Delay between requests is set to {self.sstimap_options["delay"]}')
+
+    do_request_delay = do_delay
+
     def do_proxy(self, line):
         """Use proxy"""
         if line == "":
@@ -353,6 +462,12 @@ def do_verify_ssl(self, line):
 
     do_ssl = do_verify_ssl
 
+    def do_log_response(self, line):
+        """Switch log_response option"""
+        overwrite = not self.sstimap_options["log_response"]
+        log.log(24, f'Value of \'log_response\' is set to {overwrite}')
+        self.sstimap_options["log_response"] = overwrite
+
     # Detection commands
 
     def do_level(self, line):
@@ -409,6 +524,17 @@ def do_crawl_domains(self, line):
 
     do_domains = do_crawl_domains
 
+    def do_blind_delay(self, line):
+        """Set DELAY for blind SSTI detection"""
+        try:
+            self.sstimap_options["time_based_blind_delay"] = max(int(line), 1)
+        except:
+            log.log(22, 'Invalid time-based blind injection delay time.')
+            return
+        log.log(24, f'Delay for time-based blind injection detection is set to {self.sstimap_options["time_based_blind_delay"]}')
+
+    do_time_based_blind_delay = do_blind_delay
+
     def do_legacy(self, line):
         """Switch legacy option"""
         overwrite = not self.sstimap_options["legacy"]
diff --git a/sstimap.py b/sstimap.py
index ca0bb54..a911130 100755
--- a/sstimap.py
+++ b/sstimap.py
@@ -5,70 +5,30 @@
     sys.exit()
 if sys.version_info.minor > 11:
     print('\033[33m[!]\033[0m This version of SSTImap was not tested with Python3.'+str(sys.version_info.minor))
-import urllib
 import importlib
 import os
 from utils import cliparser
 from core import checks
-from core.channel import Channel
 from core.interactive import InteractiveShell
 from utils.loggers import log
-from utils.crawler import crawl, find_page_forms
-from utils.config import config_args
+from utils.config import config_args, version
 import traceback
 
-
-version = '1.1.3'
-
-
 def main():
     args = vars(cliparser.options)
     args = config_args(args)
     args['version'] = version
-    if not (args['url'] or args['interactive']):
+    if not (args['url'] or args['interactive'] or args['load_urls'] or args['load_forms']):
         # no target specified
-        log.log(22, 'SSTImap requires target url (-u, --url) or interactive mode (-i, --interactive)')
+        log.log(22, 'SSTImap requires target URL (-u, --url), URLs/forms file (--load-urls / --load-forms) '
+                    'or interactive mode (-i, --interactive)')
     elif args['interactive']:
         # interactive mode
         log.log(23, 'Starting SSTImap in interactive mode. Type \'help\' to see the details.')
         InteractiveShell(args).cmdloop()
-    elif args['crawl_depth'] or args['forms']:
-        # crawler mode
-        urls = set([args.get('url')])
-        if args['crawl_depth']:
-            crawled_urls = set()
-            for url in urls:
-                crawled_urls.update(crawl(url, args))
-            urls.update(crawled_urls)
-        if not args['forms']:
-            for url in urls:
-                log.log(27, f'Scanning url: {url}')
-                url_args = args.copy()
-                url_args['url'] = url
-                channel = Channel(url_args)
-                checks.check_template_injection(channel)
-                if channel.data.get('engine'):
-                    break  # TODO: save vulnerabilities
-        else:
-            forms = set()
-            log.log(23, 'Starting form detection...')
-            for url in urls:
-                forms.update(find_page_forms(url, args))
-            for form in forms:
-                log.log(27, f'Scanning form with url: {form[0]}')
-                url_args = args.copy()
-                url_args['url'] = form[0]
-                url_args['method'] = form[1]
-                url_args['data'] = urllib.parse.parse_qs(form[2], keep_blank_values=True)
-                channel = Channel(url_args)
-                checks.check_template_injection(channel)
-                if channel.data.get('engine'):
-                    break  # TODO: save vulnerabilities
-            if not forms:
-                log.log(25, f'No forms were detected to scan')
     else:
         # predetermined mode
-        checks.check_template_injection(Channel(args))
+        checks.scan_website(args)
 
 
 def load_plugins():
diff --git a/utils/cliparser.py b/utils/cliparser.py
index 807bb62..8bb2443 100644
--- a/utils/cliparser.py
+++ b/utils/cliparser.py
@@ -25,19 +25,17 @@ def banner():
 parser = argparse.ArgumentParser(description='SSTImap is an automatic SSTI detection and exploitation tool '
                                              'with predetermined and interactive modes.')
 parser.add_argument('-v', '--version', action='version', version=f'SSTImap version {version}')
+parser.add_argument("--config", dest="config", help="Use custom config file or directory")
 
 target = parser.add_argument_group(title="target",
                                    description="At least one of these options has to be provided to define target(s)")
 target.add_argument("-u", "--url", dest="url",
                     help="Target URL (e.g. 'https://example.com/?name=test')")
-target.add_argument("-i", "--interactive", action="store_true", dest="interactive",
+target.add_argument("-i", "--interactive", action="store_const", const=True, dest="interactive",
                     help="Run SSTImap in interactive mode")
-target.add_argument("-c", "--crawl", dest="crawl_depth", type=int,
-                    help="Depth to crawl (default/0: don't crawl)")
-target.add_argument("-f", "--forms", action="store_true", dest="forms",
-                    help="Scan page(s) for forms")
-
+target.add_argument("--load-urls", dest="load_urls", help="File or directory to load URLs from")
+target.add_argument("--load-forms", dest="load_forms", help="File or directory to load forms from")
 
 request = parser.add_argument_group(title="request",
                                     description="These options can specify how to connect to the "
                                                 "target URL and add possible attack vectors")
@@ -52,14 +50,28 @@ def banner():
 request.add_argument("-m", "--method", dest="method",
                      help="HTTP method to use (default 'GET')")
 request.add_argument("-a", "--user-agent", dest="user_agent",
-                     help="User-Agent header value to use", default=f'SSTImap/{version}')
-request.add_argument("-A", "--random-user-agent", action="store_true", dest="random_agent",
-                     help="Random User-Agent header value from a list of desktop browsers on every attempt")
+                     help="User-Agent header value to use")
+request.add_argument("-A", "--random-user-agent", action="store_const", const=True, dest="random_agent",
+                     help="Random User-Agent header value from a list of desktop browsers on every request")
+request.add_argument("--delay", dest="delay", type=float, help="Delay between requests (Default/0: no delay)")
 request.add_argument("-p", "--proxy", dest="proxy",
                      help="Use a proxy to connect to the target URL")
-request.add_argument("-V", "--verify-ssl", action="store_true", dest="verify_ssl",
+request.add_argument("-V", "--verify-ssl", action="store_const", const=True, dest="verify_ssl",
                      help="Verify SSL certificates (not verified by default)")
+request.add_argument("--log-response", action="store_const", const=True, dest="log_response",
+                     help="Include HTTP responses into ~/.sstimap/sstimap.log")
+crawler = parser.add_argument_group(title="crawler", description="These options can specify how to detect URLs and "
+                                                                 "forms on the target website.")
+crawler.add_argument("-c", "--crawl", dest="crawl_depth", type=int,
+                     help="Depth to crawl (default/0: don't crawl)")
+crawler.add_argument("-f", "--forms", action="store_const", const=True, dest="forms",
+                     help="Scan page(s) for forms")
+crawler.add_argument("--crawl-exclude", dest="crawl_exclude", help="Regex in URLs to not crawl")
+crawler.add_argument("--crawl-domains", dest="crawl_domains",
+                     help="Crawl other domains: Y(es) / S(ubdomains) / N(o). Default: S")
+crawler.add_argument("--save-urls", dest="save_urls", help="File or directory to save crawled URLs to")
+crawler.add_argument("--save-forms", dest="save_forms", help="File or directory to save crawled forms to")
 
 detection = parser.add_argument_group(title="detection",
                                       description="These options can be used to customize the detection phase.")
@@ -71,27 +83,23 @@ def banner():
                        help="Check only this backend template engine")
 detection.add_argument("-r", "--technique", dest="technique",
                        help="Techniques R(endered) T(ime-based blind). Default: RT")
-detection.add_argument("-P", "--legacy", "--legacy-payloads", dest="legacy", action="store_true",
+detection.add_argument("--blind-delay", dest="time_based_blind_delay", type=int,
+                       help="Delay to detect time-based blind injection (Default: 4 seconds)")
+detection.add_argument("-P", "--legacy", "--legacy-payloads", dest="legacy", action="store_const", const=True,
                        help="Include old payloads, that no longer work with newer versions of the engines")
-detection.add_argument("--crawl-exclude", dest="crawl_exclude", help="Regex in URLs to not crawl")
-detection.add_argument("--crawl-domains", dest="crawl_domains",
-                       help="Crawl other domains: Y(es) / S(ubdomains) / N(o). Default: S")
-detection.add_argument("--config", dest="config",
-                       help="Use custom config file or directory")
-
 
 payload = parser.add_argument_group(title="payload",
                                     description="These options can be used to get access to the template engine, "
                                                 "filesystem or OS shell after an attack.")
-payload.add_argument("-t", "--tpl-shell", dest="tpl_shell", action="store_true",
+payload.add_argument("-t", "--tpl-shell", dest="tpl_shell", action="store_const", const=True,
                      help="Prompt for an interactive shell on the template engine")
 payload.add_argument("-T", "--tpl-code", dest="tpl_code",
                      help="Inject code in the template engine")
-payload.add_argument("-x", "--eval-shell", dest="eval_shell", action="store_true",
+payload.add_argument("-x", "--eval-shell", dest="eval_shell", action="store_const", const=True,
                      help="Prompt for an interactive shell on the template engine base language")
payload.add_argument("-X", "--eval-code", dest="eval_code",
                      help="Evaluate code in the template engine base language")
-payload.add_argument("-s", "--os-shell", dest="os_shell", action="store_true",
+payload.add_argument("-s", "--os-shell", dest="os_shell", action="store_const", const=True,
                      help="Prompt for an interactive operating system shell")
 payload.add_argument("-S", "--os-cmd", dest="os_cmd",
                      help="Execute an operating system command")
@@ -99,7 +107,7 @@ def banner():
                      help="Spawn a system shell on a TCP PORT of the target and connect to it")
 payload.add_argument("-R", "--reverse-shell", dest="reverse_shell", nargs=2, metavar=("HOST", "PORT",),
                      help="Run a system shell and back-connect to local HOST PORT")
-payload.add_argument("-F", "--force-overwrite", dest="force_overwrite", action="store_true",
+payload.add_argument("-F", "--force-overwrite", dest="force_overwrite", action="store_const", const=True,
                      help="Force file overwrite when uploading")
 payload.add_argument("-U", "--upload", dest="upload", metavar=("LOCAL", "REMOTE",),
                      help="Upload LOCAL to REMOTE files", nargs=2)
diff --git a/utils/config.py b/utils/config.py
index dc2a45d..7011171 100644
--- a/utils/config.py
+++ b/utils/config.py
@@ -2,14 +2,29 @@
 import sys
 import json
 
+
+version = '1.1.4'
+
+# Defaults to be overwritten by config.json, ~/.sstimap/config.json, user-supplied config and arguments
 defaults = {
+    "base_path": "~/.sstimap/",
     "crawl_depth": 0,
     "marker": '*',
     "level": 1,
     "technique": "RT",
     "crawl_domains": "S",
     "log_response": False,
-    "time_based_blind_delay": 4
+    "time_based_blind_delay": 4,
+    "user_agent": f'SSTImap/{version}',
+    "interactive": False,
+    "random_agent": False,
+    "verify_ssl": False,
+    "forms": False,
+    "legacy": False,
+    "tpl_shell": False,
+    "eval_shell": False,
+    "os_shell": False,
+    "force_overwrite": False
 }
 config = {}
 user_config = {}
@@ -19,7 +34,7 @@
     try:
         config = json.load(stream)
     except json.JSONDecodeError as e:
-        print(f'[!][config] {e}')
+        print(f'[!][config] {repr(e)}')
 
 base_path = os.path.expanduser(config.get("base_path", "~/.sstimap/"))
 if not os.path.isdir(base_path):
@@ -30,7 +45,7 @@
         try:
             user_config = json.load(stream)
         except json.JSONDecodeError as e:
-            print(f'[!][user config] {e}')
+            print(f'[!][user config] {repr(e)}')
 
 
 def config_update(base, added):
@@ -53,7 +68,7 @@ def config_args(args):
             try:
                 custom_config = json.load(stream)
             except json.JSONDecodeError as e:
-                print(f'[!][custom config] {e}')
+                print(f'[!][custom config] {repr(e)}')
         config_update(res, custom_config)
     config_update(res, args)
     return res
diff --git a/utils/crawler.py b/utils/crawler.py
index 4ce9b57..c65ae69 100644
--- a/utils/crawler.py
+++ b/utils/crawler.py
@@ -4,6 +4,7 @@
 """
 
 import re
+import time
 import urllib
 import urllib3
 import html
@@ -13,24 +14,26 @@
 
 from html5lib import parse
 from utils.loggers import log
+from utils.random_agent import get_agent
 
 CRAWL_EXCLUDE_EXTENSIONS = (
     "3ds", "3g2", "3gp", "7z", "DS_Store", "a", "aac", "adp", "ai", "aif", "aiff", "apk", "ar", "asf", "au", "avi", "bak",
-    "bin", "bk", "bmp", "btif", "bz2", "cab", "caf", "cgm", "cmx", "cpio", "cr2", "dat", "deb", "djvu", "dll", "dmg", "dmp",
-    "dng", "doc", "docx", "dot", "dotx", "dra", "dsk", "dts", "dtshd", "dvb", "dwg", "dxf", "ear", "ecelp4800", "ecelp7470",
-    "ecelp9600", "egg", "eol", "eot", "epub", "exe", "f4v", "fbs", "fh", "fla", "flac", "fli", "flv", "fpx", "fst", "fvt",
-    "g3", "gif", "gz", "h261", "h263", "h264", "ico", "ief", "image", "img", "ipa", "iso", "jar", "jpeg", "jpg", "jpgv",
-    "jpm", "jxr", "ktx", "lvp", "lz", "lzma", "lzo", "m3u", "m4a", "m4v", "mar", "mdi", "mid", "mj2", "mka", "mkv", "mmr",
-    "mng", "mov", "movie", "mp3", "mp4", "mp4a", "mpeg", "mpg", "mpga", "mxu", "nef", "npx", "o", "oga", "ogg", "ogv",
-    "otf", "pbm", "pcx", "pdf", "pea", "pgm", "pic", "png", "pnm", "ppm", "pps", "ppt", "pptx", "ps", "psd", "pya", "pyc",
-    "pyo", "pyv", "qt", "rar", "ras", "raw", "rgb", "rip", "rlc", "rz", "s3m", "s7z", "scm", "scpt", "sgi", "shar", "sil",
-    "smv", "so", "sub", "swf", "tar", "tbz2", "tga", "tgz", "tif", "tiff", "tlz", "ts", "ttf", "uvh", "uvi", "uvm", "uvp",
-    "uvs", "uvu", "viv", "vob", "war", "wav", "wax", "wbmp", "wdp", "weba", "webm", "webp", "whl", "wm", "wma", "wmv",
-    "wmx", "woff", "woff2", "wvx", "xbm", "xif", "xls", "xlsx", "xlt", "xm", "xpi", "xpm", "xwd", "xz", "z", "zip", "zipx"
+    "bin", "bk", "bmp", "btif", "bz2", "cab", "caf", "cgm", "cmx", "cpio", "cr2", "css", "dat", "deb", "djvu", "dll", "dmg",
+    "dmp", "dng", "doc", "docx", "dot", "dotx", "dra", "dsk", "dts", "dtshd", "dvb", "dwg", "dxf", "ear", "ecelp4800",
+    "ecelp7470", "ecelp9600", "egg", "eol", "eot", "epub", "exe", "f4v", "fbs", "fh", "fla", "flac", "fli", "flv", "fpx",
+    "fst", "fvt", "g3", "gif", "gz", "h261", "h263", "h264", "ico", "ief", "image", "img", "ipa", "iso", "jar", "jpeg",
+    "jpg", "jpgv", "jpm", "js", "jxr", "ktx", "lvp", "lz", "lzma", "lzo", "m3u", "m4a", "m4v", "mar", "mdi", "mid", "mj2",
+    "mka", "mkv", "mmr", "mng", "mov", "movie", "mp3", "mp4", "mp4a", "mpeg", "mpg", "mpga", "mxu", "nef", "npx", "o",
+    "oga", "ogg", "ogv", "otf", "pbm", "pcx", "pdf", "pea", "pgm", "pic", "png", "pnm", "ppm", "pps", "ppt", "pptx", "ps",
+    "psd", "pya", "pyc", "pyo", "pyv", "qt", "rar", "ras", "raw", "rgb", "rip", "rlc", "rz", "s3m", "s7z", "scm", "scpt",
+    "sgi", "shar", "sil", "smv", "so", "sql", "sub", "svg", "swf", "tar", "tbz2", "tga", "tgz", "tif", "tiff", "tlz", "ts",
+    "ttf", "uvh", "uvi", "uvm", "uvp", "uvs", "uvu", "viv", "vob", "war", "wav", "wax", "wbmp", "wdp", "weba", "webm",
+    "webp", "whl", "wm", "wma", "wmv", "wmx", "woff", "woff2", "wvx", "xbm", "xif", "xls", "xlsx", "xlt", "xm", "xpi",
+    "xpm", "xwd", "xz", "z", "zip", "zipx"
 )
 
 
-def crawl(target, args):
+def crawl(targets, args):
     log.log(23, 'Starting page crawler...')
     if not args.get('verify_ssl'):
         urllib3.disable_warnings()
@@ -40,6 +43,7 @@ def crawl(target, args):
         except:
             log.log(22, f'Invalid RE: "{args.get("crawl_exclude")}"')
             return
+
     def crawlThread(curr_depth, current):
         if current in visited:
             return
@@ -50,8 +54,15 @@ def crawlThread(curr_depth, current):
         visited.add(current)
         content = None
         if current:
+            if args.get('random_agent'):
+                user_agent = get_agent()
+            else:
+                user_agent = args.get('user_agent')
+            if args['delay']:
+                time.sleep(args['delay'])
             try:
-                content = requests.request(method='GET', url=current, proxies={'http': args.get('proxy'), 'https': args.get('proxy')}, verify=args.get('verify_ssl')).text
+                content = requests.request(method='GET', url=current, headers={'User-Agent': user_agent}, verify=args.get('verify_ssl'),
+                                           proxies={'http': args.get('proxy'), 'https': args.get('proxy')}).text
             except requests.exceptions.ConnectionError as e:
                 if e and e.args[0] and e.args[0].args[0] == 'Connection aborted.':
                     log.log(25, 'Error: connection aborted, bad status line.')
@@ -75,7 +86,7 @@ def crawlThread(curr_depth, current):
             for tag in tags:
                 href = tag.get("href") if hasattr(tag, "get") else tag.group("href")
                 if href:
-                    url = urllib.parse.urljoin(current, html.unescape(href))
+                    url = urllib.parse.urljoin(current, html.unescape(href)).split("#")[0].split(" ")[0]
                     try:
                         if re.search(r"\A[^?]+\.(?P<result>\w+)(\?|\Z)", url).group("result").lower() in CRAWL_EXCLUDE_EXTENSIONS:
                             continue
@@ -83,7 +94,7 @@ def crawlThread(curr_depth, current):
                         pass
                     if url:
                         host = urllib.parse.urlparse(url).netloc.split(":")[0]
-                        if url in visited:
+                        if url in visited or url in worker[curr_depth+1]:
                             continue
                         elif args.get('crawl_exclude') and pattern.search(url):
                             log.log(26, f"Skipping: {url}")
@@ -107,17 +118,18 @@ def crawlThread(curr_depth, current):
             pass
         except AssertionError:  # for invalid HTML
             pass
-    if not target:
+    if not targets:
         return set()
     visited = set()
-    worker = [set([target])]
+    worker = [set(targets)]
     results = set()
-    target_host = urllib.parse.urlparse(target).netloc.split(":")[0]
     try:
         for depth in range(args.get('crawl_depth')):
            results.update(worker[depth])
            worker.append(set())
            for url in worker[depth]:
+                if depth == 0:
+                    target_host = urllib.parse.urlparse(url).netloc.split(":")[0]
                 crawlThread(depth, url)
         results.update(worker[args.get('crawl_depth')])
         if not results:
@@ -127,14 +139,23 @@ def crawlThread(curr_depth, current):
         log.log(26, "User aborted during crawling. SSTImap will use partial list")
     return results
 
+
 def find_page_forms(url, args):
     if not args.get('verify_ssl'):
         urllib3.disable_warnings()
     retVal = set()
     target = (url, "GET", "")
     retVal.add(target)
+    log.log(24, f'Form found: GET {url} ""')
+    if args.get('random_agent'):
+        user_agent = get_agent()
+    else:
+        user_agent = args.get('user_agent')
+    if args['delay']:
+        time.sleep(args['delay'])
     try:
-        request = requests.request(method='GET', url=url, proxies={'http': args.get('proxy'), 'https': args.get('proxy')}, verify=args.get('verify_ssl'))
+        request = requests.request(method='GET', url=url, headers={'User-Agent': user_agent}, verify=args.get('verify_ssl'),
+                                   proxies={'http': args.get('proxy'), 'https': args.get('proxy')})
         raw = request.content
         content = request.text
     except requests.exceptions.ConnectionError as e:
@@ -168,6 +189,7 @@ def find_page_forms(url, args):
                     continue
                 target = (url, method, data)
                 retVal.add(target)
+                log.log(24, f'Form found: {method} {url} "{data if data else ""}"')
         except (ValueError, TypeError) as ex:
             log.log(25, f"There has been a problem while processing page forms ('{repr(ex)}')")
         try:
@@ -179,6 +201,7 @@ def find_page_forms(url, args):
                 data = data.rstrip('&')
                 target = (url, "POST", data)
                 retVal.add(target)
+                log.log(24, f'Form found: POST {url} "{data if data else ""}"')
             for match in re.finditer(r"(?s)(\w+)\.open\(['\"]POST['\"],\s*['\"]([^'\"]+)['\"]\).*?\1\.send\(([^)]+)\)", content):
                 url = urllib.parse.urljoin(url, html.unescape(match.group(2)))
                 data = match.group(3)
@@ -186,8 +209,15 @@ def find_page_forms(url, args):
                 data = data.strip("['\"]")
                 target = (url, "POST", data)
                 retVal.add(target)
+                log.log(24, f'Form found: POST {url} "{data if data else ""}"')
         except UnicodeDecodeError:
             pass
     return retVal
 
 
+def find_forms(urls, args):
+    forms = set()
+    log.log(23, 'Starting form detection...')
+    for url in urls:
+        forms.update(find_page_forms(url, args))
+    return forms