diff --git a/.flake8 b/.flake8
deleted file mode 100644
index a6f057338..000000000
--- a/.flake8
+++ /dev/null
@@ -1,5 +0,0 @@
-[flake8]
-select = F,E722
-ignore = F403,F405,F541
-per-file-ignores =
-    */__init__.py:F401,F403
diff --git a/README.md b/README.md
index ee3a130b3..f029b9432 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
 [![bbot_banner](https://github.com/user-attachments/assets/f02804ce-9478-4f1e-ac4d-9cf5620a3214)](https://github.com/blacklanternsecurity/bbot)
-[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Recon Village 2024](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://www.reconvillage.org/talks) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA)
+[![Python Version](https://img.shields.io/badge/python-3.9+-FF8400)](https://www.python.org) [![License](https://img.shields.io/badge/license-GPLv3-FF8400.svg)](https://github.com/blacklanternsecurity/bbot/blob/dev/LICENSE) [![DEF CON Recon Village 2024](https://img.shields.io/badge/DEF%20CON%20Demo%20Labs-2023-FF8400.svg)](https://www.reconvillage.org/talks) [![PyPi Downloads](https://static.pepy.tech/personalized-badge/bbot?right_color=orange&left_color=grey)](https://pepy.tech/project/bbot) [![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) [![Tests](https://github.com/blacklanternsecurity/bbot/actions/workflows/tests.yml/badge.svg?branch=stable)](https://github.com/blacklanternsecurity/bbot/actions?query=workflow%3A"tests") [![Codecov](https://codecov.io/gh/blacklanternsecurity/bbot/branch/dev/graph/badge.svg?token=IR5AZBDM5K)](https://codecov.io/gh/blacklanternsecurity/bbot) [![Discord](https://img.shields.io/discord/859164869970362439)](https://discord.com/invite/PZqkgxu5SA)
 ### **BEE·bot** is a multipurpose scanner inspired by [Spiderfoot](https://github.com/smicallef/spiderfoot), built to automate your **Recon**, **Bug Bounties**, and **ASM**!
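Most of the remaining diff consists of mechanical lint fixes of the kind Ruff auto-applies once it replaces flake8/black: comparisons against `True`/`False`/`None` become identity checks (E711/E712), `not x in y` becomes `x not in y` (E713), and f-strings with no placeholders lose their `f` prefix (F541). A minimal before/after sketch of those patterns (names simplified from the changes below, not verbatim repository code):

```python
import logging

log = logging.getLogger(__name__)
interactsh_disable = False
tags = {"target", "wildcard"}

# Before (flagged by Ruff):
#   if interactsh_disable == False: ...      # E712: equality comparison to False
#   if not "target" in tags: ...             # E713: membership test written backwards
#   log.warning(f"DNS queries are failing")  # F541: f-string without any placeholders

# After (the form used throughout this diff):
if interactsh_disable is False:  # identity check against the False singleton
    log.debug("interactsh is enabled")
if "target" not in tags:  # "not in" reads as a single operator
    log.debug("event is not a target")
log.warning("DNS queries are failing")  # plain string; no f-prefix needed
```

One thing worth reviewing rather than rubber-stamping: `x == True` and `x is True` only agree when `x` is a real `bool` (`1 == True` is true, `1 is True` is not), so each of these swaps assumes the value under test is actually boolean.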
diff --git a/bbot/cli.py b/bbot/cli.py index c1ec117b2..97d748808 100755 --- a/bbot/cli.py +++ b/bbot/cli.py @@ -130,8 +130,8 @@ async def _main(): ] if deadly_modules and not options.allow_deadly: log.hugewarning(f"You enabled the following deadly modules: {','.join(deadly_modules)}") - log.hugewarning(f"Deadly modules are highly intrusive") - log.hugewarning(f"Please specify --allow-deadly to continue") + log.hugewarning("Deadly modules are highly intrusive") + log.hugewarning("Please specify --allow-deadly to continue") return False # --current-preset diff --git a/bbot/core/config/logger.py b/bbot/core/config/logger.py index 505ad2fab..54866a63b 100644 --- a/bbot/core/config/logger.py +++ b/bbot/core/config/logger.py @@ -68,7 +68,7 @@ def __init__(self, core): self.listener = None # if we haven't set up logging yet, do it now - if not "_BBOT_LOGGING_SETUP" in os.environ: + if "_BBOT_LOGGING_SETUP" not in os.environ: os.environ["_BBOT_LOGGING_SETUP"] = "1" self.queue = multiprocessing.Queue() self.setup_queue_handler() diff --git a/bbot/core/core.py b/bbot/core/core.py index 23b1a9d62..581405277 100644 --- a/bbot/core/core.py +++ b/bbot/core/core.py @@ -106,7 +106,7 @@ def default_config(self): if DEFAULT_CONFIG is None: self.default_config = self.files_config.get_default_config() # ensure bbot home dir - if not "home" in self.default_config: + if "home" not in self.default_config: self.default_config["home"] = "~/.bbot" return DEFAULT_CONFIG diff --git a/bbot/core/event/base.py b/bbot/core/event/base.py index bb6d92e91..48fcce775 100644 --- a/bbot/core/event/base.py +++ b/bbot/core/event/base.py @@ -203,7 +203,7 @@ def __init__( # self.scan holds the instantiated scan object (for helpers, etc.) self.scan = scan if (not self.scan) and (not self._dummy): - raise ValidationError(f"Must specify scan") + raise ValidationError("Must specify scan") # self.scans holds a list of scan IDs from scans that encountered this event self.scans = [] if scans is not None: @@ -222,7 +222,7 @@ def __init__( self.parent = parent if (not self.parent) and (not self._dummy): - raise ValidationError(f"Must specify event parent") + raise ValidationError("Must specify event parent") if tags is not None: for tag in tags: @@ -301,9 +301,9 @@ def internal(self, value): The purpose of internal events is to enable speculative/explorative discovery without cluttering the console with irrelevant or uninteresting events. 
""" - if not value in (True, False): + if value not in (True, False): raise ValueError(f'"internal" must be boolean, not {type(value)}') - if value == True: + if value is True: self.add_tag("internal") else: self.remove_tag("internal") @@ -1013,12 +1013,12 @@ def __init__(self, *args, **kwargs): if not self.host: for parent in self.get_parents(include_self=True): # inherit closest URL - if not "url" in self.data: + if "url" not in self.data: parent_url = getattr(parent, "parsed_url", None) if parent_url is not None: self.data["url"] = parent_url.geturl() # inherit closest path - if not "path" in self.data and isinstance(parent.data, dict) and not parent.type == "HTTP_RESPONSE": + if "path" not in self.data and isinstance(parent.data, dict) and not parent.type == "HTTP_RESPONSE": parent_path = parent.data.get("path", None) if parent_path is not None: self.data["path"] = parent_path @@ -1227,7 +1227,7 @@ def sanitize_data(self, data): def add_tag(self, tag): host_same_as_parent = self.parent and self.host == self.parent.host - if tag == "spider-danger" and host_same_as_parent and not "spider-danger" in self.tags: + if tag == "spider-danger" and host_same_as_parent and "spider-danger" not in self.tags: # increment the web spider distance if self.type == "URL_UNVERIFIED": self.web_spider_distance += 1 @@ -1249,7 +1249,7 @@ def with_port(self): def _words(self): first_elem = self.parsed_url.path.lstrip("/").split("/")[0] - if not "." in first_elem: + if "." not in first_elem: return extract_words(first_elem) return set() @@ -1665,7 +1665,7 @@ def make_event( event.parent = parent if context is not None: event.discovery_context = context - if internal == True: + if internal is True: event.internal = True if tags: event.tags = tags.union(event.tags) diff --git a/bbot/core/helpers/command.py b/bbot/core/helpers/command.py index db05a05e1..d4f017b33 100644 --- a/bbot/core/helpers/command.py +++ b/bbot/core/helpers/command.py @@ -269,11 +269,11 @@ def _prepare_command_kwargs(self, command, kwargs): (['sudo', '-E', '-A', 'LD_LIBRARY_PATH=...', 'PATH=...', 'ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1, 'env': environ(...)}) """ # limit = 100MB (this is needed for cases like httpx that are sending large JSON blobs over stdout) - if not "limit" in kwargs: + if "limit" not in kwargs: kwargs["limit"] = 1024 * 1024 * 100 - if not "stdout" in kwargs: + if "stdout" not in kwargs: kwargs["stdout"] = asyncio.subprocess.PIPE - if not "stderr" in kwargs: + if "stderr" not in kwargs: kwargs["stderr"] = asyncio.subprocess.PIPE sudo = kwargs.pop("sudo", False) @@ -286,7 +286,7 @@ def _prepare_command_kwargs(self, command, kwargs): # use full path of binary, if not already specified binary = command[0] - if not "/" in binary: + if "/" not in binary: binary_full_path = which(binary) if binary_full_path is None: raise SubprocessError(f'Command "{binary}" was not found') diff --git a/bbot/core/helpers/depsinstaller/installer.py b/bbot/core/helpers/depsinstaller/installer.py index fce5f077d..a65c57574 100644 --- a/bbot/core/helpers/depsinstaller/installer.py +++ b/bbot/core/helpers/depsinstaller/installer.py @@ -97,11 +97,11 @@ async def install(self, *modules): or self.deps_behavior == "force_install" ): if not notified: - log.hugeinfo(f"Installing module dependencies. Please be patient, this may take a while.") + log.hugeinfo("Installing module dependencies. 
Please be patient, this may take a while.") notified = True log.verbose(f'Installing dependencies for module "{m}"') # get sudo access if we need it - if preloaded.get("sudo", False) == True: + if preloaded.get("sudo", False) is True: self.ensure_root(f'Module "{m}" needs root privileges to install its dependencies.') success = await self.install_module(m) self.setup_status[module_hash] = success @@ -159,7 +159,7 @@ async def install_module(self, module): deps_common = preloaded["deps"]["common"] if deps_common: for dep_common in deps_common: - if self.setup_status.get(dep_common, False) == True: + if self.setup_status.get(dep_common, False) is True: log.debug( f'Skipping installation of dependency "{dep_common}" for module "{module}" since it is already installed' ) @@ -244,7 +244,7 @@ def shell(self, module, commands): if success: log.info(f"Successfully ran {len(commands):,} shell commands") else: - log.warning(f"Failed to run shell dependencies") + log.warning("Failed to run shell dependencies") return success def tasks(self, module, tasks): diff --git a/bbot/core/helpers/diff.py b/bbot/core/helpers/diff.py index 59ee96567..795390f6f 100644 --- a/bbot/core/helpers/diff.py +++ b/bbot/core/helpers/diff.py @@ -183,7 +183,7 @@ async def compare( await self._baseline() - if timeout == None: + if timeout is None: timeout = self.timeout reflection = False @@ -238,11 +238,11 @@ async def compare( different_headers = self.compare_headers(self.baseline.headers, subject_response.headers) if different_headers: - log.debug(f"headers were different, no match") + log.debug("headers were different, no match") diff_reasons.append("header") - if self.compare_body(self.baseline_json, subject_json) == False: - log.debug(f"difference in HTML body, no match") + if self.compare_body(self.baseline_json, subject_json) is False: + log.debug("difference in HTML body, no match") diff_reasons.append("body") @@ -275,6 +275,6 @@ async def canary_check(self, url, mode, rounds=3): ) # if a nonsense header "caused" a difference, we need to abort. We also need to abort if our canary was reflected - if match == False or reflection == True: + if match is False or reflection is True: return False return True diff --git a/bbot/core/helpers/dns/dns.py b/bbot/core/helpers/dns/dns.py index 4526f084a..89758b35f 100644 --- a/bbot/core/helpers/dns/dns.py +++ b/bbot/core/helpers/dns/dns.py @@ -178,7 +178,7 @@ def _wildcard_prevalidation(self, host): host = clean_dns_record(host) # skip check if it's an IP or a plain hostname - if is_ip(host) or not "." in host: + if is_ip(host) or "." 
not in host: return False # skip if query isn't a dns name diff --git a/bbot/core/helpers/dns/engine.py b/bbot/core/helpers/dns/engine.py index 8f3c5a0b7..9ca94b23a 100644 --- a/bbot/core/helpers/dns/engine.py +++ b/bbot/core/helpers/dns/engine.py @@ -638,7 +638,7 @@ async def _connectivity_check(self, interval=5): self._last_dns_success = time.time() return True if time.time() - self._last_connectivity_warning > interval: - self.log.warning(f"DNS queries are failing, please check your internet connection") + self.log.warning("DNS queries are failing, please check your internet connection") self._last_connectivity_warning = time.time() self._errors.clear() return False diff --git a/bbot/core/helpers/files.py b/bbot/core/helpers/files.py index fb92d1c8b..5e7d2d88d 100644 --- a/bbot/core/helpers/files.py +++ b/bbot/core/helpers/files.py @@ -83,7 +83,7 @@ def _feed_pipe(self, pipe, content, text=True): for c in content: p.write(decode_fn(c) + newline) except BrokenPipeError: - log.debug(f"Broken pipe in _feed_pipe()") + log.debug("Broken pipe in _feed_pipe()") except ValueError: log.debug(f"Error _feed_pipe(): {traceback.format_exc()}") except KeyboardInterrupt: diff --git a/bbot/core/helpers/helper.py b/bbot/core/helpers/helper.py index 6db4b6921..78ccf6715 100644 --- a/bbot/core/helpers/helper.py +++ b/bbot/core/helpers/helper.py @@ -153,7 +153,9 @@ def temp_filename(self, extension=None): return self.temp_dir / filename def clean_old_scans(self): - _filter = lambda x: x.is_dir() and self.regexes.scan_name_regex.match(x.name) + def _filter(x): + return x.is_dir() and self.regexes.scan_name_regex.match(x.name) + self.clean_old(self.scans_dir, keep=self.keep_old_scans, filter=_filter) def make_target(self, *targets, **kwargs): diff --git a/bbot/core/helpers/interactsh.py b/bbot/core/helpers/interactsh.py index f707fac93..c809999a3 100644 --- a/bbot/core/helpers/interactsh.py +++ b/bbot/core/helpers/interactsh.py @@ -155,7 +155,7 @@ async def register(self, callback=None): break if not self.server: - raise InteractshError(f"Failed to register with an interactsh server") + raise InteractshError("Failed to register with an interactsh server") log.info( f"Successfully registered to interactsh server {self.server} with correlation_id {self.correlation_id} [{self.domain}]" @@ -181,7 +181,7 @@ async def deregister(self): >>> await interactsh_client.deregister() """ if not self.server or not self.correlation_id or not self.secret: - raise InteractshError(f"Missing required information to deregister") + raise InteractshError("Missing required information to deregister") headers = {} if self.token: @@ -226,7 +226,7 @@ async def poll(self): ] """ if not self.server or not self.correlation_id or not self.secret: - raise InteractshError(f"Missing required information to poll") + raise InteractshError("Missing required information to poll") headers = {} if self.token: diff --git a/bbot/core/helpers/misc.py b/bbot/core/helpers/misc.py index 1a5693296..75eaa72a7 100644 --- a/bbot/core/helpers/misc.py +++ b/bbot/core/helpers/misc.py @@ -391,7 +391,7 @@ def url_parents(u): parent_list = [] while 1: parent = parent_url(u) - if parent == None: + if parent is None: return parent_list elif parent not in parent_list: parent_list.append(parent) @@ -512,7 +512,7 @@ def domain_stem(domain): - Utilizes the `tldextract` function for domain parsing. 
""" parsed = tldextract(str(domain)) - return f".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".") + return ".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".") def ip_network_parents(i, include_self=False): diff --git a/bbot/core/helpers/validators.py b/bbot/core/helpers/validators.py index bc6ca1372..88acb0146 100644 --- a/bbot/core/helpers/validators.py +++ b/bbot/core/helpers/validators.py @@ -132,7 +132,7 @@ def validate_host(host: Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address] @validator def validate_severity(severity: str): severity = str(severity).strip().upper() - if not severity in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"): + if severity not in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"): raise ValueError(f"Invalid severity: {severity}") return severity diff --git a/bbot/core/helpers/web/client.py b/bbot/core/helpers/web/client.py index c09a0e485..28788e04d 100644 --- a/bbot/core/helpers/web/client.py +++ b/bbot/core/helpers/web/client.py @@ -56,7 +56,7 @@ def __init__(self, *args, **kwargs): # timeout http_timeout = self._web_config.get("http_timeout", 20) - if not "timeout" in kwargs: + if "timeout" not in kwargs: kwargs["timeout"] = http_timeout # headers diff --git a/bbot/core/helpers/web/engine.py b/bbot/core/helpers/web/engine.py index 85805b2b7..4401a219f 100644 --- a/bbot/core/helpers/web/engine.py +++ b/bbot/core/helpers/web/engine.py @@ -137,7 +137,7 @@ async def stream_request(self, url, **kwargs): if max_size is not None: max_size = human_to_bytes(max_size) kwargs["follow_redirects"] = follow_redirects - if not "method" in kwargs: + if "method" not in kwargs: kwargs["method"] = "GET" try: total_size = 0 diff --git a/bbot/core/helpers/web/web.py b/bbot/core/helpers/web/web.py index 33aa2d035..2865732ce 100644 --- a/bbot/core/helpers/web/web.py +++ b/bbot/core/helpers/web/web.py @@ -261,7 +261,7 @@ async def wordlist(self, path, lines=None, **kwargs): """ if not path: raise WordlistError(f"Invalid wordlist: {path}") - if not "cache_hrs" in kwargs: + if "cache_hrs" not in kwargs: kwargs["cache_hrs"] = 720 if self.parent_helper.is_url(path): filename = await self.download(str(path), **kwargs) @@ -350,7 +350,7 @@ async def curl(self, *args, **kwargs): headers[hk] = hv # add the timeout - if not "timeout" in kwargs: + if "timeout" not in kwargs: timeout = http_timeout curl_command.append("-m") diff --git a/bbot/core/helpers/wordcloud.py b/bbot/core/helpers/wordcloud.py index fbd4e7593..824f9ea36 100644 --- a/bbot/core/helpers/wordcloud.py +++ b/bbot/core/helpers/wordcloud.py @@ -111,7 +111,7 @@ def mutations( results = set() for word in words: h = hash(word) - if not h in results: + if h not in results: results.add(h) yield (word,) if numbers > 0: @@ -119,7 +119,7 @@ def mutations( for word in words: for number_mutation in self.get_number_mutations(word, n=numbers, padding=number_padding): h = hash(number_mutation) - if not h in results: + if h not in results: results.add(h) yield (number_mutation,) for word in words: @@ -322,7 +322,7 @@ def json(self, limit=None): @property def default_filename(self): - return self.parent_helper.preset.scan.home / f"wordcloud.tsv" + return self.parent_helper.preset.scan.home / "wordcloud.tsv" def save(self, filename=None, limit=None): """ @@ -357,7 +357,7 @@ def save(self, filename=None, limit=None): log.debug(f"Saved word cloud ({len(self):,} words) to {filename}") return True, filename else: - log.debug(f"No words to save") + log.debug("No words to 
save") except Exception as e: import traceback diff --git a/bbot/core/modules.py b/bbot/core/modules.py index a5f4b30eb..66cd3caa7 100644 --- a/bbot/core/modules.py +++ b/bbot/core/modules.py @@ -153,7 +153,7 @@ def preload(self, module_dirs=None): else: log.debug(f"Preloading {module_name} from disk") if module_dir.name == "modules": - namespace = f"bbot.modules" + namespace = "bbot.modules" else: namespace = f"bbot.modules.{module_dir.name}" try: @@ -400,10 +400,10 @@ def preload_module(self, module_file): deps_common.append(dep_common.value) for task in ansible_tasks: - if not "become" in task: + if "become" not in task: task["become"] = False # don't sudo brew - elif os_platform() == "darwin" and ("package" in task and task.get("become", False) == True): + elif os_platform() == "darwin" and ("package" in task and task.get("become", False) is True): task["become"] = False preloaded_data = { @@ -436,8 +436,8 @@ def preload_module(self, module_file): f'Error while preloading module "{module_file}": No shared dependency named "{dep_common}" (choices: {common_choices})' ) for ansible_task in ansible_task_list: - if any(x == True for x in search_dict_by_key("become", ansible_task)) or any( - x == True for x in search_dict_by_key("ansible_become", ansible_tasks) + if any(x is True for x in search_dict_by_key("become", ansible_task)) or any( + x is True for x in search_dict_by_key("ansible_become", ansible_tasks) ): preloaded_data["sudo"] = True return preloaded_data diff --git a/bbot/modules/base.py b/bbot/modules/base.py index 956d59c98..1fa151c33 100644 --- a/bbot/modules/base.py +++ b/bbot/modules/base.py @@ -311,7 +311,7 @@ async def require_api_key(self): if self.auth_secret: try: await self.ping() - self.hugesuccess(f"API is ready") + self.hugesuccess("API is ready") return True, "" except Exception as e: self.trace(traceback.format_exc()) @@ -332,10 +332,10 @@ def api_key(self, api_keys): def cycle_api_key(self): if len(self._api_keys) > 1: - self.verbose(f"Cycling API key") + self.verbose("Cycling API key") self._api_keys.insert(0, self._api_keys.pop()) else: - self.debug(f"No extra API keys to cycle") + self.debug("No extra API keys to cycle") @property def api_retries(self): @@ -669,7 +669,7 @@ async def _worker(self): if self.incoming_event_queue is not False: event = await self.incoming_event_queue.get() else: - self.debug(f"Event queue is in bad state") + self.debug("Event queue is in bad state") break except asyncio.queues.QueueEmpty: continue @@ -700,7 +700,7 @@ async def _worker(self): else: self.error(f"Critical failure in module {self.name}: {e}") self.error(traceback.format_exc()) - self.log.trace(f"Worker stopped") + self.log.trace("Worker stopped") @property def max_scope_distance(self): @@ -743,7 +743,7 @@ def _event_precheck(self, event): if event.type in ("FINISHED",): return True, "its type is FINISHED" if self.errored: - return False, f"module is in error state" + return False, "module is in error state" # exclude non-watched types if not any(t in self.get_watched_events() for t in ("*", event.type)): return False, "its type is not in watched_events" @@ -770,7 +770,7 @@ async def _event_postcheck(self, event): # check duplicates is_incoming_duplicate, reason = self.is_incoming_duplicate(event, add=True) if is_incoming_duplicate and not self.accept_dupes: - return False, f"module has already seen it" + (f" ({reason})" if reason else "") + return False, "module has already seen it" + (f" ({reason})" if reason else "") return acceptable, reason @@ -863,7 +863,7 @@ async 
def queue_event(self, event): """ async with self._task_counter.count("queue_event()", _log=False): if self.incoming_event_queue is False: - self.debug(f"Not in an acceptable state to queue incoming event") + self.debug("Not in an acceptable state to queue incoming event") return acceptable, reason = self._event_precheck(event) if not acceptable: @@ -879,7 +879,7 @@ async def queue_event(self, event): if event.type != "FINISHED": self.scan._new_activity = True except AttributeError: - self.debug(f"Not in an acceptable state to queue incoming event") + self.debug("Not in an acceptable state to queue incoming event") async def queue_outgoing_event(self, event, **kwargs): """ @@ -904,7 +904,7 @@ async def queue_outgoing_event(self, event, **kwargs): try: await self.outgoing_event_queue.put((event, kwargs)) except AttributeError: - self.debug(f"Not in an acceptable state to queue outgoing event") + self.debug("Not in an acceptable state to queue outgoing event") def set_error_state(self, message=None, clear_outgoing_queue=False, critical=False): """ @@ -939,7 +939,7 @@ def set_error_state(self, message=None, clear_outgoing_queue=False, critical=Fal self.errored = True # clear incoming queue if self.incoming_event_queue is not False: - self.debug(f"Emptying event_queue") + self.debug("Emptying event_queue") with suppress(asyncio.queues.QueueEmpty): while 1: self.incoming_event_queue.get_nowait() @@ -1126,7 +1126,7 @@ def prepare_api_request(self, url, kwargs): """ if self.api_key: url = url.format(api_key=self.api_key) - if not "headers" in kwargs: + if "headers" not in kwargs: kwargs["headers"] = {} kwargs["headers"]["Authorization"] = f"Bearer {self.api_key}" return url, kwargs @@ -1142,7 +1142,7 @@ async def api_request(self, *args, **kwargs): # loop until we have a successful request for _ in range(self.api_retries): - if not "headers" in kwargs: + if "headers" not in kwargs: kwargs["headers"] = {} new_url, kwargs = self.prepare_api_request(url, kwargs) kwargs["url"] = new_url @@ -1589,7 +1589,7 @@ async def _worker(self): event = incoming kwargs = {} else: - self.debug(f"Event queue is in bad state") + self.debug("Event queue is in bad state") break except asyncio.queues.QueueEmpty: await asyncio.sleep(0.1) @@ -1644,7 +1644,7 @@ async def _worker(self): else: self.critical(f"Critical failure in intercept module {self.name}: {e}") self.critical(traceback.format_exc()) - self.log.trace(f"Worker stopped") + self.log.trace("Worker stopped") async def get_incoming_event(self): """ @@ -1675,7 +1675,7 @@ async def queue_event(self, event, kwargs=None): try: self.incoming_event_queue.put_nowait((event, kwargs)) except AttributeError: - self.debug(f"Not in an acceptable state to queue incoming event") + self.debug("Not in an acceptable state to queue incoming event") async def _event_postcheck(self, event): return await self._event_postcheck_inner(event) diff --git a/bbot/modules/bypass403.py b/bbot/modules/bypass403.py index 4f3b51789..61fb51077 100644 --- a/bbot/modules/bypass403.py +++ b/bbot/modules/bypass403.py @@ -92,7 +92,7 @@ async def do_checks(self, compare_helper, event, collapse_threshold): return None sig = self.format_signature(sig, event) - if sig[2] != None: + if sig[2] is not None: headers = dict(sig[2]) else: headers = None @@ -106,13 +106,13 @@ async def do_checks(self, compare_helper, event, collapse_threshold): continue # In some cases WAFs will respond with a 200 code which causes a false positive - if subject_response != None: + if subject_response is not None: for ws in 
waf_strings: if ws in subject_response.text: self.debug("Rejecting result based on presence of WAF string") return - if match == False: + if match is False: if str(subject_response.status_code)[0] != "4": if sig[2]: added_header_tuple = next(iter(sig[2].items())) @@ -165,13 +165,13 @@ async def filter_event(self, event): return False def format_signature(self, sig, event): - if sig[3] == True: + if sig[3] is True: cleaned_path = event.parsed_url.path.strip("/") else: cleaned_path = event.parsed_url.path.lstrip("/") kwargs = {"scheme": event.parsed_url.scheme, "netloc": event.parsed_url.netloc, "path": cleaned_path} formatted_url = sig[1].format(**kwargs) - if sig[2] != None: + if sig[2] is not None: formatted_headers = {k: v.format(**kwargs) for k, v in sig[2].items()} else: formatted_headers = None diff --git a/bbot/modules/c99.py b/bbot/modules/c99.py index 7bb395fa1..17fea87a1 100644 --- a/bbot/modules/c99.py +++ b/bbot/modules/c99.py @@ -20,7 +20,7 @@ class c99(subdomain_enum_apikey): async def ping(self): url = f"{self.base_url}/randomnumber?key={{api_key}}&between=1,100&json" response = await self.api_request(url) - assert response.json()["success"] == True, getattr(response, "text", "no response from server") + assert response.json()["success"] is True, getattr(response, "text", "no response from server") async def request_url(self, query): url = f"{self.base_url}/subdomainfinder?key={{api_key}}&domain={self.helpers.quote(query)}&json" diff --git a/bbot/modules/deadly/ffuf.py b/bbot/modules/deadly/ffuf.py index 6144d0b13..e0a45e12b 100644 --- a/bbot/modules/deadly/ffuf.py +++ b/bbot/modules/deadly/ffuf.py @@ -52,7 +52,7 @@ async def setup(self): async def handle_event(self, event): if self.helpers.url_depth(event.data) > self.config.get("max_depth"): - self.debug(f"Exceeded max depth, aborting event") + self.debug("Exceeded max depth, aborting event") return # only FFUF against a directory @@ -252,7 +252,7 @@ async def execute_ffuf( self.warning(f"Exiting from FFUF run early, received an ABORT filter: [{filters[ext][1]}]") continue - elif filters[ext] == None: + elif filters[ext] is None: pass else: @@ -282,7 +282,7 @@ async def execute_ffuf( else: if mode == "normal": # before emitting, we are going to send another baseline. 
This will immediately catch things like a WAF flipping blocking on us mid-scan - if baseline == False: + if baseline is False: pre_emit_temp_canary = [ f async for f in self.execute_ffuf( diff --git a/bbot/modules/deadly/nuclei.py b/bbot/modules/deadly/nuclei.py index f254f9bb0..c525b5a8f 100644 --- a/bbot/modules/deadly/nuclei.py +++ b/bbot/modules/deadly/nuclei.py @@ -226,7 +226,7 @@ async def execute_nuclei(self, nuclei_input): command.append(f"-{cli_option}") command.append(option) - if self.scan.config.get("interactsh_disable") == True: + if self.scan.config.get("interactsh_disable") is True: self.info("Disbling interactsh in accordance with global settings") command.append("-no-interactsh") diff --git a/bbot/modules/deadly/vhost.py b/bbot/modules/deadly/vhost.py index 66c1c516c..e37be8210 100644 --- a/bbot/modules/deadly/vhost.py +++ b/bbot/modules/deadly/vhost.py @@ -73,7 +73,7 @@ async def handle_event(self, event): async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=False): filters = await self.baseline_ffuf(f"{host}/", exts=[""], suffix=basehost, mode="hostheader") - self.debug(f"Baseline completed and returned these filters:") + self.debug("Baseline completed and returned these filters:") self.debug(filters) if not wordlist: wordlist = self.tempfile @@ -90,7 +90,7 @@ async def ffuf_vhost(self, host, basehost, event, wordlist=None, skip_dns_host=F parent=event, context=f"{{module}} brute-forced virtual hosts for {event.data} and found {{event.type}}: {vhost_str}", ) - if skip_dns_host == False: + if skip_dns_host is False: await self.emit_event( f"{vhost_dict['vhost']}{basehost}", "DNS_NAME", diff --git a/bbot/modules/dnsbimi.py b/bbot/modules/dnsbimi.py index d974b1183..f780b1551 100644 --- a/bbot/modules/dnsbimi.py +++ b/bbot/modules/dnsbimi.py @@ -80,7 +80,7 @@ async def filter_event(self, event): return False, "event is wildcard" # there's no value in inspecting service records - if service_record(event.host) == True: + if service_record(event.host) is True: return False, "service record detected" return True diff --git a/bbot/modules/dnsdumpster.py b/bbot/modules/dnsdumpster.py index ab36b493e..5c0ae2904 100644 --- a/bbot/modules/dnsdumpster.py +++ b/bbot/modules/dnsdumpster.py @@ -31,7 +31,7 @@ async def query(self, domain): html = self.helpers.beautifulsoup(res1.content, "html.parser") if html is False: - self.verbose(f"BeautifulSoup returned False") + self.verbose("BeautifulSoup returned False") return ret csrftoken = None @@ -82,7 +82,7 @@ async def query(self, domain): return ret html = self.helpers.beautifulsoup(res2.content, "html.parser") if html is False: - self.verbose(f"BeautifulSoup returned False") + self.verbose("BeautifulSoup returned False") return ret escaped_domain = re.escape(domain) match_pattern = re.compile(r"^[\w\.-]+\." 
+ escaped_domain + r"$") diff --git a/bbot/modules/generic_ssrf.py b/bbot/modules/generic_ssrf.py index c6bd38544..f486c7d97 100644 --- a/bbot/modules/generic_ssrf.py +++ b/bbot/modules/generic_ssrf.py @@ -163,7 +163,7 @@ async def setup(self): self.severity = None self.generic_only = self.config.get("generic_only", False) - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: self.interactsh_instance = self.helpers.interactsh() self.interactsh_domain = await self.interactsh_instance.register(callback=self.interactsh_callback) @@ -216,7 +216,7 @@ async def interactsh_callback(self, r): self.debug("skipping result because subdomain tag was missing") async def cleanup(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: await self.interactsh_instance.deregister() self.debug( @@ -226,7 +226,7 @@ async def cleanup(self): self.warning(f"Interactsh failure: {e}") async def finish(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): diff --git a/bbot/modules/github_workflows.py b/bbot/modules/github_workflows.py index 369b33742..9ba34d155 100644 --- a/bbot/modules/github_workflows.py +++ b/bbot/modules/github_workflows.py @@ -166,7 +166,7 @@ async def download_run_logs(self, owner, repo, run_id): main_logs = [] with zipfile.ZipFile(file_destination, "r") as logzip: for name in logzip.namelist(): - if fnmatch.fnmatch(name, "*.txt") and not "/" in name: + if fnmatch.fnmatch(name, "*.txt") and "/" not in name: logzip.extract(name, folder) main_logs.append(folder / name) return main_logs diff --git a/bbot/modules/gowitness.py b/bbot/modules/gowitness.py index 08edfaaf3..375817c1a 100644 --- a/bbot/modules/gowitness.py +++ b/bbot/modules/gowitness.py @@ -264,8 +264,8 @@ async def cur_execute(self, cur, query): async def report(self): if self.screenshots_taken: self.success(f"{len(self.screenshots_taken):,} web screenshots captured. 
To view:") - self.success(f" - Start gowitness") + self.success(" - Start gowitness") self.success(f" - cd {self.base_path} && ./gowitness server") - self.success(f" - Browse to http://localhost:7171") + self.success(" - Browse to http://localhost:7171") else: - self.info(f"No web screenshots captured") + self.info("No web screenshots captured") diff --git a/bbot/modules/host_header.py b/bbot/modules/host_header.py index 00dd640ba..50a6290cf 100644 --- a/bbot/modules/host_header.py +++ b/bbot/modules/host_header.py @@ -19,7 +19,7 @@ class host_header(BaseModule): async def setup(self): self.subdomain_tags = {} - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: self.interactsh_instance = self.helpers.interactsh() self.domain = await self.interactsh_instance.register(callback=self.interactsh_callback) @@ -60,7 +60,7 @@ async def interactsh_callback(self, r): self.debug("skipping results because subdomain tag was missing") async def finish(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: await self.helpers.sleep(5) try: for r in await self.interactsh_instance.poll(): @@ -69,7 +69,7 @@ async def finish(self): self.debug(f"Error in interact.sh: {e}") async def cleanup(self): - if self.scan.config.get("interactsh_disable", False) == False: + if self.scan.config.get("interactsh_disable", False) is False: try: await self.interactsh_instance.deregister() self.debug( @@ -136,7 +136,7 @@ async def handle_event(self, event): split_output = output.split("\n") if " 4" in split_output: - description = f"Duplicate Host Header Tolerated" + description = "Duplicate Host Header Tolerated" await self.emit_event( { "host": str(event.host), diff --git a/bbot/modules/httpx.py b/bbot/modules/httpx.py index 2cd2c0504..059bc2461 100644 --- a/bbot/modules/httpx.py +++ b/bbot/modules/httpx.py @@ -90,7 +90,7 @@ def make_url_metadata(self, event): else: url = str(event.data) url_hash = hash((event.host, event.port, has_spider_max)) - if url_hash == None: + if url_hash is None: url_hash = hash((url, has_spider_max)) return url, url_hash diff --git a/bbot/modules/iis_shortnames.py b/bbot/modules/iis_shortnames.py index 6a173a929..60de15626 100644 --- a/bbot/modules/iis_shortnames.py +++ b/bbot/modules/iis_shortnames.py @@ -160,7 +160,7 @@ async def solve_shortname_recursive( url_hint_list = [] found_results = False - cl = ext_char_list if extension_mode == True else char_list + cl = ext_char_list if extension_mode is True else char_list urls_and_kwargs = [] @@ -209,7 +209,7 @@ async def solve_shortname_recursive( extension_mode, node_count=node_count, ) - if len(prefix) > 0 and found_results == False: + if len(prefix) > 0 and found_results is False: url_hint_list.append(f"{prefix}") self.verbose(f"Found new (possibly partial) URL_HINT: {prefix} from node {target}") return url_hint_list @@ -234,7 +234,7 @@ class safety_counter_obj: {"severity": "LOW", "host": str(event.host), "url": normalized_url, "description": description}, "VULNERABILITY", event, - context=f"{{module}} detected low {{event.type}}: IIS shortname enumeration", + context="{module} detected low {event.type}: IIS shortname enumeration", ) if not self.config.get("detect_only"): for detection in detections: diff --git a/bbot/modules/internal/cloudcheck.py b/bbot/modules/internal/cloudcheck.py index 42c51ec03..fa743360e 100644 --- a/bbot/modules/internal/cloudcheck.py +++ 
b/bbot/modules/internal/cloudcheck.py @@ -74,7 +74,7 @@ async def handle_event(self, event, **kwargs): if match: matches.append(match.groups()) for match in matches: - if not match in found: + if match not in found: found.add(match) _kwargs = dict(base_kwargs) diff --git a/bbot/modules/internal/dnsresolve.py b/bbot/modules/internal/dnsresolve.py index 9b68b7bb9..3367ce7f9 100644 --- a/bbot/modules/internal/dnsresolve.py +++ b/bbot/modules/internal/dnsresolve.py @@ -131,9 +131,9 @@ async def handle_wildcard_event(self, event): event.host, rdtypes=rdtypes, raw_dns_records=event.raw_dns_records ) for rdtype, (is_wildcard, wildcard_host) in wildcard_rdtypes.items(): - if is_wildcard == False: + if is_wildcard is False: continue - elif is_wildcard == True: + elif is_wildcard is True: event.add_tag("wildcard") wildcard_tag = "wildcard" else: @@ -142,16 +142,16 @@ async def handle_wildcard_event(self, event): event.add_tag(f"{rdtype}-{wildcard_tag}") # wildcard event modification (www.evilcorp.com --> _wildcard.evilcorp.com) - if wildcard_rdtypes and not "target" in event.tags: + if wildcard_rdtypes and "target" not in event.tags: # these are the rdtypes that have wildcards wildcard_rdtypes_set = set(wildcard_rdtypes) # consider the event a full wildcard if all its records are wildcards event_is_wildcard = False if wildcard_rdtypes_set: - event_is_wildcard = all(r[0] == True for r in wildcard_rdtypes.values()) + event_is_wildcard = all(r[0] is True for r in wildcard_rdtypes.values()) if event_is_wildcard: - if event.type in ("DNS_NAME",) and not "_wildcard" in event.data.split("."): + if event.type in ("DNS_NAME",) and "_wildcard" not in event.data.split("."): wildcard_parent = self.helpers.parent_domain(event.host) for rdtype, (_is_wildcard, _parent_domain) in wildcard_rdtypes.items(): if _is_wildcard: @@ -273,7 +273,7 @@ async def resolve_event(self, event, types): # tag event with errors for rdtype, errors in dns_errors.items(): # only consider it an error if there weren't any results for that rdtype - if errors and not rdtype in event.dns_children: + if errors and rdtype not in event.dns_children: event.add_tag(f"{rdtype}-error") def get_dns_parent(self, event): diff --git a/bbot/modules/internal/speculate.py b/bbot/modules/internal/speculate.py index 84e9726bb..958723739 100644 --- a/bbot/modules/internal/speculate.py +++ b/bbot/modules/internal/speculate.py @@ -71,7 +71,7 @@ async def setup(self): self.hugewarning( f"Selected target ({target_len:,} hosts) is too large, skipping IP_RANGE --> IP_ADDRESS speculation" ) - self.hugewarning(f'Enabling the "portscan" module is highly recommended') + self.hugewarning('Enabling the "portscan" module is highly recommended') self.range_to_ip = False return True @@ -126,7 +126,7 @@ async def handle_event(self, event): parent = self.helpers.parent_domain(event.host_original) if parent != event.data: await self.emit_event( - parent, "DNS_NAME", parent=event, context=f"speculated parent {{event.type}}: {{event.data}}" + parent, "DNS_NAME", parent=event, context="speculated parent {event.type}: {event.data}" ) # URL --> OPEN_TCP_PORT diff --git a/bbot/modules/ipneighbor.py b/bbot/modules/ipneighbor.py index 658383258..3bae28a37 100644 --- a/bbot/modules/ipneighbor.py +++ b/bbot/modules/ipneighbor.py @@ -31,7 +31,7 @@ async def handle_event(self, event): netmask = main_ip.max_prefixlen - min(main_ip.max_prefixlen, self.num_bits) network = ipaddress.ip_network(f"{main_ip}/{netmask}", strict=False) subnet_hash = hash(network) - if not subnet_hash in 
self.processed: + if subnet_hash not in self.processed: self.processed.add(subnet_hash) for ip in network: if ip != main_ip: diff --git a/bbot/modules/jadx.py b/bbot/modules/jadx.py index d065310ae..d081171d1 100644 --- a/bbot/modules/jadx.py +++ b/bbot/modules/jadx.py @@ -43,7 +43,7 @@ async def setup(self): async def filter_event(self, event): if "file" in event.tags: - if not event.data["magic_description"].lower() in self.allowed_file_types: + if event.data["magic_description"].lower() not in self.allowed_file_types: return False, f"Jadx is not able to decompile this file type: {event.data['magic_description']}" else: return False, "Event is not a file" diff --git a/bbot/modules/newsletters.py b/bbot/modules/newsletters.py index 5f2bac729..114f7d66f 100644 --- a/bbot/modules/newsletters.py +++ b/bbot/modules/newsletters.py @@ -46,11 +46,11 @@ async def handle_event(self, event): body = _event.data["body"] soup = self.helpers.beautifulsoup(body, "html.parser") if soup is False: - self.debug(f"BeautifulSoup returned False") + self.debug("BeautifulSoup returned False") return result = self.find_type(soup) if result: - description = f"Found a Newsletter Submission Form that could be used for email bombing attacks" + description = "Found a Newsletter Submission Form that could be used for email bombing attacks" data = {"host": str(_event.host), "description": description, "url": _event.data["url"]} await self.emit_event( data, diff --git a/bbot/modules/output/asset_inventory.py b/bbot/modules/output/asset_inventory.py index a150c029d..aaf97f80b 100644 --- a/bbot/modules/output/asset_inventory.py +++ b/bbot/modules/output/asset_inventory.py @@ -99,7 +99,7 @@ def increment_stat(stat, value): totals[stat] += 1 except KeyError: totals[stat] = 1 - if not stat in stats: + if stat not in stats: stats[stat] = {} try: stats[stat][value] += 1 diff --git a/bbot/modules/output/base.py b/bbot/modules/output/base.py index 0f6e7ac78..16fa4443b 100644 --- a/bbot/modules/output/base.py +++ b/bbot/modules/output/base.py @@ -24,7 +24,7 @@ def _event_precheck(self, event): if event.type in ("FINISHED",): return True, "its type is FINISHED" if self.errored: - return False, f"module is in error state" + return False, "module is in error state" # exclude non-watched types if not any(t in self.get_watched_events() for t in ("*", event.type)): return False, "its type is not in watched_events" diff --git a/bbot/modules/output/stdout.py b/bbot/modules/output/stdout.py index 6e4ccf5be..520a90204 100644 --- a/bbot/modules/output/stdout.py +++ b/bbot/modules/output/stdout.py @@ -20,7 +20,7 @@ class Stdout(BaseOutputModule): async def setup(self): self.text_format = self.config.get("format", "text").strip().lower() - if not self.text_format in self.format_choices: + if self.text_format not in self.format_choices: return ( False, f'Invalid text format choice, "{self.text_format}" (choices: {",".join(self.format_choices)})', @@ -33,7 +33,7 @@ async def setup(self): async def filter_event(self, event): if self.accept_event_types: - if not event.type in self.accept_event_types: + if event.type not in self.accept_event_types: return False, f'Event type "{event.type}" is not in the allowed event_types' return True diff --git a/bbot/modules/portscan.py b/bbot/modules/portscan.py index 674242169..ab3e54c1b 100644 --- a/bbot/modules/portscan.py +++ b/bbot/modules/portscan.py @@ -99,7 +99,7 @@ async def setup(self): return False, "Masscan failed to run" returncode = getattr(ipv6_result, "returncode", 0) if returncode and "failed 
to detect IPv6 address" in ipv6_result.stderr: - self.warning(f"It looks like you are not set up for IPv6. IPv6 targets will not be scanned.") + self.warning("It looks like you are not set up for IPv6. IPv6 targets will not be scanned.") self.ipv6_support = False return True @@ -334,7 +334,7 @@ def log_masscan_status(self, s): if "FAIL" in s: self.warning(s) self.warning( - f'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.' + 'Masscan failed to detect interface. Recommend passing "adapter_ip", "adapter_mac", and "router_mac" config options to portscan module.' ) else: self.verbose(s) diff --git a/bbot/modules/report/asn.py b/bbot/modules/report/asn.py index ba5e1e39a..b3a80fc58 100644 --- a/bbot/modules/report/asn.py +++ b/bbot/modules/report/asn.py @@ -38,7 +38,7 @@ async def filter_event(self, event): async def handle_event(self, event): host = event.host - if self.cache_get(host) == False: + if self.cache_get(host) is False: asns, source = await self.get_asn(host) if not asns: self.cache_put(self.unknown_asn) @@ -96,7 +96,7 @@ def cache_get(self, ip): for p in self.helpers.ip_network_parents(ip): try: self.asn_counts[p] += 1 - if ret == False: + if ret is False: ret = p except KeyError: continue @@ -112,7 +112,7 @@ async def get_asn(self, ip, retries=1): for i, source in enumerate(list(self.sources)): get_asn_fn = getattr(self, f"get_asn_{source}") res = await get_asn_fn(ip) - if res == False: + if res is False: # demote the current source to lowest priority since it just failed self.sources.append(self.sources.pop(i)) self.verbose(f"Failed to contact {source}, retrying") @@ -125,7 +125,7 @@ async def get_asn_ripe(self, ip): url = f"https://stat.ripe.net/data/network-info/data.json?resource={ip}" response = await self.get_url(url, "ASN") asns = [] - if response == False: + if response is False: return False data = response.get("data", {}) if not data: @@ -138,7 +138,7 @@ async def get_asn_ripe(self, ip): asn_numbers = [] for number in asn_numbers: asn = await self.get_asn_metadata_ripe(number) - if asn == False: + if asn is False: return False asn["subnet"] = prefix asns.append(asn) @@ -155,7 +155,7 @@ async def get_asn_metadata_ripe(self, asn_number): } url = f"https://stat.ripe.net/data/whois/data.json?resource={asn_number}" response = await self.get_url(url, "ASN Metadata", cache=True) - if response == False: + if response is False: return False data = response.get("data", {}) if not data: @@ -187,7 +187,7 @@ async def get_asn_bgpview(self, ip): data = await self.get_url(url, "ASN") asns = [] asns_tried = set() - if data == False: + if data is False: return False data = data.get("data", {}) prefixes = data.get("prefixes", []) @@ -201,9 +201,9 @@ async def get_asn_bgpview(self, ip): description = details.get("description") or prefix.get("description") or "" country = details.get("country_code") or prefix.get("country_code") or "" emails = [] - if not asn in asns_tried: + if asn not in asns_tried: emails = await self.get_emails_bgpview(asn) - if emails == False: + if emails is False: return False asns_tried.add(asn) asns.append( @@ -217,7 +217,7 @@ async def get_emails_bgpview(self, asn): contacts = [] url = f"https://api.bgpview.io/asn/{asn}" data = await self.get_url(url, "ASN metadata", cache=True) - if data == False: + if data is False: return False data = data.get("data", {}) if not data: diff --git a/bbot/modules/robots.py b/bbot/modules/robots.py index fc7692051..e41b3119f 100644 --- 
a/bbot/modules/robots.py +++ b/bbot/modules/robots.py @@ -33,14 +33,14 @@ async def handle_event(self, event): for l in lines: if len(l) > 0: split_l = l.split(": ") - if (split_l[0].lower() == "allow" and self.config.get("include_allow") == True) or ( - split_l[0].lower() == "disallow" and self.config.get("include_disallow") == True + if (split_l[0].lower() == "allow" and self.config.get("include_allow") is True) or ( + split_l[0].lower() == "disallow" and self.config.get("include_disallow") is True ): unverified_url = f"{host}{split_l[1].lstrip('/')}".replace( "*", self.helpers.rand_string(4) ) - elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") == True: + elif split_l[0].lower() == "sitemap" and self.config.get("include_sitemap") is True: unverified_url = split_l[1] else: continue diff --git a/bbot/modules/securitytxt.py b/bbot/modules/securitytxt.py index 681519e37..880865c9b 100644 --- a/bbot/modules/securitytxt.py +++ b/bbot/modules/securitytxt.py @@ -121,7 +121,7 @@ async def handle_event(self, event): start, end = match.span() found_url = v[start:end] - if found_url != url and self._urls == True: + if found_url != url and self._urls is True: await self.emit_event(found_url, "URL_UNVERIFIED", parent=event, tags=tags) diff --git a/bbot/modules/sitedossier.py b/bbot/modules/sitedossier.py index fe9015027..659f15960 100644 --- a/bbot/modules/sitedossier.py +++ b/bbot/modules/sitedossier.py @@ -52,5 +52,5 @@ async def query(self, query, parse_fn=None, request_fn=None): results.add(hostname) yield hostname if ' 1 # list modules monkeypatch.setattr("sys.argv", ["bbot", "--list-modules"]) result = await cli._main() - assert result == None + assert result is None out, err = capsys.readouterr() # internal modules assert "| excavate " in out @@ -162,7 +162,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not output_dir.exists() monkeypatch.setattr("sys.argv", ["bbot", "-o", str(output_dir), "-n", scan_name, "-y"]) result = await cli._main() - assert result == True + assert result is True assert output_dir.is_dir() assert scan_dir.is_dir() assert "[SCAN]" in open(scan_dir / "output.txt").read() @@ -173,7 +173,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "--list-module-options"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| modules.wayback.urls" in out assert "| bool" in out assert "| emit URLs in addition to DNS_NAMEs" in out @@ -185,36 +185,36 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "--list-module-options"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| modules.wayback.urls" in out assert "| bool" in out assert "| emit URLs in addition to DNS_NAMEs" in out assert "| False" in out assert "| modules.dnsbrute.wordlist" in out - assert not "| modules.robots.include_allow" in out + assert "| modules.robots.include_allow" not in out # list module options by module monkeypatch.setattr("sys.argv", ["bbot", "-m", "dnsbrute", "-lmo"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert out.count("modules.") == out.count("modules.dnsbrute.") - assert not "| modules.wayback.urls" in out + assert "| modules.wayback.urls" not in out assert "| 
modules.dnsbrute.wordlist" in out - assert not "| modules.robots.include_allow" in out + assert "| modules.robots.include_allow" not in out # list output module options by module monkeypatch.setattr("sys.argv", ["bbot", "-om", "stdout", "-lmo"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert out.count("modules.") == out.count("modules.stdout.") # list flags monkeypatch.setattr("sys.argv", ["bbot", "--list-flags"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| safe " in out assert "| Non-intrusive, safe to run " in out assert "| active " in out @@ -224,32 +224,32 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "--list-flags"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None - assert not "| safe " in out + assert result is None + assert "| safe " not in out assert "| active " in out - assert not "| passive " in out + assert "| passive " not in out # list multiple flags monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "safe", "--list-flags"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| safe " in out assert "| active " in out - assert not "| passive " in out + assert "| passive " not in out # no args monkeypatch.setattr("sys.argv", ["bbot"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "Target:\n -t TARGET [TARGET ...]" in out # list modules monkeypatch.setattr("sys.argv", ["bbot", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| dnsbrute " in out assert "| httpx " in out assert "| robots " in out @@ -258,33 +258,33 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| dnsbrute " in out assert "| httpx " in out - assert not "| robots " in out + assert "| robots " not in out # list modules by flag + required flag monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-rf", "passive", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| chaos " in out - assert not "| httpx " in out + assert "| httpx " not in out # list modules by flag + excluded flag monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-ef", "active", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None + assert result is None assert "| chaos " in out - assert not "| httpx " in out + assert "| httpx " not in out # list modules by flag + excluded module monkeypatch.setattr("sys.argv", ["bbot", "-f", "subdomain-enum", "-em", "dnsbrute", "-l"]) result = await cli._main() out, err = capsys.readouterr() - assert result == None - assert not "| dnsbrute " in out + assert result is None + assert "| dnsbrute " not in out assert "| httpx " in out # output modules override @@ -292,12 +292,12 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-om", "csv,json", "-y"]) result = await cli._main() - assert result == True + assert result is True 
assert "Loaded 2/2 output modules, (csv,json)" in caplog.text caplog.clear() monkeypatch.setattr("sys.argv", ["bbot", "-em", "csv,json", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 3/3 output modules, (python,stdout,txt)" in caplog.text # output modules override @@ -305,7 +305,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-om", "subdomains", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 6/6 output modules, (csv,json,python,stdout,subdomains,txt)" in caplog.text # internal modules override @@ -313,17 +313,17 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 5/5 internal modules (aggregate,cloudcheck,dnsresolve,excavate,speculate)" in caplog.text caplog.clear() monkeypatch.setattr("sys.argv", ["bbot", "-em", "excavate", "speculate", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 3/3 internal modules (aggregate,cloudcheck,dnsresolve)" in caplog.text caplog.clear() monkeypatch.setattr("sys.argv", ["bbot", "-c", "speculate=false", "-y"]) result = await cli._main() - assert result == True + assert result is True assert "Loaded 4/4 internal modules (aggregate,cloudcheck,dnsresolve,excavate)" in caplog.text # custom target type @@ -331,7 +331,7 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): monkeypatch.setattr("sys.argv", ["bbot", "-t", "ORG:evilcorp", "-y"]) result = await cli._main() out, err = capsys.readouterr() - assert result == True + assert result is True assert "[ORG_STUB] evilcorp TARGET" in out # activate modules by flag @@ -339,50 +339,50 @@ async def test_cli_args(monkeypatch, caplog, capsys, clean_default_config): assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-f", "passive"]) result = await cli._main() - assert result == True + assert result is True # unconsoleable output module monkeypatch.setattr("sys.argv", ["bbot", "-om", "web_report"]) result = await cli._main() - assert result == True + assert result is True # unresolved dependency monkeypatch.setattr("sys.argv", ["bbot", "-m", "wappalyzer"]) result = await cli._main() - assert result == True + assert result is True # require flags monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "-rf", "passive"]) result = await cli._main() - assert result == True + assert result is True # excluded flags monkeypatch.setattr("sys.argv", ["bbot", "-f", "active", "-ef", "active"]) result = await cli._main() - assert result == True + assert result is True # slow modules monkeypatch.setattr("sys.argv", ["bbot", "-m", "bucket_digitalocean"]) result = await cli._main() - assert result == True + assert result is True # deadly modules caplog.clear() assert not caplog.text monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei"]) result = await cli._main() - assert result == False, "-m nuclei ran without --allow-deadly" + assert result is False, "-m nuclei ran without --allow-deadly" assert "Please specify --allow-deadly to continue" in caplog.text # --allow-deadly monkeypatch.setattr("sys.argv", ["bbot", "-m", "nuclei", "--allow-deadly"]) result = await cli._main() - assert result == True, "-m nuclei failed to run with --allow-deadly" + assert result is True, 
"-m nuclei failed to run with --allow-deadly" # install all deps monkeypatch.setattr("sys.argv", ["bbot", "--install-all-deps"]) success = await cli._main() - assert success == True, "--install-all-deps failed for at least one module" + assert success is True, "--install-all-deps failed for at least one module" @pytest.mark.asyncio @@ -396,7 +396,7 @@ async def test_cli_customheaders(monkeypatch, caplog, capsys): "sys.argv", ["bbot", "--custom-headers", "foo=bar", "foo2=bar2", "foo3=bar=3", "--current-preset"] ) success = await cli._main() - assert success == None, "setting custom headers on command line failed" + assert success is None, "setting custom headers on command line failed" captured = capsys.readouterr() stdout_preset = yaml.safe_load(captured.out) assert stdout_preset["config"]["web"]["http_headers"] == {"foo": "bar", "foo2": "bar2", "foo3": "bar=3"} @@ -404,21 +404,21 @@ async def test_cli_customheaders(monkeypatch, caplog, capsys): # test custom headers invalid (no "=") monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "justastring", "--current-preset"]) result = await cli._main() - assert result == None + assert result is None assert "Custom headers not formatted correctly (missing '=')" in caplog.text caplog.clear() # test custom headers invalid (missing key) monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "=nokey", "--current-preset"]) result = await cli._main() - assert result == None + assert result is None assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text caplog.clear() # test custom headers invalid (missing value) monkeypatch.setattr("sys.argv", ["bbot", "--custom-headers", "missingvalue=", "--current-preset"]) result = await cli._main() - assert result == None + assert result is None assert "Custom headers not formatted correctly (missing header name or value)" in caplog.text diff --git a/bbot/test/test_step_1/test_dns.py b/bbot/test/test_step_1/test_dns.py index d0bfb6833..cd75b5fff 100644 --- a/bbot/test/test_step_1/test_dns.py +++ b/bbot/test/test_step_1/test_dns.py @@ -23,7 +23,7 @@ async def test_dns_engine(bbot_scanner): ) result = await scan.helpers.resolve("one.one.one.one") assert "1.1.1.1" in result - assert not "2606:4700:4700::1111" in result + assert "2606:4700:4700::1111" not in result results = [_ async for _ in scan.helpers.resolve_batch(("one.one.one.one", "1.1.1.1"))] pass_1 = False @@ -85,12 +85,12 @@ async def test_dns_resolution(bbot_scanner): for answer in answers: responses += list(extract_targets(answer)) assert ("A", "1.1.1.1") in responses - assert not ("AAAA", "2606:4700:4700::1111") in responses + assert ("AAAA", "2606:4700:4700::1111") not in responses answers, errors = await dnsengine.resolve_raw("one.one.one.one", type="AAAA") responses = [] for answer in answers: responses += list(extract_targets(answer)) - assert not ("A", "1.1.1.1") in responses + assert ("A", "1.1.1.1") not in responses assert ("AAAA", "2606:4700:4700::1111") in responses answers, errors = await dnsengine.resolve_raw("1.1.1.1") responses = [] @@ -141,11 +141,11 @@ async def test_dns_resolution(bbot_scanner): assert hash(("1.1.1.1", "PTR")) in dnsengine._dns_cache await dnsengine.resolve("one.one.one.one", type="A") assert hash(("one.one.one.one", "A")) in dnsengine._dns_cache - assert not hash(("one.one.one.one", "AAAA")) in dnsengine._dns_cache + assert hash(("one.one.one.one", "AAAA")) not in dnsengine._dns_cache dnsengine._dns_cache.clear() await dnsengine.resolve("one.one.one.one", type="AAAA") 
assert hash(("one.one.one.one", "AAAA")) in dnsengine._dns_cache - assert not hash(("one.one.one.one", "A")) in dnsengine._dns_cache + assert hash(("one.one.one.one", "A")) not in dnsengine._dns_cache await dnsengine._shutdown() @@ -165,7 +165,7 @@ async def test_dns_resolution(bbot_scanner): assert "A" in resolved_hosts_event1.raw_dns_records assert "AAAA" in resolved_hosts_event1.raw_dns_records assert "a-record" in resolved_hosts_event1.tags - assert not "a-record" in resolved_hosts_event2.tags + assert "a-record" not in resolved_hosts_event2.tags scan2 = bbot_scanner("evilcorp.com", config={"dns": {"minimal": False}}) await scan2.helpers.dns._mock_dns( @@ -198,7 +198,7 @@ async def test_wildcards(bbot_scanner): assert len(dnsengine._wildcard_cache) == len(all_rdtypes) + (len(all_rdtypes) - 2) for rdtype in all_rdtypes: assert hash(("github.io", rdtype)) in dnsengine._wildcard_cache - if not rdtype in ("A", "AAAA"): + if rdtype not in ("A", "AAAA"): assert hash(("asdf.github.io", rdtype)) in dnsengine._wildcard_cache assert "github.io" in wildcard_domains assert "A" in wildcard_domains["github.io"] @@ -781,16 +781,16 @@ async def test_dns_graph_structure(bbot_scanner): @pytest.mark.asyncio async def test_dns_helpers(bbot_scanner): - assert service_record("") == False - assert service_record("localhost") == False - assert service_record("www.example.com") == False - assert service_record("www.example.com", "SRV") == True - assert service_record("_custom._service.example.com", "SRV") == True - assert service_record("_custom._service.example.com", "A") == False + assert service_record("") is False + assert service_record("localhost") is False + assert service_record("www.example.com") is False + assert service_record("www.example.com", "SRV") is True + assert service_record("_custom._service.example.com", "SRV") is True + assert service_record("_custom._service.example.com", "A") is False # top 100 most common SRV records for srv_record in common_srvs[:100]: hostname = f"{srv_record}.example.com" - assert service_record(hostname) == True + assert service_record(hostname) is True # make sure system nameservers are excluded from use by DNS brute force brute_nameservers = tempwordlist(["1.2.3.4", "8.8.4.4", "4.3.2.1", "8.8.8.8"]) diff --git a/bbot/test/test_step_1/test_engine.py b/bbot/test/test_step_1/test_engine.py index 1b44049c9..dbb21246f 100644 --- a/bbot/test/test_step_1/test_engine.py +++ b/bbot/test/test_step_1/test_engine.py @@ -72,7 +72,7 @@ async def yield_stuff(self, n): # test async generator assert counter == 0 - assert yield_cancelled == False + assert yield_cancelled is False yield_res = [r async for r in test_engine.yield_stuff(13)] assert yield_res == [f"thing{i}" for i in range(13)] assert len(yield_res) == 13 @@ -88,8 +88,8 @@ async def yield_stuff(self, n): await agen.aclose() break await asyncio.sleep(5) - assert yield_cancelled == True - assert yield_errored == False + assert yield_cancelled is True + assert yield_errored is False assert counter < 15 # test async generator with error @@ -99,8 +99,8 @@ async def yield_stuff(self, n): with pytest.raises(BBOTEngineError): async for _ in agen: pass - assert yield_cancelled == False - assert yield_errored == True + assert yield_cancelled is False + assert yield_errored is True # test return with cancellation return_started = False @@ -113,10 +113,10 @@ async def yield_stuff(self, n): with pytest.raises(asyncio.CancelledError): await task await asyncio.sleep(0.1) - assert return_started == True - assert return_finished == 
False - assert return_cancelled == True - assert return_errored == False + assert return_started is True + assert return_finished is False + assert return_cancelled is True + assert return_errored is False # test return with late cancellation return_started = False @@ -128,10 +128,10 @@ async def yield_stuff(self, n): task.cancel() result = await task assert result == "thing1" - assert return_started == True - assert return_finished == True - assert return_cancelled == False - assert return_errored == False + assert return_started is True + assert return_finished is True + assert return_cancelled is False + assert return_errored is False # test return with error return_started = False @@ -140,9 +140,9 @@ async def yield_stuff(self, n): return_errored = False with pytest.raises(BBOTEngineError): result = await test_engine.return_thing(None) - assert return_started == True - assert return_finished == False - assert return_cancelled == False - assert return_errored == True + assert return_started is True + assert return_finished is False + assert return_cancelled is False + assert return_errored is True await test_engine.shutdown() diff --git a/bbot/test/test_step_1/test_events.py b/bbot/test/test_step_1/test_events.py index 8156fc796..6add85ccf 100644 --- a/bbot/test/test_step_1/test_events.py +++ b/bbot/test/test_step_1/test_events.py @@ -81,8 +81,8 @@ async def test_events(events, helpers): assert "fsocie.ty" not in events.subdomain assert events.subdomain in events.domain assert events.domain not in events.subdomain - assert not events.ipv4 in events.domain - assert not events.netv6 in events.domain + assert events.ipv4 not in events.domain + assert events.netv6 not in events.domain assert events.emoji not in events.domain assert events.domain not in events.emoji open_port_event = scan.make_event(" eViLcorp.COM.:88", "DNS_NAME", dummy=True) @@ -207,7 +207,7 @@ async def test_events(events, helpers): # scope distance event1 = scan.make_event("1.2.3.4", dummy=True) - assert event1._scope_distance == None + assert event1._scope_distance is None event1.scope_distance = 0 assert event1._scope_distance == 0 event2 = scan.make_event("2.3.4.5", parent=event1) @@ -228,7 +228,7 @@ async def test_events(events, helpers): org_stub_1 = scan.make_event("STUB1", "ORG_STUB", parent=scan.root_event) org_stub_1.scope_distance == 1 - assert org_stub_1.netloc == None + assert org_stub_1.netloc is None assert "netloc" not in org_stub_1.json() org_stub_2 = scan.make_event("STUB2", "ORG_STUB", parent=org_stub_1) org_stub_2.scope_distance == 2 @@ -237,7 +237,7 @@ async def test_events(events, helpers): root_event = scan.make_event("0.0.0.0", dummy=True) root_event.scope_distance = 0 internal_event1 = scan.make_event("1.2.3.4", parent=root_event, internal=True) - assert internal_event1._internal == True + assert internal_event1._internal is True assert "internal" in internal_event1.tags # tag inheritance @@ -269,8 +269,8 @@ async def test_events(events, helpers): # updating module event3 = scan.make_event("127.0.0.1", parent=scan.root_event) updated_event = scan.make_event(event3, internal=True) - assert event3.internal == False - assert updated_event.internal == True + assert event3.internal is False + assert updated_event.internal is True # event sorting parent1 = scan.make_event("127.0.0.1", parent=scan.root_event) @@ -527,7 +527,7 @@ async def test_events(events, helpers): hostless_event_json = hostless_event.json() assert hostless_event_json["type"] == "ASDF" assert hostless_event_json["data"] == "asdf" - 
assert not "host" in hostless_event_json + assert "host" not in hostless_event_json # SIEM-friendly serialize/deserialize json_event_siemfriendly = db_event.json(siem_friendly=True) @@ -805,7 +805,7 @@ async def test_event_web_spider_distance(bbot_scanner): ) assert url_event_3.web_spider_distance == 1 assert "spider-danger" in url_event_3.tags - assert not "spider-max" in url_event_3.tags + assert "spider-max" not in url_event_3.tags social_event = scan.make_event( {"platform": "github", "url": "http://www.evilcorp.com/test4"}, "SOCIAL", parent=url_event_3 ) @@ -828,42 +828,42 @@ async def test_event_web_spider_distance(bbot_scanner): url_event = scan.make_event("http://www.evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event) assert url_event.web_spider_distance == 0 - assert not "spider-danger" in url_event.tags - assert not "spider-max" in url_event.tags + assert "spider-danger" not in url_event.tags + assert "spider-max" not in url_event.tags url_event_2 = scan.make_event( "http://www.evilcorp.com", "URL_UNVERIFIED", parent=scan.root_event, tags="spider-danger" ) # spider distance shouldn't increment because it's not the same host assert url_event_2.web_spider_distance == 0 assert "spider-danger" in url_event_2.tags - assert not "spider-max" in url_event_2.tags + assert "spider-max" not in url_event_2.tags url_event_3 = scan.make_event( "http://www.evilcorp.com/3", "URL_UNVERIFIED", parent=url_event_2, tags="spider-danger" ) assert url_event_3.web_spider_distance == 1 assert "spider-danger" in url_event_3.tags - assert not "spider-max" in url_event_3.tags + assert "spider-max" not in url_event_3.tags url_event_4 = scan.make_event("http://evilcorp.com", "URL_UNVERIFIED", parent=url_event_3) assert url_event_4.web_spider_distance == 0 - assert not "spider-danger" in url_event_4.tags - assert not "spider-max" in url_event_4.tags + assert "spider-danger" not in url_event_4.tags + assert "spider-max" not in url_event_4.tags url_event_4.add_tag("spider-danger") assert url_event_4.web_spider_distance == 0 assert "spider-danger" in url_event_4.tags - assert not "spider-max" in url_event_4.tags + assert "spider-max" not in url_event_4.tags url_event_4.remove_tag("spider-danger") assert url_event_4.web_spider_distance == 0 - assert not "spider-danger" in url_event_4.tags - assert not "spider-max" in url_event_4.tags + assert "spider-danger" not in url_event_4.tags + assert "spider-max" not in url_event_4.tags url_event_5 = scan.make_event("http://evilcorp.com/5", "URL_UNVERIFIED", parent=url_event_4) assert url_event_5.web_spider_distance == 0 - assert not "spider-danger" in url_event_5.tags - assert not "spider-max" in url_event_5.tags + assert "spider-danger" not in url_event_5.tags + assert "spider-max" not in url_event_5.tags url_event_5.add_tag("spider-danger") # if host is the same as parent, web spider distance should auto-increment after adding spider-danger tag assert url_event_5.web_spider_distance == 1 assert "spider-danger" in url_event_5.tags - assert not "spider-max" in url_event_5.tags + assert "spider-max" not in url_event_5.tags def test_event_confidence(): diff --git a/bbot/test/test_step_1/test_helpers.py b/bbot/test/test_step_1/test_helpers.py index 76cf63517..a115e6f26 100644 --- a/bbot/test/test_step_1/test_helpers.py +++ b/bbot/test/test_step_1/test_helpers.py @@ -64,8 +64,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert not helpers.is_subdomain("notreal") assert helpers.is_url("http://evilcorp.co.uk/asdf?a=b&c=d#asdf") assert 
helpers.is_url("https://evilcorp.co.uk/asdf?a=b&c=d#asdf") - assert helpers.is_uri("ftp://evilcorp.co.uk") == True - assert helpers.is_uri("http://evilcorp.co.uk") == True + assert helpers.is_uri("ftp://evilcorp.co.uk") is True + assert helpers.is_uri("http://evilcorp.co.uk") is True assert helpers.is_uri("evilcorp.co.uk", return_scheme=True) == "" assert helpers.is_uri("ftp://evilcorp.co.uk", return_scheme=True) == "ftp" assert helpers.is_uri("FTP://evilcorp.co.uk", return_scheme=True) == "ftp" @@ -283,7 +283,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): replaced = helpers.search_format_dict( {"asdf": [{"wat": {"here": "#{replaceme}!"}}, {500: True}]}, replaceme="asdf" ) - assert replaced["asdf"][1][500] == True + assert replaced["asdf"][1][500] is True assert replaced["asdf"][0]["wat"]["here"] == "asdf!" filtered_dict = helpers.filter_dict( @@ -315,7 +315,7 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): fuzzy=True, exclude_keys="modules", ) - assert not "secrets_db" in filtered_dict4["modules"] + assert "secrets_db" not in filtered_dict4["modules"] assert "ipneighbor" in filtered_dict4["modules"] assert "secret" in filtered_dict4["modules"]["ipneighbor"] assert "asdf" not in filtered_dict4["modules"]["ipneighbor"] @@ -408,15 +408,15 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert helpers.validators.validate_host("LOCALHOST ") == "localhost" assert helpers.validators.validate_host(" 192.168.1.1") == "192.168.1.1" assert helpers.validators.validate_host(" Dead::c0dE ") == "dead::c0de" - assert helpers.validators.soft_validate(" evilCorp.COM", "host") == True - assert helpers.validators.soft_validate("!@#$", "host") == False + assert helpers.validators.soft_validate(" evilCorp.COM", "host") is True + assert helpers.validators.soft_validate("!@#$", "host") is False with pytest.raises(ValueError): assert helpers.validators.validate_host("!@#$") # ports assert helpers.validators.validate_port(666) == 666 assert helpers.validators.validate_port(666666) == 65535 - assert helpers.validators.soft_validate(666, "port") == True - assert helpers.validators.soft_validate("!@#$", "port") == False + assert helpers.validators.soft_validate(666, "port") is True + assert helpers.validators.soft_validate("!@#$", "port") is False with pytest.raises(ValueError): helpers.validators.validate_port("asdf") # top tcp ports @@ -437,20 +437,20 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): helpers.validators.validate_url_parsed(" httP://evilcorP.com/asdf?a=b&c=d#e").geturl() == "http://evilcorp.com/asdf" ) - assert helpers.validators.soft_validate(" httP://evilcorP.com/asdf?a=b&c=d#e", "url") == True - assert helpers.validators.soft_validate("!@#$", "url") == False + assert helpers.validators.soft_validate(" httP://evilcorP.com/asdf?a=b&c=d#e", "url") is True + assert helpers.validators.soft_validate("!@#$", "url") is False with pytest.raises(ValueError): helpers.validators.validate_url("!@#$") # severities assert helpers.validators.validate_severity(" iNfo") == "INFO" - assert helpers.validators.soft_validate(" iNfo", "severity") == True - assert helpers.validators.soft_validate("NOPE", "severity") == False + assert helpers.validators.soft_validate(" iNfo", "severity") is True + assert helpers.validators.soft_validate("NOPE", "severity") is False with pytest.raises(ValueError): helpers.validators.validate_severity("NOPE") # emails assert helpers.validators.validate_email(" 
bOb@eViLcorp.COM") == "bob@evilcorp.com" - assert helpers.validators.soft_validate(" bOb@eViLcorp.COM", "email") == True - assert helpers.validators.soft_validate("!@#$", "email") == False + assert helpers.validators.soft_validate(" bOb@eViLcorp.COM", "email") is True + assert helpers.validators.soft_validate("!@#$", "email") is False with pytest.raises(ValueError): helpers.validators.validate_email("!@#$") @@ -533,9 +533,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): truncated_filename.unlink() # misc DNS helpers - assert helpers.is_ptr("wsc-11-22-33-44-wat.evilcorp.com") == True - assert helpers.is_ptr("wsc-11-22-33-wat.evilcorp.com") == False - assert helpers.is_ptr("11wat.evilcorp.com") == False + assert helpers.is_ptr("wsc-11-22-33-44-wat.evilcorp.com") is True + assert helpers.is_ptr("wsc-11-22-33-wat.evilcorp.com") is False + assert helpers.is_ptr("11wat.evilcorp.com") is False ## NTLM testheader = "TlRMTVNTUAACAAAAHgAeADgAAAAVgorilwL+bvnVipUAAAAAAAAAAJgAmABWAAAACgBjRQAAAA9XAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAACAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgAAQAeAFcASQBOAC0AUwA0ADIATgBPAEIARABWAFQASwA4AAQAHgBXAEkATgAtAFMANAAyAE4ATwBCAEQAVgBUAEsAOAADAB4AVwBJAE4ALQBTADQAMgBOAE8AQgBEAFYAVABLADgABwAIAHUwOZlfoNgBAAAAAA==" @@ -613,8 +613,8 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert len(helpers.get_exception_chain(e)) == 2 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, KeyboardInterrupt)]) == 1 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 - assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) == True - assert helpers.in_exception_chain(e, (TypeError, OSError)) == False + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) is True + assert helpers.in_exception_chain(e, (TypeError, OSError)) is False test_ran = True assert test_ran test_ran = False @@ -627,9 +627,9 @@ async def test_helpers_misc(helpers, scan, bbot_scanner, bbot_httpserver): assert len(helpers.get_exception_chain(e)) == 2 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, AttributeError)]) == 1 assert len([_ for _ in helpers.get_exception_chain(e) if isinstance(_, ValueError)]) == 1 - assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) == False - assert helpers.in_exception_chain(e, (KeyboardInterrupt, AttributeError)) == True - assert helpers.in_exception_chain(e, (AttributeError,)) == True + assert helpers.in_exception_chain(e, (KeyboardInterrupt, asyncio.CancelledError)) is False + assert helpers.in_exception_chain(e, (KeyboardInterrupt, AttributeError)) is True + assert helpers.in_exception_chain(e, (AttributeError,)) is True test_ran = True assert test_ran @@ -886,7 +886,7 @@ async def test_parameter_validation(helpers): if helpers.validate_parameter(p, "getparam"): assert p in getparam_valid_params and p not in getparam_invalid_params else: - assert p in getparam_invalid_params and not p in getparam_valid_params + assert p in getparam_invalid_params and p not in getparam_valid_params header_valid_params = { "name", @@ -917,7 +917,7 @@ async def test_parameter_validation(helpers): if helpers.validate_parameter(p, "header"): assert p in header_valid_params and p not in header_invalid_params else: - assert p in header_invalid_params and not p in header_valid_params + assert p in header_invalid_params and p not in header_valid_params cookie_valid_params = { "name", @@ -947,4 
+947,4 @@ async def test_parameter_validation(helpers): if helpers.validate_parameter(p, "cookie"): assert p in cookie_valid_params and p not in cookie_invalid_params else: - assert p in cookie_invalid_params and not p in cookie_valid_params + assert p in cookie_invalid_params and p not in cookie_valid_params diff --git a/bbot/test/test_step_1/test_manager_scope_accuracy.py b/bbot/test/test_step_1/test_manager_scope_accuracy.py index 62a03c0ef..0fb8bb6b2 100644 --- a/bbot/test/test_step_1/test_manager_scope_accuracy.py +++ b/bbot/test/test_step_1/test_manager_scope_accuracy.py @@ -140,7 +140,7 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) ) assert len(events) == 3 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal"]) @@ -148,14 +148,14 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) for _all_events in (all_events, all_events_nodups): assert len(_all_events) == 3 - assert 1 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) assert 0 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in _all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal"]) assert 0 == len([e for e in _all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) assert len(graph_output_events) == 3 - assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal"]) @@ -169,38 +169,38 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) ) assert len(events) == 4 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66"]) assert 0 == len([e for e in events if e.type == 
"DNS_NAME" and e.data == "test.notrealzies"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events) == 9 - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) - assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) + assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events_nodups) == 7 - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == 
"test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 6 - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) @@ -213,39 +213,39 @@ async def do_scan(*args, _config={}, _dns_mock={}, scan_callback=None, **kwargs) ) assert len(events) == 7 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data 
== "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events) == 8 - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert len(all_events_nodups) == 7 - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == 
len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test2.notrealzies" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test2.notrealzies"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) @@ -284,33 +284,33 @@ def custom_setup(scan): ) assert len(events) == 5 - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and 
e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notrealzies"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77"]) - assert 1 == len([e for e in events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) assert len(all_events) == 8 - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 2]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 3]) - assert 1 == len([e for e in all_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 2 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 2]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 3]) + assert 1 == len([e for e in all_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) assert len(all_events_nodups) == 6 - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 3]) - assert 1 == len([e for e in all_events_nodups if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and 
e.internal is True and e.scope_distance == 3]) + assert 1 == len([e for e in all_events_nodups if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == True and e.scope_distance == 3]) - assert 1 == len([e for e in _graph_output_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal == False and e.scope_distance == 3]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.66" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notrealzies" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is True and e.scope_distance == 3]) + assert 1 == len([e for e in _graph_output_events if e.type == "VULNERABILITY" and e.data["host"] == "127.0.0.77" and e.internal is False and e.scope_distance == 3]) # httpx/speculate IP_RANGE --> IP_ADDRESS --> OPEN_TCP_PORT --> URL, search distance = 0 events, all_events, all_events_nodups, graph_output_events, graph_output_batch_events = await do_scan( @@ -328,56 +328,56 @@ def custom_setup(scan): ) assert len(events) == 7 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in 
events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) assert len(all_events) == 14 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == 
"http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) assert len(all_events_nodups) == 12 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == 
"URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) # httpx/speculate IP_RANGE --> IP_ADDRESS --> OPEN_TCP_PORT --> URL, search distance = 0, in_scope_only = False @@ -400,70 +400,70 @@ def custom_setup(scan): # before, this event was speculated off the URL_UNVERIFIED, and that's what was used by httpx to generate the URL. it was graph-important. # now for whatever reason, httpx is visiting the url directly and the open port isn't being used # I don't know what changed exactly, but it doesn't matter, either way is equally valid and bbot is meant to be flexible this way. 
- assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.77:8888"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) assert len(all_events) == 18 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and 
e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True 
and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) assert len(all_events_nodups) == 16 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and 
e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 8 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and "spider-danger" in e.tags]) - assert 
1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/"]) @@ -483,78 +483,78 @@ def custom_setup(scan): ) assert len(events) == 8 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == 
"HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) assert len(all_events) == 22 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal == True and e.scope_distance == 3]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + 
assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal is True and e.scope_distance == 3]) assert len(all_events_nodups) == 20 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" 
and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1 and "spider-danger" in e.tags]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal == True and e.scope_distance == 3]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e 
for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1 and "spider-danger" in e.tags]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.88" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.88:8888" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.88:8888/" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.99:8888/" and e.internal is True and e.scope_distance == 3]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 8 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.1:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.1:8888"]) assert 0 == len([e for e 
in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.1:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.77:8888/"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.77" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.77:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.77:8888/" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.77:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.88"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.88:8888/"]) @@ -576,25 +576,25 @@ def custom_setup(scan): ) assert len(events) == 12 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888"]) - assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/"]) - assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == 
"127.0.0.33" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889"]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.222:8889"]) - assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.33:8889"]) assert 0 == len([e for e in events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44"]) @@ -604,71 +604,71 @@ def custom_setup(scan): assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888"]) assert len(all_events) == 31 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" 
and e.data == "127.0.0.33" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and 
e.data["input"] == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal is True and e.scope_distance == 1]) assert len(all_events_nodups) == 27 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0]) - 
assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal == True 
and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal == True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.110" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.222" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.33" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and 
e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "HTTP_RESPONSE" and e.data["url"] == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.44" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.55:8888/" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.55" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.44:8888" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.55:8888" and e.internal is True and e.scope_distance == 1]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 12 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.110/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.110"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.111" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.110:8888"]) - assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.111:8888" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.111:8888/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.111:8888"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.111:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.222:8889/"]) @@ -679,9 +679,9 @@ def custom_setup(scan): assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.222:8889"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8888"]) assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.33:8889"]) - assert 1 == len([e for 
e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.222:8889/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.222:8889"]) - assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "URL" and e.data == "http://127.0.0.33:8889/" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "HTTP_RESPONSE" and e.data["input"] == "127.0.0.33:8889"]) assert 0 == len([e for e in _graph_output_events if e.type == "URL_UNVERIFIED" and e.data == "http://127.0.0.44:8888/"]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.44"]) @@ -699,49 +699,49 @@ def custom_setup(scan): ) assert len(events) == 7 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) assert 1 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) assert 1 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert" and "affiliate" in e.tags]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert" and "affiliate" in e.tags]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) assert 0 == len([e for e in events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "notreal"]) assert len(all_events) == 13 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 0]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == False and e.scope_distance == 0]) - assert 
1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 0]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal is True and e.scope_distance == 1 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal is True and e.scope_distance == 2 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) assert len(all_events_nodups) == 11 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and 
e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal == True and e.scope_distance == 1 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal == True and e.scope_distance == 2 and str(e.module) == "speculate"]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999" and e.internal is True and e.scope_distance == 1 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal" and e.internal is True and e.scope_distance == 2 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 7 - assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is False and e.scope_distance == 0]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) - assert 
1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == False and e.scope_distance == 0]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == False and e.scope_distance == 1 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is False and e.scope_distance == 0]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in _graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is False and e.scope_distance == 1 and str(e.module) == "sslcert"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "www.bbottest.notreal:9999"]) assert 0 == len([e for e in _graph_output_events if e.type == "DNS_NAME_UNRESOLVED" and e.data == "bbottest.notreal"]) assert 0 == len([e for e in _graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) @@ -756,43 +756,43 @@ def custom_setup(scan): ) assert len(events) == 4 - assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) assert 0 == len([e for e in events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999"]) - assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) assert len(all_events) == 11 - assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 2]) - assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 2]) - assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == 
"test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 3 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 2]) + assert 2 == len([e for e in all_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 2]) + assert 2 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is True and e.scope_distance == 3 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) assert len(all_events_nodups) == 9 - assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal == True and e.scope_distance == 2]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal == True and e.scope_distance == 3 and str(e.module) == "sslcert"]) - assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal == True and e.scope_distance == 0 and str(e.module) == "speculate"]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.0" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal 
is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999" and e.internal is True and e.scope_distance == 2]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal" and e.internal is True and e.scope_distance == 3 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in all_events_nodups if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999" and e.internal is True and e.scope_distance == 0 and str(e.module) == "speculate"]) for _graph_output_events in (graph_output_events, graph_output_batch_events): assert len(_graph_output_events) == 6 - assert 1 == len([e for e in graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal == False and e.scope_distance == 1]) + assert 1 == len([e for e in graph_output_events if e.type == "IP_RANGE" and e.data == "127.0.0.0/31" and e.internal is False and e.scope_distance == 1]) assert 0 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.0"]) - assert 1 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal == True and e.scope_distance == 2]) + assert 1 == len([e for e in graph_output_events if e.type == "IP_ADDRESS" and e.data == "127.0.0.1" and e.internal is True and e.scope_distance == 2]) assert 0 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.0:9999"]) - assert 1 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal == True and e.scope_distance == 1]) - assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal == False and e.scope_distance == 0 and str(e.module) == "sslcert"]) + assert 1 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "127.0.0.1:9999" and e.internal is True and e.scope_distance == 1]) + assert 1 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "test.notreal" and e.internal is False and e.scope_distance == 0 and str(e.module) == "sslcert"]) assert 0 == len([e for e in graph_output_events if e.type == "DNS_NAME" and e.data == "www.bbottest.notreal"]) assert 0 == len([e for e in graph_output_events if e.type == "OPEN_TCP_PORT" and e.data == "test.notreal:9999"]) diff --git a/bbot/test/test_step_1/test_modules_basic.py b/bbot/test/test_step_1/test_modules_basic.py index 421234069..83ba778e4 100644 --- a/bbot/test/test_step_1/test_modules_basic.py +++ b/bbot/test/test_step_1/test_modules_basic.py @@ -23,27 +23,27 @@ async def test_modules_basic_checks(events, httpx_mock): localhost = scan.make_event("127.0.0.1", parent=scan.root_event) # ip addresses should be accepted result, reason = base_output_module_1._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # internal events should be rejected localhost._internal = True result, reason = base_output_module_1._event_precheck(localhost) - assert result == False + assert result is False assert reason == 
"_internal is True" localhost._internal = False result, reason = base_output_module_1._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # unwatched events should be rejected dns_name = scan.make_event("evilcorp.com", parent=scan.root_event) result, reason = base_output_module_1._event_precheck(dns_name) - assert result == False + assert result is False assert reason == "its type is not in watched_events" # omitted events matching watched types should be accepted url_unverified = scan.make_event("http://127.0.0.1", "URL_UNVERIFIED", parent=scan.root_event) url_unverified._omit = True result, reason = base_output_module_1._event_precheck(url_unverified) - assert result == True + assert result is True assert reason == "its type is explicitly in watched_events" base_output_module_2 = BaseOutputModule(scan) @@ -51,42 +51,42 @@ async def test_modules_basic_checks(events, httpx_mock): # normal events should be accepted localhost = scan.make_event("127.0.0.1", parent=scan.root_event) result, reason = base_output_module_2._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # internal events should be rejected localhost._internal = True result, reason = base_output_module_2._event_precheck(localhost) - assert result == False + assert result is False assert reason == "_internal is True" localhost._internal = False result, reason = base_output_module_2._event_precheck(localhost) - assert result == True + assert result is True assert reason == "precheck succeeded" # omitted events should be rejected localhost._omit = True result, reason = base_output_module_2._event_precheck(localhost) - assert result == False + assert result is False assert reason == "_omit is True" # normal event should be accepted url_unverified = scan.make_event("http://127.0.0.1", "URL_UNVERIFIED", parent=scan.root_event) result, reason = base_output_module_2._event_precheck(url_unverified) - assert result == True + assert result is True assert reason == "precheck succeeded" # omitted event types should be marked during scan egress await scan.egress_module.handle_event(url_unverified) result, reason = base_output_module_2._event_precheck(url_unverified) - assert result == False + assert result is False assert reason == "_omit is True" # omitted events that are targets should be accepted dns_name = scan.make_event("evilcorp.com", "DNS_NAME", parent=scan.root_event) dns_name._omit = True result, reason = base_output_module_2._event_precheck(dns_name) - assert result == False + assert result is False assert reason == "_omit is True" # omitted results that are targets should be accepted dns_name.add_tag("target") result, reason = base_output_module_2._event_precheck(dns_name) - assert result == True + assert result is True assert reason == "it's a target" # common event filtering tests @@ -97,18 +97,18 @@ async def test_modules_basic_checks(events, httpx_mock): # base cases base_module._watched_events = None base_module.watched_events = ["*"] - assert base_module._event_precheck(events.emoji)[0] == True + assert base_module._event_precheck(events.emoji)[0] is True base_module._watched_events = None base_module.watched_events = ["IP_ADDRESS"] - assert base_module._event_precheck(events.ipv4)[0] == True - assert base_module._event_precheck(events.domain)[0] == False - assert base_module._event_precheck(events.localhost)[0] == True - assert base_module._event_precheck(localhost2)[0] == True + assert 
base_module._event_precheck(events.ipv4)[0] is True + assert base_module._event_precheck(events.domain)[0] is False + assert base_module._event_precheck(events.localhost)[0] is True + assert base_module._event_precheck(localhost2)[0] is True # target only base_module.target_only = True - assert base_module._event_precheck(localhost2)[0] == False + assert base_module._event_precheck(localhost2)[0] is False localhost2.add_tag("target") - assert base_module._event_precheck(localhost2)[0] == True + assert base_module._event_precheck(localhost2)[0] is True base_module.target_only = False # in scope only @@ -147,11 +147,11 @@ async def test_modules_basic_checks(events, httpx_mock): for flag in flags: all_flags.add(flag) if preloaded["type"] == "scan": - assert ("active" in flags and not "passive" in flags) or ( - not "active" in flags and "passive" in flags + assert ("active" in flags and "passive" not in flags) or ( + "active" not in flags and "passive" in flags ), f'module "{module_name}" must have either "active" or "passive" flag' - assert ("safe" in flags and not "aggressive" in flags) or ( - not "safe" in flags and "aggressive" in flags + assert ("safe" in flags and "aggressive" not in flags) or ( + "safe" not in flags and "aggressive" in flags ), f'module "{module_name}" must have either "safe" or "aggressive" flag' assert not ( "web-basic" in flags and "web-thorough" in flags @@ -174,7 +174,7 @@ async def test_modules_basic_checks(events, httpx_mock): assert type(watched_events) == list assert type(produced_events) == list - if not preloaded.get("type", "") in ("internal",): + if preloaded.get("type", "") not in ("internal",): assert watched_events, f"{module_name}.watched_events must not be empty" assert type(watched_events) == list, f"{module_name}.watched_events must be of type list" assert type(produced_events) == list, f"{module_name}.produced_events must be of type list" @@ -268,35 +268,35 @@ class mod_domain_only(BaseModule): valid_5, reason_5 = await module._event_postcheck(url_5) if mod_name == "mod_normal": - assert valid_1 == True - assert valid_2 == True - assert valid_3 == True - assert valid_4 == True - assert valid_5 == True + assert valid_1 is True + assert valid_2 is True + assert valid_3 is True + assert valid_4 is True + assert valid_5 is True elif mod_name == "mod_host_only": - assert valid_1 == True - assert valid_2 == False + assert valid_1 is True + assert valid_2 is False assert "per_host_only=True" in reason_2 - assert valid_3 == False + assert valid_3 is False assert "per_host_only=True" in reason_3 - assert valid_4 == True - assert valid_5 == True + assert valid_4 is True + assert valid_5 is True elif mod_name == "mod_hostport_only": - assert valid_1 == True - assert valid_2 == False + assert valid_1 is True + assert valid_2 is False assert "per_hostport_only=True" in reason_2 - assert valid_3 == True - assert valid_4 == True - assert valid_5 == True + assert valid_3 is True + assert valid_4 is True + assert valid_5 is True elif mod_name == "mod_domain_only": - assert valid_1 == True - assert valid_2 == False + assert valid_1 is True + assert valid_2 is False assert "per_domain_only=True" in reason_2 - assert valid_3 == False + assert valid_3 is False assert "per_domain_only=True" in reason_3 - assert valid_4 == False + assert valid_4 is False assert "per_domain_only=True" in reason_4 - assert valid_5 == True + assert valid_5 is True await scan._cleanup() @@ -331,15 +331,15 @@ async def test_modules_basic_perdomainonly(bbot_scanner, monkeypatch): valid_1, 
reason_1 = await module._event_postcheck(url_1) valid_2, reason_2 = await module._event_postcheck(url_2) - if module.per_domain_only == True: - assert valid_1 == True - assert valid_2 == False + if module.per_domain_only is True: + assert valid_1 is True + assert valid_2 is False assert hash("evilcorp.com") in module._per_host_tracker assert reason_2 == "per_domain_only enabled and already seen domain" else: - assert valid_1 == True - assert valid_2 == True + assert valid_1 is True + assert valid_2 is True await per_domain_scan._cleanup() @@ -397,7 +397,6 @@ async def handle_event(self, event): "ORG_STUB": 1, "URL_UNVERIFIED": 1, "FINDING": 1, - "ORG_STUB": 1, } assert set(scan.stats.module_stats) == {"speculate", "host", "TARGET", "python", "dummy", "dnsresolve"} diff --git a/bbot/test/test_step_1/test_presets.py b/bbot/test/test_step_1/test_presets.py index 1b11529ea..c81802678 100644 --- a/bbot/test/test_step_1/test_presets.py +++ b/bbot/test/test_step_1/test_presets.py @@ -182,7 +182,7 @@ def test_preset_scope(): blank_preset = blank_preset.bake() assert not blank_preset.target.seeds assert not blank_preset.target.whitelist - assert blank_preset.strict_scope == False + assert blank_preset.strict_scope is False preset1 = Preset( "evilcorp.com", @@ -194,13 +194,13 @@ def test_preset_scope(): # make sure target logic works as expected assert "evilcorp.com" in preset1_baked.target.seeds - assert not "evilcorp.com" in preset1_baked.target.whitelist + assert "evilcorp.com" not in preset1_baked.target.whitelist assert "asdf.evilcorp.com" in preset1_baked.target.seeds - assert not "asdf.evilcorp.com" in preset1_baked.target.whitelist + assert "asdf.evilcorp.com" not in preset1_baked.target.whitelist assert "asdf.evilcorp.ce" in preset1_baked.whitelist assert "evilcorp.ce" in preset1_baked.whitelist assert "test.www.evilcorp.ce" in preset1_baked.blacklist - assert not "evilcorp.ce" in preset1_baked.blacklist + assert "evilcorp.ce" not in preset1_baked.blacklist assert preset1_baked.in_scope("www.evilcorp.ce") assert not preset1_baked.in_scope("evilcorp.com") assert not preset1_baked.in_scope("asdf.test.www.evilcorp.ce") @@ -228,20 +228,20 @@ def test_preset_scope(): assert "www.evilcorp.ce" in preset1_baked.target.seeds assert "evilcorp.org" in preset1_baked.target.seeds # strict scope is enabled - assert not "asdf.www.evilcorp.ce" in preset1_baked.target.seeds - assert not "asdf.evilcorp.org" in preset1_baked.target.seeds - assert not "asdf.evilcorp.com" in preset1_baked.target.seeds - assert not "asdf.www.evilcorp.ce" in preset1_baked.target.seeds + assert "asdf.www.evilcorp.ce" not in preset1_baked.target.seeds + assert "asdf.evilcorp.org" not in preset1_baked.target.seeds + assert "asdf.evilcorp.com" not in preset1_baked.target.seeds + assert "asdf.www.evilcorp.ce" not in preset1_baked.target.seeds assert "evilcorp.ce" in preset1_baked.whitelist assert "evilcorp.de" in preset1_baked.whitelist - assert not "asdf.evilcorp.de" in preset1_baked.whitelist - assert not "asdf.evilcorp.ce" in preset1_baked.whitelist + assert "asdf.evilcorp.de" not in preset1_baked.whitelist + assert "asdf.evilcorp.ce" not in preset1_baked.whitelist # blacklist should be merged, strict scope does not apply assert "test.www.evilcorp.ce" in preset1_baked.blacklist assert "test.www.evilcorp.de" in preset1_baked.blacklist assert "asdf.test.www.evilcorp.ce" in preset1_baked.blacklist assert "asdf.test.www.evilcorp.de" in preset1_baked.blacklist - assert not "asdf.test.www.evilcorp.org" in preset1_baked.blacklist + assert 
"asdf.test.www.evilcorp.org" not in preset1_baked.blacklist # only the base domain of evilcorp.de should be in scope assert not preset1_baked.in_scope("evilcorp.com") assert not preset1_baked.in_scope("evilcorp.org") @@ -290,7 +290,7 @@ def test_preset_scope(): assert not preset_nowhitelist_baked.in_scope("1.2.3.4/24") assert "www.evilcorp.org" in preset_whitelist_baked.target.seeds - assert not "www.evilcorp.org" in preset_whitelist_baked.target.whitelist + assert "www.evilcorp.org" not in preset_whitelist_baked.target.whitelist assert "1.2.3.4" in preset_whitelist_baked.whitelist assert not preset_whitelist_baked.in_scope("www.evilcorp.org") assert not preset_whitelist_baked.in_scope("www.evilcorp.de") @@ -329,8 +329,8 @@ def test_preset_scope(): assert set([e.data for e in preset_whitelist_baked.whitelist]) == {"1.2.3.0/24"} assert "www.evilcorp.org" in preset_whitelist_baked.seeds assert "www.evilcorp.com" in preset_whitelist_baked.seeds - assert not "www.evilcorp.org" in preset_whitelist_baked.target.whitelist - assert not "www.evilcorp.com" in preset_whitelist_baked.target.whitelist + assert "www.evilcorp.org" not in preset_whitelist_baked.target.whitelist + assert "www.evilcorp.com" not in preset_whitelist_baked.target.whitelist assert "1.2.3.4" in preset_whitelist_baked.whitelist assert not preset_whitelist_baked.in_scope("www.evilcorp.org") assert not preset_whitelist_baked.in_scope("www.evilcorp.com") @@ -387,30 +387,30 @@ async def test_preset_logging(): try: silent_preset = Preset(silent=True) - assert silent_preset.silent == True - assert silent_preset.debug == False - assert silent_preset.verbose == False + assert silent_preset.silent is True + assert silent_preset.debug is False + assert silent_preset.verbose is False assert original_log_level == CORE.logger.log_level debug_preset = Preset(debug=True) - assert debug_preset.silent == False - assert debug_preset.debug == True - assert debug_preset.verbose == False + assert debug_preset.silent is False + assert debug_preset.debug is True + assert debug_preset.verbose is False assert original_log_level == CORE.logger.log_level verbose_preset = Preset(verbose=True) - assert verbose_preset.silent == False - assert verbose_preset.debug == False - assert verbose_preset.verbose == True + assert verbose_preset.silent is False + assert verbose_preset.debug is False + assert verbose_preset.verbose is True assert original_log_level == CORE.logger.log_level # test conflicting verbosity levels silent_and_verbose = Preset(silent=True, verbose=True) - assert silent_and_verbose.silent == True - assert silent_and_verbose.debug == False - assert silent_and_verbose.verbose == True + assert silent_and_verbose.silent is True + assert silent_and_verbose.debug is False + assert silent_and_verbose.verbose is True baked = silent_and_verbose.bake() - assert baked.silent == True - assert baked.debug == False - assert baked.verbose == False + assert baked.silent is True + assert baked.debug is False + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = silent_and_verbose.bake(scan=scan) assert baked.core.logger.log_level == logging.CRITICAL @@ -420,13 +420,13 @@ async def test_preset_logging(): assert CORE.logger.log_level == original_log_level silent_and_debug = Preset(silent=True, debug=True) - assert silent_and_debug.silent == True - assert silent_and_debug.debug == True - assert silent_and_debug.verbose == False + assert silent_and_debug.silent is True + assert silent_and_debug.debug is True + assert 
silent_and_debug.verbose is False baked = silent_and_debug.bake() - assert baked.silent == True - assert baked.debug == False - assert baked.verbose == False + assert baked.silent is True + assert baked.debug is False + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = silent_and_debug.bake(scan=scan) assert baked.core.logger.log_level == logging.CRITICAL @@ -436,13 +436,13 @@ async def test_preset_logging(): assert CORE.logger.log_level == original_log_level debug_and_verbose = Preset(verbose=True, debug=True) - assert debug_and_verbose.silent == False - assert debug_and_verbose.debug == True - assert debug_and_verbose.verbose == True + assert debug_and_verbose.silent is False + assert debug_and_verbose.debug is True + assert debug_and_verbose.verbose is True baked = debug_and_verbose.bake() - assert baked.silent == False - assert baked.debug == True - assert baked.verbose == False + assert baked.silent is False + assert baked.debug is True + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = debug_and_verbose.bake(scan=scan) assert baked.core.logger.log_level == logging.DEBUG @@ -452,13 +452,13 @@ async def test_preset_logging(): assert CORE.logger.log_level == original_log_level all_preset = Preset(verbose=True, debug=True, silent=True) - assert all_preset.silent == True - assert all_preset.debug == True - assert all_preset.verbose == True + assert all_preset.silent is True + assert all_preset.debug is True + assert all_preset.verbose is True baked = all_preset.bake() - assert baked.silent == True - assert baked.debug == False - assert baked.verbose == False + assert baked.silent is True + assert baked.debug is False + assert baked.verbose is False assert baked.core.logger.log_level == original_log_level baked = all_preset.bake(scan=scan) assert baked.core.logger.log_level == logging.CRITICAL @@ -688,7 +688,7 @@ class TestModule5(BaseModule): ) preset = Preset.from_yaml_string( - f""" + """ modules: - testmodule5 """ @@ -896,9 +896,9 @@ def get_module_flags(p): dnsbrute_flags = preset.preloaded_module("dnsbrute").get("flags", []) assert "subdomain-enum" in dnsbrute_flags assert "active" in dnsbrute_flags - assert not "passive" in dnsbrute_flags + assert "passive" not in dnsbrute_flags assert "aggressive" in dnsbrute_flags - assert not "safe" in dnsbrute_flags + assert "safe" not in dnsbrute_flags assert "dnsbrute" in [x[0] for x in module_flags] assert "certspotter" in [x[0] for x in module_flags] assert "c99" in [x[0] for x in module_flags] @@ -912,7 +912,7 @@ def get_module_flags(p): assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) assert "chaos" in [x[0] for x in module_flags] - assert not "httpx" in [x[0] for x in module_flags] + assert "httpx" not in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -923,7 +923,7 @@ def get_module_flags(p): assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) assert "chaos" in [x[0] for x in module_flags] - assert not "httpx" in [x[0] for x in module_flags] + assert "httpx" not in [x[0] for x in module_flags] assert all("passive" in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) @@ -933,7 +933,7 @@ def 
get_module_flags(p): preset = Preset(flags=["subdomain-enum"], exclude_modules=["dnsbrute"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert "httpx" in [x[0] for x in module_flags] assert any("passive" in flags for module, flags in module_flags) assert any("active" in flags for module, flags in module_flags) @@ -944,7 +944,7 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], require_flags=["safe", "passive"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert all("passive" in flags and "safe" in flags for module, flags in module_flags) assert all("active" not in flags and "aggressive" not in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) @@ -954,7 +954,7 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], exclude_flags=["aggressive", "active"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert all("passive" in flags and "safe" in flags for module, flags in module_flags) assert all("active" not in flags and "aggressive" not in flags for module, flags in module_flags) assert not any("active" in flags for module, flags in module_flags) @@ -964,9 +964,9 @@ def get_module_flags(p): preset = Preset(flags=["subdomain-enum"], exclude_modules=["dnsbrute", "c99"]).bake() assert len(preset.modules) > 25 module_flags = list(get_module_flags(preset)) - assert not "dnsbrute" in [x[0] for x in module_flags] + assert "dnsbrute" not in [x[0] for x in module_flags] assert "certspotter" in [x[0] for x in module_flags] - assert not "c99" in [x[0] for x in module_flags] + assert "c99" not in [x[0] for x in module_flags] assert any("passive" in flags for module, flags in module_flags) assert any("active" in flags for module, flags in module_flags) assert any("safe" in flags for module, flags in module_flags) diff --git a/bbot/test/test_step_1/test_scan.py b/bbot/test/test_step_1/test_scan.py index f5f845826..b0f908a7b 100644 --- a/bbot/test/test_step_1/test_scan.py +++ b/bbot/test/test_step_1/test_scan.py @@ -100,13 +100,13 @@ async def test_url_extension_handling(bbot_scanner): assert "blacklisted" not in bad_event.tags assert "httpx-only" not in httpx_event.tags result = await scan.ingress_module.handle_event(good_event) - assert result == None + assert result is None result, reason = await scan.ingress_module.handle_event(bad_event) - assert result == False + assert result is False assert reason == "event is blacklisted" assert "blacklisted" in bad_event.tags result = await scan.ingress_module.handle_event(httpx_event) - assert result == None + assert result is None assert "httpx-only" in httpx_event.tags await scan._cleanup() diff --git a/bbot/test/test_step_1/test_target.py b/bbot/test/test_step_1/test_target.py index 0513d6abe..51343db85 100644 --- a/bbot/test/test_step_1/test_target.py +++ b/bbot/test/test_step_1/test_target.py @@ -106,24 +106,24 @@ async def test_target(bbot_scanner): assert scan1.target.whitelist.get("publicapis.org") is None target = RadixTarget("evilcorp.com") - assert not "com" in target + assert "com" not in target assert 
"evilcorp.com" in target assert "www.evilcorp.com" in target strict_target = RadixTarget("evilcorp.com", strict_dns_scope=True) - assert not "com" in strict_target + assert "com" not in strict_target assert "evilcorp.com" in strict_target - assert not "www.evilcorp.com" in strict_target + assert "www.evilcorp.com" not in strict_target target = RadixTarget() target.add("evilcorp.com") - assert not "com" in target + assert "com" not in target assert "evilcorp.com" in target assert "www.evilcorp.com" in target strict_target = RadixTarget(strict_dns_scope=True) strict_target.add("evilcorp.com") - assert not "com" in strict_target + assert "com" not in strict_target assert "evilcorp.com" in strict_target - assert not "www.evilcorp.com" in strict_target + assert "www.evilcorp.com" not in strict_target # test target hashing @@ -293,7 +293,7 @@ async def test_target(bbot_scanner): assert target_dict["seeds"] == ["1.2.3.0/24", "bob@fdsa.evilcorp.net", "http://www.evilcorp.net/"] assert target_dict["whitelist"] == ["bob@www.evilcorp.com", "evilcorp.com", "evilcorp.net"] assert target_dict["blacklist"] == ["1.2.3.4", "4.3.2.0/24", "bob@asdf.evilcorp.net", "http://1.2.3.4/"] - assert target_dict["strict_scope"] == False + assert target_dict["strict_scope"] is False assert target_dict["hash"] == "b36955a8238a71842fc5f23b11110c26ea07d451" assert target_dict["seed_hash"] == "560af51d1f3d69bc5c156fc270b28497fe52dec1" assert target_dict["whitelist_hash"] == "8ed0a7368e6d34630e1cfd419d2a73767debc4c4" @@ -321,8 +321,8 @@ async def test_target(bbot_scanner): assert set(target.hosts) == {"evilcorp.co.uk", "www.evilcorp.co.uk"} assert "evilcorp.co.uk" in target assert "www.evilcorp.co.uk" in target - assert not "api.evilcorp.co.uk" in target - assert not "api.www.evilcorp.co.uk" in target + assert "api.evilcorp.co.uk" not in target + assert "api.www.evilcorp.co.uk" not in target # test 'single' boolean argument target = ScanSeeds("http://evilcorp.com", "evilcorp.com:443") @@ -361,13 +361,13 @@ async def test_blacklist_regex(bbot_scanner, bbot_httpserver): blacklist.get("www.evil.com", raise_error=True) assert "test.com" in blacklist assert "http://evilcorp.com/test.aspx" in blacklist - assert not "http://tes.com" in blacklist + assert "http://tes.com" not in blacklist blacklist = ScanBlacklist("evilcorp.com", r"RE:[0-9]{6}\.aspx$") assert "http://evilcorp.com" in blacklist - assert not "http://test.com/123456" in blacklist - assert not "http://test.com/12345.aspx?a=asdf" in blacklist - assert not "http://test.com/asdf/123456.aspx/asdf" in blacklist + assert "http://test.com/123456" not in blacklist + assert "http://test.com/12345.aspx?a=asdf" not in blacklist + assert "http://test.com/asdf/123456.aspx/asdf" not in blacklist assert "http://test.com/asdf/123456.aspx?a=asdf" in blacklist assert "http://test.com/asdf/123456.aspx" in blacklist diff --git a/bbot/test/test_step_1/test_web.py b/bbot/test/test_step_1/test_web.py index 0b3011d57..351a17318 100644 --- a/bbot/test/test_step_1/test_web.py +++ b/bbot/test/test_step_1/test_web.py @@ -211,14 +211,14 @@ async def test_web_helpers(bbot_scanner, bbot_httpserver, httpx_mock): url = bbot_httpserver.url_for(path) bbot_httpserver.expect_request(uri=path).respond_with_data(download_content, status=200) webpage = await scan1.helpers.request(url) - assert webpage, f"Webpage is False" + assert webpage, "Webpage is False" soup = scan1.helpers.beautifulsoup(webpage, "html.parser") - assert soup, f"Soup is False" + assert soup, "Soup is False" # pretty_print = 
soup.prettify() # assert pretty_print, f"PrettyPrint is False" # scan1.helpers.log.info(f"{pretty_print}") html_text = soup.find(text="Example Domain") - assert html_text, f"Find HTML Text is False" + assert html_text, "Find HTML Text is False" # 404 path = "/test_http_helpers_download_404" @@ -389,7 +389,7 @@ async def test_web_http_compare(httpx_mock, bbot_scanner): await compare_helper.compare("http://www.example.com", check_reflection=True) compare_helper.compare_body({"asdf": "fdsa"}, {"fdsa": "asdf"}) for mode in ("getparam", "header", "cookie"): - assert await compare_helper.canary_check("http://www.example.com", mode=mode) == True + assert await compare_helper.canary_check("http://www.example.com", mode=mode) is True await scan._cleanup() diff --git a/bbot/test/test_step_2/module_tests/base.py b/bbot/test/test_step_2/module_tests/base.py index 47038e9ae..3f6b5dd76 100644 --- a/bbot/test/test_step_2/module_tests/base.py +++ b/bbot/test/test_step_2/module_tests/base.py @@ -99,15 +99,15 @@ async def module_test( module_test = self.ModuleTest( self, httpx_mock, bbot_httpserver, bbot_httpserver_ssl, monkeypatch, request, caplog, capsys ) - self.log.debug(f"Mocking DNS") + self.log.debug("Mocking DNS") await module_test.mock_dns({"blacklanternsecurity.com": {"A": ["127.0.0.88"]}}) - self.log.debug(f"Executing setup_before_prep()") + self.log.debug("Executing setup_before_prep()") await self.setup_before_prep(module_test) - self.log.debug(f"Executing scan._prep()") + self.log.debug("Executing scan._prep()") await module_test.scan._prep() - self.log.debug(f"Executing setup_after_prep()") + self.log.debug("Executing setup_after_prep()") await self.setup_after_prep(module_test) - self.log.debug(f"Starting scan") + self.log.debug("Starting scan") module_test.events = [e async for e in module_test.scan.async_start()] self.log.debug(f"Finished {module_test.name} module test") yield module_test @@ -123,7 +123,7 @@ async def test_module_run(self, module_test): if len(tasks): module_test.log.info(f"Unfinished tasks detected: {tasks}") else: - module_test.log.info(f"No unfinished tasks detected") + module_test.log.info("No unfinished tasks detected") def check(self, module_test, events): assert False, f"Must override {self.name}.check()" diff --git a/bbot/test/test_step_2/module_tests/test_module_anubisdb.py b/bbot/test/test_step_2/module_tests/test_module_anubisdb.py index dbebf8621..7b1bc6659 100644 --- a/bbot/test/test_step_2/module_tests/test_module_anubisdb.py +++ b/bbot/test/test_step_2/module_tests/test_module_anubisdb.py @@ -5,7 +5,7 @@ class TestAnubisdb(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.module.abort_if = lambda e: False module_test.httpx_mock.add_response( - url=f"https://jldc.me/anubis/subdomains/blacklanternsecurity.com", + url="https://jldc.me/anubis/subdomains/blacklanternsecurity.com", json=["asdf.blacklanternsecurity.com", "zzzz.blacklanternsecurity.com"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_azure_realm.py b/bbot/test/test_step_2/module_tests/test_module_azure_realm.py index 557c362ee..fa0672615 100644 --- a/bbot/test/test_step_2/module_tests/test_module_azure_realm.py +++ b/bbot/test/test_step_2/module_tests/test_module_azure_realm.py @@ -22,7 +22,7 @@ class TestAzure_Realm(ModuleTestBase): async def setup_after_prep(self, module_test): await module_test.mock_dns({"evilcorp.com": {"A": ["127.0.0.5"]}}) module_test.httpx_mock.add_response( - 
url=f"https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", + url="https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", json=self.response_json, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_bevigil.py b/bbot/test/test_step_2/module_tests/test_module_bevigil.py index 328e213d2..7e616752f 100644 --- a/bbot/test/test_step_2/module_tests/test_module_bevigil.py +++ b/bbot/test/test_step_2/module_tests/test_module_bevigil.py @@ -9,7 +9,7 @@ class TestBeVigil(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", match_headers={"X-Access-Token": "asdf"}, json={ "domain": "blacklanternsecurity.com", @@ -19,7 +19,7 @@ async def setup_after_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", json={"domain": "blacklanternsecurity.com", "urls": ["https://asdf.blacklanternsecurity.com"]}, ) @@ -35,7 +35,7 @@ class TestBeVigilMultiKey(TestBeVigil): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/subdomains/", match_headers={"X-Access-Token": "fdsa"}, json={ "domain": "blacklanternsecurity.com", @@ -46,6 +46,6 @@ async def setup_after_prep(self, module_test): ) module_test.httpx_mock.add_response( match_headers={"X-Access-Token": "asdf"}, - url=f"https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", + url="https://osint.bevigil.com/api/blacklanternsecurity.com/urls/", json={"domain": "blacklanternsecurity.com", "urls": ["https://asdf.blacklanternsecurity.com"]}, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_binaryedge.py b/bbot/test/test_step_2/module_tests/test_module_binaryedge.py index 95b4ae7a7..348e2efb2 100644 --- a/bbot/test/test_step_2/module_tests/test_module_binaryedge.py +++ b/bbot/test/test_step_2/module_tests/test_module_binaryedge.py @@ -6,7 +6,7 @@ class TestBinaryEdge(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.binaryedge.io/v2/query/domains/subdomain/blacklanternsecurity.com", + url="https://api.binaryedge.io/v2/query/domains/subdomain/blacklanternsecurity.com", match_headers={"X-Key": "asdf"}, json={ "query": "blacklanternsecurity.com", @@ -19,7 +19,7 @@ async def setup_before_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url=f"https://api.binaryedge.io/v2/user/subscription", + url="https://api.binaryedge.io/v2/user/subscription", match_headers={"X-Key": "asdf"}, json={ "subscription": {"name": "Free"}, diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py b/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py index b566e9a82..7a5499b2e 100644 --- a/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_amazon.py @@ -82,8 +82,8 @@ def check(self, module_test, events): if e.type == "FINDING" and str(e.module) == self.module_name: url = e.data.get("url", "") assert self.random_bucket_2 in url - assert not self.random_bucket_1 in url - assert not self.random_bucket_3 in url + assert 
self.random_bucket_1 not in url + assert self.random_bucket_3 not in url # make sure bucket mutations were found assert any( e.type == "STORAGE_BUCKET" diff --git a/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py b/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py index a3c866c08..3b172eaab 100644 --- a/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py +++ b/bbot/test/test_step_2/module_tests/test_module_bucket_azure.py @@ -21,7 +21,7 @@ class TestBucket_Azure_NoDup(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://tesla.blob.core.windows.net/tesla?restype=container", + url="https://tesla.blob.core.windows.net/tesla?restype=container", text="", ) await module_test.mock_dns( diff --git a/bbot/test/test_step_2/module_tests/test_module_builtwith.py b/bbot/test/test_step_2/module_tests/test_module_builtwith.py index 0fc4de9d5..d11c8940d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_builtwith.py +++ b/bbot/test/test_step_2/module_tests/test_module_builtwith.py @@ -6,7 +6,7 @@ class TestBuiltWith(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.builtwith.com/v20/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes", + url="https://api.builtwith.com/v20/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com&NOMETA=yes&NOATTR=yes&HIDETEXT=yes&HIDEDL=yes", json={ "Results": [ { @@ -91,7 +91,7 @@ async def setup_after_prep(self, module_test): }, ) module_test.httpx_mock.add_response( - url=f"https://api.builtwith.com/redirect1/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com", + url="https://api.builtwith.com/redirect1/api.json?KEY=asdf&LOOKUP=blacklanternsecurity.com", json={ "Lookup": "blacklanternsecurity.com", "Inbound": [ diff --git a/bbot/test/test_step_2/module_tests/test_module_c99.py b/bbot/test/test_step_2/module_tests/test_module_c99.py index 284b76e1e..5a0bd6e8d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_c99.py +++ b/bbot/test/test_step_2/module_tests/test_module_c99.py @@ -51,7 +51,7 @@ async def custom_callback(request): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == False + assert module_test.module.errored is False # assert module_test.module._api_request_failures == 4 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com"} @@ -82,7 +82,7 @@ async def setup_before_prep(self, module_test): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 8 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com", "evilcorp.com"} @@ -106,7 +106,7 @@ class TestC99AbortThreshold3(TestC99AbortThreshold2): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 12 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { @@ -138,7 +138,7 @@ class TestC99AbortThreshold4(TestC99AbortThreshold3): def 
check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 13 - assert module_test.module.errored == True + assert module_test.module.errored is True assert module_test.module._api_request_failures == 13 assert module_test.module.api_retries == 4 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { diff --git a/bbot/test/test_step_2/module_tests/test_module_columbus.py b/bbot/test/test_step_2/module_tests/test_module_columbus.py index 55d456ce3..b91b532d7 100644 --- a/bbot/test/test_step_2/module_tests/test_module_columbus.py +++ b/bbot/test/test_step_2/module_tests/test_module_columbus.py @@ -4,7 +4,7 @@ class TestColumbus(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://columbus.elmasy.com/api/lookup/blacklanternsecurity.com?days=365", + url="https://columbus.elmasy.com/api/lookup/blacklanternsecurity.com?days=365", json=["asdf", "zzzz"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_credshed.py b/bbot/test/test_step_2/module_tests/test_module_credshed.py index 44b9133c9..a6b1e65c5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_credshed.py +++ b/bbot/test/test_step_2/module_tests/test_module_credshed.py @@ -58,12 +58,12 @@ class TestCredshed(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://credshed.com/api/auth", + url="https://credshed.com/api/auth", json=credshed_auth_response, method="POST", ) module_test.httpx_mock.add_response( - url=f"https://credshed.com/api/search", + url="https://credshed.com/api/search", json=credshed_response, method="POST", ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dehashed.py b/bbot/test/test_step_2/module_tests/test_module_dehashed.py index 0ac91c3b8..f642a444b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dehashed.py +++ b/bbot/test/test_step_2/module_tests/test_module_dehashed.py @@ -45,7 +45,7 @@ class TestDehashed(ModuleTestBase): async def setup_before_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.dehashed.com/search?query=domain:blacklanternsecurity.com&size=10000&page=1", + url="https://api.dehashed.com/search?query=domain:blacklanternsecurity.com&size=10000&page=1", json=dehashed_domain_response, ) await module_test.mock_dns( diff --git a/bbot/test/test_step_2/module_tests/test_module_digitorus.py b/bbot/test/test_step_2/module_tests/test_module_digitorus.py index fc95a82c7..a683a17d8 100644 --- a/bbot/test/test_step_2/module_tests/test_module_digitorus.py +++ b/bbot/test/test_step_2/module_tests/test_module_digitorus.py @@ -11,7 +11,7 @@ class TestDigitorus(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://certificatedetails.com/blacklanternsecurity.com", + url="https://certificatedetails.com/blacklanternsecurity.com", text=self.web_response, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py index a5ba1eba7..0cbee8440 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsbrute.py @@ -56,39 +56,39 @@ async def new_run_live(*command, check=False, text=True, **kwargs): event = module_test.scan.make_event("blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event) event.scope_distance = 0 result, reason = await 
module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "asdf.test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == True + assert result is True event = module_test.scan.make_event( "wat.asdf.test.www.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == False - assert reason == f"subdomain depth of *.asdf.test.www.blacklanternsecurity.com (4) > max_depth (3)" + assert result is False + assert reason == "subdomain depth of *.asdf.test.www.blacklanternsecurity.com (4) > max_depth (3)" event = module_test.scan.make_event( "hmmm.ptr1234.blacklanternsecurity.com", "DNS_NAME", parent=module_test.scan.root_event ) event.scope_distance = 0 result, reason = await module_test.module.filter_event(event) - assert result == False - assert reason == f'"ptr1234.blacklanternsecurity.com" looks like an autogenerated PTR' + assert result is False + assert reason == '"ptr1234.blacklanternsecurity.com" looks like an autogenerated PTR' def check(self, module_test, events): assert len(events) == 4 diff --git a/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py index 6bf045d5c..447f8baba 100644 --- a/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py +++ b/bbot/test/test_step_2/module_tests/test_module_dnsdumpster.py @@ -4,12 +4,12 @@ class TestDNSDumpster(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://dnsdumpster.com", + url="https://dnsdumpster.com", headers={"Set-Cookie": "csrftoken=asdf"}, content=b'\n\n \n\n \n \n\n \n \n \n \n DNSdumpster.com - dns recon and research, find and lookup dns records\n\n\n \n \n \n\n \n \n\n \n\n \n\n
[... remainder of mocked DNSdumpster homepage HTML truncated ...]',         )         module_test.httpx_mock.add_response( -            url=f"https://dnsdumpster.com/", +            url="https://dnsdumpster.com/",             method="POST",             content=b'[... mocked DNSdumpster results page for blacklanternsecurity.com truncated (DNS servers ns01/ns02.domaincontrol.com, MX record asdf.blacklanternsecurity.com.mail.protection.outlook.com, TXT records, and host (A) records) ...]

\n
\n \n\n \n \n \n \n \n \n \n \n \n \n \n \n \n \n \n
blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n
HTTP: \n GitHub.com\n\n\n\n\n\n\n\n\n
HTTP TECH: \n varnish\n\n\n\n
185.199.108.153
cdn-185-199-108-153.github.com
FASTLY
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n
SSH: \n SSH-2.0-OpenSSH_8.2p1 Ubuntu-4ubuntu0.3\n\n\n\n\n\n\n\n
143.244.156.80
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n
HTTP: \n Apache/2.4.29 (Ubuntu)\n\n\n\n\n\n\n\n\n
HTTP TECH: \n Ubuntu
Apache,2.4.29
\n\n\n\n
64.227.8.231
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
192.34.56.157
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
192.241.216.208
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
167.71.95.71
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
asdf.blacklanternsecurity.com
\n\n\n\n
\n
\n\n\n\n\n\n\n\n\n\n\n\n
157.245.247.197
asdf.blacklanternsecurity.com
DIGITALOCEAN-ASN
United States
\n
\n\n\n\n\n\n
\n

Mapping the domain ** click for full size image

\n

\n\n

\n
\n\n
\n\n

DNSdumpster.com is a FREE domain research tool that can discover hosts related to a domain. Finding visible hosts from the attackers perspective is an important part of the security assessment process.

\n\n
\n\n

this is a project

\n\n\n
\n
\n
\n

\n

Open Source Intelligence for Networks

\n
\n
\n
\n
\n \n \n \n

Attack

\n

The ability to quickly identify the attack surface is essential. Whether you are penetration testing or chasing bug bounties.

\n
\n
\n \n \n \n

Defend

\n

Network defenders benefit from passive reconnaissance in a number of ways. With analysis informing information security strategy.

\n
\n
\n \n \n \n

Learn

\n

Understanding network based OSINT helps information technologists to better operate, assess and manage the network.

\n
\n
\n
\n\n\n\n\n
\n\n \n

Map an organizations attack surface with a virtual dumpster dive* of the DNS records associated with the target organization.

\n

*DUMPSTER DIVING: The practice of sifting refuse from an office or technical installation to extract confidential data, especially security-compromising information.

\n
\n\n\n
\n\n

Frequently Asked Questions

\n\n

How can I take my security assessments to the next level?

\n\n

The company behind DNSDumpster is hackertarget.com where we provide online hosted access to trusted open source security vulnerability scanners and network intelligence tools.

Save time and headaches by incorporating our attack surface discovery into your vulnerability assessment process.

HackerTarget.com | Online Security Testing and Open Source Intelligence

\n\n

What data does DNSDumpster use?

\n\n

No brute force subdomain enumeration is used as is common in dns recon tools that enumerate subdomains. We use open source intelligence resources to query for related domain data. It is then compiled into an actionable resource for both attackers and defenders of Internet facing systems.

\n

More than a simple DNS lookup this tool will discover those hard to find sub-domains and web hosts. The search relies on data from our crawls of the Alexa Top 1 Million sites, Search Engines, Common Crawl, Certificate Transparency, Max Mind, Team Cymru, Shodan and scans.io.

\n\n

I have hit the host limit, do you have a PRO option?

\n\n

Over at hackertarget.com there\'s a tool we call domain profiler. This compiles data similiar to DNSDumpster; with additional data discovery. Queries available are based on the membership plan with the number of results (subdomains) being unlimited. With a STARTER membership you have access to the domain profiler tool for 12 months. Once the years membership expires you will revert to BASIC member status, however access to Domain Profiler and Basic Nmap scans continue. The BASIC access does not expire.

\n\n

What are some other resources and tools for learning more?

\n\n

There are some great open source recon frameworks that have been developed over the past couple of years. In addition tools such as Metasploit and Nmap include various modules for enumerating DNS. Check our Getting Started with Footprinting for more information.

\n\n
\n\n\n\n\n\n\n
\n\n\n\n \n \n \n \n\n\n\n\n\n\n \n \n \n \n\n\n\n \n \n \n\n \n\n\n\n\n\n\n\n\n\n\n\n\n \n\n', ) diff --git a/bbot/test/test_step_2/module_tests/test_module_excavate.py b/bbot/test/test_step_2/module_tests/test_module_excavate.py index bccbe3d73..cd709893a 100644 --- a/bbot/test/test_step_2/module_tests/test_module_excavate.py +++ b/bbot/test/test_step_2/module_tests/test_module_excavate.py @@ -60,8 +60,8 @@ def check(self, module_test, events): assert "www6.test.notreal" in event_data assert "www7.test.notreal" in event_data assert "www8.test.notreal" in event_data - assert not "http://127.0.0.1:8888/a_relative.js" in event_data - assert not "http://127.0.0.1:8888/link_relative.js" in event_data + assert "http://127.0.0.1:8888/a_relative.js" not in event_data + assert "http://127.0.0.1:8888/link_relative.js" not in event_data assert "http://127.0.0.1:8888/a_relative.txt" in event_data assert "http://127.0.0.1:8888/link_relative.txt" in event_data @@ -220,7 +220,7 @@ def check(self, module_test, events): [e for e in events if e.type == "FINDING" and e.data["description"] == "Non-HTTP URI: smb://127.0.0.1"] ) assert 1 == len( - [e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SMB" and not "port" in e.data] + [e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SMB" and "port" not in e.data] ) assert 0 == len([e for e in events if e.type == "FINDING" and "ssh://127.0.0.1" in e.data["description"]]) assert 0 == len([e for e in events if e.type == "PROTOCOL" and e.data["protocol"] == "SSH"]) @@ -711,7 +711,7 @@ def check(self, module_test, events): if ( str(e.module) == "dummy_module" and "spider-danger" not in e.tags - and not "spider-max" in e.tags + and "spider-max" not in e.tags ): found_url_unverified_dummy = True if e.type == "URL" and e.data == "http://127.0.0.1:8888/spider": @@ -868,8 +868,8 @@ def check(self, module_test, events): if e.data["name"] == "COOKIE2": found_second_cookie = True - assert found_first_cookie == True - assert found_second_cookie == True + assert found_first_cookie is True + assert found_second_cookie is True class TestExcavateRAWTEXT(ModuleTestBase): diff --git a/bbot/test/test_step_2/module_tests/test_module_host_header.py b/bbot/test/test_step_2/module_tests/test_module_host_header.py index b71a31b1d..a2d69e9b5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_host_header.py +++ b/bbot/test/test_step_2/module_tests/test_module_host_header.py @@ -31,7 +31,7 @@ def request_handler(self, request): if subdomain_tag_overrides: return Response(f"Alive, host is: {subdomain_tag}.{self.fake_host}", status=200) - return Response(f"Alive, host is: defaulthost.com", status=200) + return Response("Alive, host is: defaulthost.com", status=200) async def setup_before_prep(self, module_test): self.interactsh_mock_instance = module_test.mock_interactsh("host_header") diff --git a/bbot/test/test_step_2/module_tests/test_module_http.py b/bbot/test/test_step_2/module_tests/test_module_http.py index 43b7189ad..2bc99f5dd 100644 --- a/bbot/test/test_step_2/module_tests/test_module_http.py +++ b/bbot/test/test_step_2/module_tests/test_module_http.py @@ -48,10 +48,10 @@ async def custom_callback(request): ) def check(self, module_test, events): - assert self.got_event == True - assert self.headers_correct == True - assert self.method_correct == True - assert self.url_correct == True + assert self.got_event is True + assert self.headers_correct is True + assert self.method_correct is True + assert self.url_correct is True class 
TestHTTPSIEMFriendly(TestHTTP): diff --git a/bbot/test/test_step_2/module_tests/test_module_httpx.py b/bbot/test/test_step_2/module_tests/test_module_httpx.py index c05b6842d..a8d7cfe03 100644 --- a/bbot/test/test_step_2/module_tests/test_module_httpx.py +++ b/bbot/test/test_step_2/module_tests/test_module_httpx.py @@ -44,7 +44,7 @@ def check(self, module_test, events): for e in events: if e.type == "HTTP_RESPONSE": if e.data["path"] == "/": - assert not "login-page" in e.tags + assert "login-page" not in e.tags open_port = True elif e.data["path"] == "/url": assert "login-page" in e.tags diff --git a/bbot/test/test_step_2/module_tests/test_module_leakix.py b/bbot/test/test_step_2/module_tests/test_module_leakix.py index 13b922159..f87dba6b5 100644 --- a/bbot/test/test_step_2/module_tests/test_module_leakix.py +++ b/bbot/test/test_step_2/module_tests/test_module_leakix.py @@ -11,7 +11,7 @@ async def setup_before_prep(self, module_test): json={"title": "Not Found", "description": "Host not found"}, ) module_test.httpx_mock.add_response( - url=f"https://leakix.net/api/subdomains/blacklanternsecurity.com", + url="https://leakix.net/api/subdomains/blacklanternsecurity.com", match_headers={"api-key": "asdf"}, json=[ { @@ -35,7 +35,7 @@ async def setup_before_prep(self, module_test): json={"title": "Not Found", "description": "Host not found"}, ) module_test.httpx_mock.add_response( - url=f"https://leakix.net/api/subdomains/blacklanternsecurity.com", + url="https://leakix.net/api/subdomains/blacklanternsecurity.com", json=[ { "subdomain": "asdf.blacklanternsecurity.com", diff --git a/bbot/test/test_step_2/module_tests/test_module_myssl.py b/bbot/test/test_step_2/module_tests/test_module_myssl.py index 34b9b9972..b39f2711d 100644 --- a/bbot/test/test_step_2/module_tests/test_module_myssl.py +++ b/bbot/test/test_step_2/module_tests/test_module_myssl.py @@ -5,7 +5,7 @@ class TestMySSL(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.module.abort_if = lambda e: False module_test.httpx_mock.add_response( - url=f"https://myssl.com/api/v1/discover_sub_domain?domain=blacklanternsecurity.com", + url="https://myssl.com/api/v1/discover_sub_domain?domain=blacklanternsecurity.com", json={ "code": 0, "data": [ diff --git a/bbot/test/test_step_2/module_tests/test_module_neo4j.py b/bbot/test/test_step_2/module_tests/test_module_neo4j.py index 98107481a..c5df1e474 100644 --- a/bbot/test/test_step_2/module_tests/test_module_neo4j.py +++ b/bbot/test/test_step_2/module_tests/test_module_neo4j.py @@ -41,4 +41,4 @@ async def close(self): module_test.monkeypatch.setattr("neo4j.AsyncGraphDatabase.driver", MockDriver) def check(self, module_test, events): - assert self.neo4j_used == True + assert self.neo4j_used is True diff --git a/bbot/test/test_step_2/module_tests/test_module_newsletters.py b/bbot/test/test_step_2/module_tests/test_module_newsletters.py index d3712be5c..135b67b32 100644 --- a/bbot/test/test_step_2/module_tests/test_module_newsletters.py +++ b/bbot/test/test_step_2/module_tests/test_module_newsletters.py @@ -53,5 +53,5 @@ def check(self, module_test, events): # Verify Negative Result (should skip this statement if correct) elif event.data["url"] == self.missing_tgt: missing = False - assert found, f"NEWSLETTER 'Found' Error - Expect status of True but got False" - assert missing, f"NEWSLETTER 'Missing' Error - Expect status of True but got False" + assert found, "NEWSLETTER 'Found' Error - Expect status of True but got False" + assert missing, "NEWSLETTER 'Missing' Error - 
Expect status of True but got False" diff --git a/bbot/test/test_step_2/module_tests/test_module_oauth.py b/bbot/test/test_step_2/module_tests/test_module_oauth.py index 85fe4f917..1e7078e84 100644 --- a/bbot/test/test_step_2/module_tests/test_module_oauth.py +++ b/bbot/test/test_step_2/module_tests/test_module_oauth.py @@ -167,7 +167,7 @@ class TestOAUTH(ModuleTestBase): async def setup_after_prep(self, module_test): await module_test.mock_dns({"evilcorp.com": {"A": ["127.0.0.1"]}}) module_test.httpx_mock.add_response( - url=f"https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", + url="https://login.microsoftonline.com/getuserrealm.srf?login=test@evilcorp.com", json=Azure_Realm.response_json, ) module_test.httpx_mock.add_response( diff --git a/bbot/test/test_step_2/module_tests/test_module_otx.py b/bbot/test/test_step_2/module_tests/test_module_otx.py index 1c41cd962..9c533ca96 100644 --- a/bbot/test/test_step_2/module_tests/test_module_otx.py +++ b/bbot/test/test_step_2/module_tests/test_module_otx.py @@ -4,7 +4,7 @@ class TestOTX(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://otx.alienvault.com/api/v1/indicators/domain/blacklanternsecurity.com/passive_dns", + url="https://otx.alienvault.com/api/v1/indicators/domain/blacklanternsecurity.com/passive_dns", json={ "passive_dns": [ { diff --git a/bbot/test/test_step_2/module_tests/test_module_postgres.py b/bbot/test/test_step_2/module_tests/test_module_postgres.py index 874acdb19..ea6c00210 100644 --- a/bbot/test/test_step_2/module_tests/test_module_postgres.py +++ b/bbot/test/test_step_2/module_tests/test_module_postgres.py @@ -48,7 +48,7 @@ async def setup_before_prep(self, module_test): await asyncio.sleep(1) if process.returncode != 0: - self.log.error(f"Failed to start PostgreSQL server") + self.log.error("Failed to start PostgreSQL server") async def check(self, module_test, events): import asyncpg diff --git a/bbot/test/test_step_2/module_tests/test_module_rapiddns.py b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py index 2b3d3aaf0..d48f4dae7 100644 --- a/bbot/test/test_step_2/module_tests/test_module_rapiddns.py +++ b/bbot/test/test_step_2/module_tests/test_module_rapiddns.py @@ -11,7 +11,7 @@ class TestRapidDNS(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.module.abort_if = lambda e: False module_test.httpx_mock.add_response( - url=f"https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result", text=self.web_body + url="https://rapiddns.io/subdomain/blacklanternsecurity.com?full=1#result", text=self.web_body ) def check(self, module_test, events): @@ -45,7 +45,7 @@ async def custom_callback(request): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 3 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type == "DNS_NAME"]) == {"blacklanternsecurity.com"} @@ -59,7 +59,7 @@ class TestRapidDNSAbortThreshold2(TestRapidDNSAbortThreshold1): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 6 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type == "DNS_NAME"]) 
== {"blacklanternsecurity.com", "evilcorp.com"} @@ -74,7 +74,7 @@ class TestRapidDNSAbortThreshold3(TestRapidDNSAbortThreshold1): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == False + assert module_test.module.errored is False assert module_test.module._api_request_failures == 9 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { @@ -94,7 +94,7 @@ class TestRapidDNSAbortThreshold4(TestRapidDNSAbortThreshold1): def check(self, module_test, events): assert module_test.module.api_failure_abort_threshold == 10 - assert module_test.module.errored == True + assert module_test.module.errored is True assert module_test.module._api_request_failures == 10 assert module_test.module.api_retries == 3 assert set([e.data for e in events if e.type == "DNS_NAME"]) == { diff --git a/bbot/test/test_step_2/module_tests/test_module_sitedossier.py b/bbot/test/test_step_2/module_tests/test_module_sitedossier.py index a5b57b800..ed9330766 100644 --- a/bbot/test/test_step_2/module_tests/test_module_sitedossier.py +++ b/bbot/test/test_step_2/module_tests/test_module_sitedossier.py @@ -136,11 +136,11 @@ async def setup_after_prep(self, module_test): } ) module_test.httpx_mock.add_response( - url=f"http://www.sitedossier.com/parentdomain/evilcorp.com", + url="http://www.sitedossier.com/parentdomain/evilcorp.com", text=page1, ) module_test.httpx_mock.add_response( - url=f"http://www.sitedossier.com/parentdomain/evilcorp.com/101", + url="http://www.sitedossier.com/parentdomain/evilcorp.com/101", text=page2, ) diff --git a/bbot/test/test_step_2/module_tests/test_module_smuggler.py b/bbot/test/test_step_2/module_tests/test_module_smuggler.py index 7e076cf07..dcbb9fd3b 100644 --- a/bbot/test/test_step_2/module_tests/test_module_smuggler.py +++ b/bbot/test/test_step_2/module_tests/test_module_smuggler.py @@ -39,7 +39,7 @@ async def setup_after_prep(self, module_test): old_run_live = module_test.scan.helpers.run_live async def smuggler_mock_run_live(*command, **kwargs): - if not "smuggler" in command[0][1]: + if "smuggler" not in command[0][1]: async for l in old_run_live(*command, **kwargs): yield l else: diff --git a/bbot/test/test_step_2/module_tests/test_module_splunk.py b/bbot/test/test_step_2/module_tests/test_module_splunk.py index d55ed17c2..8366a6289 100644 --- a/bbot/test/test_step_2/module_tests/test_module_splunk.py +++ b/bbot/test/test_step_2/module_tests/test_module_splunk.py @@ -52,7 +52,7 @@ async def custom_callback(request): module_test.httpx_mock.add_response() def check(self, module_test, events): - assert self.got_event == True - assert self.headers_correct == True - assert self.method_correct == True - assert self.url_correct == True + assert self.got_event is True + assert self.headers_correct is True + assert self.method_correct is True + assert self.url_correct is True diff --git a/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py b/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py index 2ec5e0361..aa95473a4 100644 --- a/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py +++ b/bbot/test/test_step_2/module_tests/test_module_subdomaincenter.py @@ -4,7 +4,7 @@ class TestSubdomainCenter(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.subdomain.center/?domain=blacklanternsecurity.com", + 
url="https://api.subdomain.center/?domain=blacklanternsecurity.com", json=["asdf.blacklanternsecurity.com", "zzzz.blacklanternsecurity.com"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_subdomains.py b/bbot/test/test_step_2/module_tests/test_module_subdomains.py index 65b9a8a03..e7fb49459 100644 --- a/bbot/test/test_step_2/module_tests/test_module_subdomains.py +++ b/bbot/test/test_step_2/module_tests/test_module_subdomains.py @@ -6,7 +6,7 @@ class TestSubdomains(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"https://api.subdomain.center/?domain=blacklanternsecurity.com", + url="https://api.subdomain.center/?domain=blacklanternsecurity.com", json=["asdfasdf.blacklanternsecurity.com", "zzzzzzzz.blacklanternsecurity.com"], ) diff --git a/bbot/test/test_step_2/module_tests/test_module_wayback.py b/bbot/test/test_step_2/module_tests/test_module_wayback.py index cf09d8e2c..7582e5417 100644 --- a/bbot/test/test_step_2/module_tests/test_module_wayback.py +++ b/bbot/test/test_step_2/module_tests/test_module_wayback.py @@ -4,7 +4,7 @@ class TestWayback(ModuleTestBase): async def setup_after_prep(self, module_test): module_test.httpx_mock.add_response( - url=f"http://web.archive.org/cdx/search/cdx?url=blacklanternsecurity.com&matchType=domain&output=json&fl=original&collapse=original", + url="http://web.archive.org/cdx/search/cdx?url=blacklanternsecurity.com&matchType=domain&output=json&fl=original&collapse=original", json=[["original"], ["http://asdf.blacklanternsecurity.com"]], ) diff --git a/docs/dev/dev_environment.md b/docs/dev/dev_environment.md index d3fdee3cf..b73f66057 100644 --- a/docs/dev/dev_environment.md +++ b/docs/dev/dev_environment.md @@ -33,7 +33,7 @@ bbot --help ```bash # auto-format code indentation, etc. -black . +ruff format # run tests ./bbot/test/run_tests.sh diff --git a/docs/dev/tests.md b/docs/dev/tests.md index f5d05fcf9..1bcf2970a 100644 --- a/docs/dev/tests.md +++ b/docs/dev/tests.md @@ -2,18 +2,18 @@ BBOT takes tests seriously. Every module *must* have a custom-written test that *actually tests* its functionality. Don't worry if you want to contribute but you aren't used to writing tests. If you open a draft PR, we will help write them :) -We use [black](https://github.com/psf/black) and [flake8](https://flake8.pycqa.org/en/latest/) for linting, and [pytest](https://docs.pytest.org/en/8.2.x/) for tests. +We use [ruff](https://docs.astral.sh/ruff/) for linting, and [pytest](https://docs.pytest.org/en/8.2.x/) for tests. ## Running tests locally -We have Github actions that automatically run tests whenever you open a Pull Request. However, you can also run the tests locally with `pytest`: +We have GitHub Actions that automatically run tests whenever you open a Pull Request. However, you can also run the tests locally with `pytest`: ```bash -# format code with black -poetry run black . +# lint with ruff +poetry run ruff check -# lint with flake8 -poetry run flake8 +# format code with ruff +poetry run ruff format # run all tests with pytest (takes rougly 30 minutes) poetry run pytest diff --git a/poetry.lock b/poetry.lock index 2c880c15d..509937d90 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. 
[[package]] name = "annotated-types" @@ -129,52 +129,6 @@ charset-normalizer = ["charset-normalizer"] html5lib = ["html5lib"] lxml = ["lxml"] -[[package]] -name = "black" -version = "24.10.0" -description = "The uncompromising code formatter." -optional = false -python-versions = ">=3.9" -files = [ - {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, - {file = "black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, - {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, - {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, - {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, - {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, - {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, - {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, - {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, - {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, - {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, - {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, - {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, - {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, - {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, - {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, - {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, - {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, - {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, - {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, - {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, - {file = "black-24.10.0.tar.gz", hash = 
"sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.10)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - [[package]] name = "cachetools" version = "5.5.0" @@ -682,22 +636,6 @@ docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2. testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] -[[package]] -name = "flake8" -version = "7.1.1" -description = "the modular source code checker: pep8 pyflakes and co" -optional = false -python-versions = ">=3.8.1" -files = [ - {file = "flake8-7.1.1-py2.py3-none-any.whl", hash = "sha256:597477df7860daa5aa0fdd84bf5208a043ab96b8e96ab708770ae0364dd03213"}, - {file = "flake8-7.1.1.tar.gz", hash = "sha256:049d058491e228e03e67b390f311bbf88fce2dbaa8fa673e7aea87b7198b8d38"}, -] - -[package.dependencies] -mccabe = ">=0.7.0,<0.8.0" -pycodestyle = ">=2.12.0,<2.13.0" -pyflakes = ">=3.2.0,<3.3.0" - [[package]] name = "ghp-import" version = "2.1.0" @@ -1158,17 +1096,6 @@ files = [ {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - [[package]] name = "mergedeep" version = "1.3.4" @@ -1482,17 +1409,6 @@ plot = ["matplotlib (==3.9.2)", "pandas (==2.2.2)"] test = ["pytest (==8.3.3)", "pytest-sugar (==1.0.0)"] type = ["mypy (==1.11.2)"] -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - [[package]] name = "nodeenv" version = "1.9.1" @@ -1704,17 +1620,6 @@ files = [ {file = "puremagic-1.28.tar.gz", hash = "sha256:195893fc129657f611b86b959aab337207d6df7f25372209269ed9e303c1a8c0"}, ] -[[package]] -name = "pycodestyle" -version = "2.12.1" -description = "Python style guide checker" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, - {file = "pycodestyle-2.12.1.tar.gz", hash = "sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, -] - [[package]] name = "pycparser" version = "2.22" @@ -1891,17 +1796,6 @@ files = [ [package.dependencies] typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" -[[package]] -name = "pyflakes" -version = "3.2.0" -description = "passive checker of Python programs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pyflakes-3.2.0-py2.py3-none-any.whl", hash = "sha256:84b5be138a2dfbb40689ca07e2152deb896a65c3a3e24c251c5c62489568074a"}, - {file = "pyflakes-3.2.0.tar.gz", hash = "sha256:1c61603ff154621fb2a9172037d84dca3500def8c8b630657d1701f026f8af3f"}, -] - [[package]] name = "pygments" version = "2.18.0" @@ -2502,6 +2396,33 @@ lint = ["black", "flake8", "isort", "mypy", "types-requests"] release = ["build", "towncrier", "twine"] test = ["commentjson", "packaging", "pytest"] +[[package]] +name = "ruff" +version = "0.8.0" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "ruff-0.8.0-py3-none-linux_armv6l.whl", hash = "sha256:fcb1bf2cc6706adae9d79c8d86478677e3bbd4ced796ccad106fd4776d395fea"}, + {file = "ruff-0.8.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:295bb4c02d58ff2ef4378a1870c20af30723013f441c9d1637a008baaf928c8b"}, + {file = "ruff-0.8.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7b1f1c76b47c18fa92ee78b60d2d20d7e866c55ee603e7d19c1e991fad933a9a"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb0d4f250a7711b67ad513fde67e8870109e5ce590a801c3722580fe98c33a99"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:0e55cce9aa93c5d0d4e3937e47b169035c7e91c8655b0974e61bb79cf398d49c"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3f4cd64916d8e732ce6b87f3f5296a8942d285bbbc161acee7fe561134af64f9"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c5c1466be2a2ebdf7c5450dd5d980cc87c8ba6976fb82582fea18823da6fa362"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2dabfd05b96b7b8f2da00d53c514eea842bff83e41e1cceb08ae1966254a51df"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:facebdfe5a5af6b1588a1d26d170635ead6892d0e314477e80256ef4a8470cf3"}, + {file = "ruff-0.8.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87a8e86bae0dbd749c815211ca11e3a7bd559b9710746c559ed63106d382bd9c"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:85e654f0ded7befe2d61eeaf3d3b1e4ef3894469cd664ffa85006c7720f1e4a2"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:83a55679c4cb449fa527b8497cadf54f076603cc36779b2170b24f704171ce70"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:812e2052121634cf13cd6fddf0c1871d0ead1aad40a1a258753c04c18bb71bbd"}, + {file = "ruff-0.8.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:780d5d8523c04202184405e60c98d7595bdb498c3c6abba3b6d4cdf2ca2af426"}, + {file = "ruff-0.8.0-py3-none-win32.whl", hash = "sha256:5fdb6efecc3eb60bba5819679466471fd7d13c53487df7248d6e27146e985468"}, + {file = "ruff-0.8.0-py3-none-win_amd64.whl", hash = "sha256:582891c57b96228d146725975fbb942e1f30a0c4ba19722e692ca3eb25cc9b4f"}, + {file = "ruff-0.8.0-py3-none-win_arm64.whl", hash = "sha256:ba93e6294e9a737cd726b74b09a6972e36bb511f9a102f1d9a7e1ce94dd206a6"}, + {file = "ruff-0.8.0.tar.gz", hash = "sha256:a7ccfe6331bf8c8dad715753e157457faf7351c2b69f62f32c165c2dbcbacd44"}, +] + [[package]] name = "setproctitle" version = "1.3.4" @@ -3136,4 +3057,4 @@ type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "2a8751a5918309b0eb2fda2de92b363ae31cbf4ee97317999636d12ae04a06c9" +content-hash = "b196b7db964114dcb29d17bdd833c24759a09a02404d2a385d677b807ec865f8" diff --git a/pyproject.toml b/pyproject.toml index d333855f2..02a6d12d2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -58,13 +58,11 @@ cloudcheck = "^6.0.0.602" radixtarget = "^2.0.0.50" [tool.poetry.group.dev.dependencies] -flake8 = ">=6,<8" poetry-dynamic-versioning = ">=0.21.4,<1.5.0" urllib3 = "^2.0.2" werkzeug = ">=2.3.4,<4.0.0" pytest-env = ">=0.8.2,<1.2.0" pre-commit = ">=3.4,<5.0" -black = "^24.1.1" pytest-cov = ">=5,<7" pytest-rerunfailures = "^14.0" pytest-timeout = "^2.3.1" @@ -74,6 +72,7 @@ pytest-asyncio = "0.24.0" uvicorn = "^0.32.0" 
fastapi = "^0.115.5" pytest-httpx = ">=0.33,<0.35" +ruff = "^0.8.0" [tool.poetry.group.docs.dependencies] mkdocs = "^1.5.2"
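
Aside from the packaging changes in poetry.lock and pyproject.toml (flake8 and black removed, ruff added as a dev dependency), the test hunks above repeat three mechanical rewrites that correspond to standard pycodestyle/pyflakes rules — E712 (comparison to True/False), E713 (negated membership test), and F541 (f-string with no placeholders) — all of which ruff implements. The snippet below is an illustrative, self-contained sketch of those patterns, not code taken from any BBOT module; the `result`, `tags`, and `message` names are invented for the example, while the literal strings are borrowed from the hunks above.

```python
# Illustrative sketch only -- not taken from the BBOT codebase.
result = True
tags = {"login-page"}

# E712: compare against True/False by identity, not equality.
assert result is True               # after
# assert result == True            # before (flagged by ruff)

# E713: negate membership tests with "not in".
assert "spider-max" not in tags     # after
# assert not "spider-max" in tags  # before (flagged by ruff)

# F541: drop the f-prefix when a string has no placeholders.
message = "Failed to start PostgreSQL server"     # after
# message = f"Failed to start PostgreSQL server"  # before (flagged by ruff)
```

Running `poetry run ruff check`, as documented in the docs/dev/tests.md hunk above, should report the commented-out "before" forms; `poetry run ruff format` replaces the former `black .` invocation.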