ruff check --fix --unsafe-fixes
cclauss committed Nov 21, 2024
1 parent 2323594 commit e604d18
Showing 113 changed files with 829 additions and 830 deletions.
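This commit applies ruff's automatic fixes across the repository. The recurring rewrites in the hunks below are: membership tests flipped to "not in" (E713), equality comparisons against None/True/False replaced with identity checks (E711/E712), f-string prefixes dropped from strings that contain no placeholders (F541), and a lambda assignment converted to a def (E731). The snippet that follows is a minimal illustrative sketch of those patterns only; the names (settings, timeout, verbose, _filter) are hypothetical and do not come from the bbot codebase.

# Illustrative sketch only, not part of the commit.
settings = {"home": "~/.bbot"}
timeout = None
verbose = True

# E713: use "not in" for membership tests
# before: if not "home" in settings:
if "home" not in settings:
    settings["home"] = "~/.bbot"

# E711 / E712: compare against None/True/False with "is", not "=="
# before: if timeout == None: ... / if verbose == True: ...
if timeout is None:
    timeout = 20
if verbose is True:
    print("verbose mode enabled")

# F541: drop the f-prefix from strings without placeholders
# before: print(f"Please specify --allow-deadly to continue")
print("Please specify --allow-deadly to continue")

# E731: replace a lambda assignment with a def
# before: _filter = lambda x: x.is_dir()
def _filter(x):
    return x.is_dir()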
4 changes: 2 additions & 2 deletions bbot/cli.py
@@ -133,8 +133,8 @@ async def _main():
]
if deadly_modules and not options.allow_deadly:
log.hugewarning(f"You enabled the following deadly modules: {','.join(deadly_modules)}")
- log.hugewarning(f"Deadly modules are highly intrusive")
- log.hugewarning(f"Please specify --allow-deadly to continue")
+ log.hugewarning("Deadly modules are highly intrusive")
+ log.hugewarning("Please specify --allow-deadly to continue")
return False

# --current-preset
2 changes: 1 addition & 1 deletion bbot/core/config/logger.py
@@ -68,7 +68,7 @@ def __init__(self, core):
self.listener = None

# if we haven't set up logging yet, do it now
- if not "_BBOT_LOGGING_SETUP" in os.environ:
+ if "_BBOT_LOGGING_SETUP" not in os.environ:
os.environ["_BBOT_LOGGING_SETUP"] = "1"
self.queue = multiprocessing.Queue()
self.setup_queue_handler()
2 changes: 1 addition & 1 deletion bbot/core/core.py
@@ -106,7 +106,7 @@ def default_config(self):
if DEFAULT_CONFIG is None:
self.default_config = self.files_config.get_default_config()
# ensure bbot home dir
- if not "home" in self.default_config:
+ if "home" not in self.default_config:
self.default_config["home"] = "~/.bbot"
return DEFAULT_CONFIG

18 changes: 9 additions & 9 deletions bbot/core/event/base.py
@@ -203,7 +203,7 @@ def __init__(
# self.scan holds the instantiated scan object (for helpers, etc.)
self.scan = scan
if (not self.scan) and (not self._dummy):
- raise ValidationError(f"Must specify scan")
+ raise ValidationError("Must specify scan")
# self.scans holds a list of scan IDs from scans that encountered this event
self.scans = []
if scans is not None:
@@ -222,7 +222,7 @@ def __init__(

self.parent = parent
if (not self.parent) and (not self._dummy):
- raise ValidationError(f"Must specify event parent")
+ raise ValidationError("Must specify event parent")

if tags is not None:
for tag in tags:
@@ -301,9 +301,9 @@ def internal(self, value):
The purpose of internal events is to enable speculative/explorative discovery without cluttering
the console with irrelevant or uninteresting events.
"""
- if not value in (True, False):
+ if value not in (True, False):
raise ValueError(f'"internal" must be boolean, not {type(value)}')
- if value == True:
+ if value is True:
self.add_tag("internal")
else:
self.remove_tag("internal")
@@ -1013,12 +1013,12 @@ def __init__(self, *args, **kwargs):
if not self.host:
for parent in self.get_parents(include_self=True):
# inherit closest URL
- if not "url" in self.data:
+ if "url" not in self.data:
parent_url = getattr(parent, "parsed_url", None)
if parent_url is not None:
self.data["url"] = parent_url.geturl()
# inherit closest path
- if not "path" in self.data and isinstance(parent.data, dict) and not parent.type == "HTTP_RESPONSE":
+ if "path" not in self.data and isinstance(parent.data, dict) and not parent.type == "HTTP_RESPONSE":
parent_path = parent.data.get("path", None)
if parent_path is not None:
self.data["path"] = parent_path
@@ -1228,7 +1228,7 @@ def sanitize_data(self, data):

def add_tag(self, tag):
host_same_as_parent = self.parent and self.host == self.parent.host
- if tag == "spider-danger" and host_same_as_parent and not "spider-danger" in self.tags:
+ if tag == "spider-danger" and host_same_as_parent and "spider-danger" not in self.tags:
# increment the web spider distance
if self.type == "URL_UNVERIFIED":
self.web_spider_distance += 1
@@ -1250,7 +1250,7 @@ def with_port(self):

def _words(self):
first_elem = self.parsed_url.path.lstrip("/").split("/")[0]
- if not "." in first_elem:
+ if "." not in first_elem:
return extract_words(first_elem)
return set()

@@ -1668,7 +1668,7 @@ def make_event(
event.parent = parent
if context is not None:
event.discovery_context = context
- if internal == True:
+ if internal is True:
event.internal = True
if tags:
event.tags = tags.union(event.tags)
8 changes: 4 additions & 4 deletions bbot/core/helpers/command.py
@@ -269,11 +269,11 @@ def _prepare_command_kwargs(self, command, kwargs):
(['sudo', '-E', '-A', 'LD_LIBRARY_PATH=...', 'PATH=...', 'ls', '-l'], {'limit': 104857600, 'stdout': -1, 'stderr': -1, 'env': environ(...)})
"""
# limit = 100MB (this is needed for cases like httpx that are sending large JSON blobs over stdout)
- if not "limit" in kwargs:
+ if "limit" not in kwargs:
kwargs["limit"] = 1024 * 1024 * 100
- if not "stdout" in kwargs:
+ if "stdout" not in kwargs:
kwargs["stdout"] = asyncio.subprocess.PIPE
- if not "stderr" in kwargs:
+ if "stderr" not in kwargs:
kwargs["stderr"] = asyncio.subprocess.PIPE
sudo = kwargs.pop("sudo", False)

@@ -286,7 +286,7 @@ def _prepare_command_kwargs(self, command, kwargs):

# use full path of binary, if not already specified
binary = command[0]
- if not "/" in binary:
+ if "/" not in binary:
binary_full_path = which(binary)
if binary_full_path is None:
raise SubprocessError(f'Command "{binary}" was not found')
8 changes: 4 additions & 4 deletions bbot/core/helpers/depsinstaller/installer.py
@@ -96,11 +96,11 @@ async def install(self, *modules):
or self.deps_behavior == "force_install"
):
if not notified:
- log.hugeinfo(f"Installing module dependencies. Please be patient, this may take a while.")
+ log.hugeinfo("Installing module dependencies. Please be patient, this may take a while.")
notified = True
log.verbose(f'Installing dependencies for module "{m}"')
# get sudo access if we need it
- if preloaded.get("sudo", False) == True:
+ if preloaded.get("sudo", False) is True:
self.ensure_root(f'Module "{m}" needs root privileges to install its dependencies.')
success = await self.install_module(m)
self.setup_status[module_hash] = success
@@ -158,7 +158,7 @@ async def install_module(self, module):
deps_common = preloaded["deps"]["common"]
if deps_common:
for dep_common in deps_common:
- if self.setup_status.get(dep_common, False) == True:
+ if self.setup_status.get(dep_common, False) is True:
log.debug(
f'Skipping installation of dependency "{dep_common}" for module "{module}" since it is already installed'
)
@@ -243,7 +243,7 @@ def shell(self, module, commands):
if success:
log.info(f"Successfully ran {len(commands):,} shell commands")
else:
- log.warning(f"Failed to run shell dependencies")
+ log.warning("Failed to run shell dependencies")
return success

def tasks(self, module, tasks):
10 changes: 5 additions & 5 deletions bbot/core/helpers/diff.py
@@ -183,7 +183,7 @@ async def compare(

await self._baseline()

- if timeout == None:
+ if timeout is None:
timeout = self.timeout

reflection = False
@@ -238,11 +238,11 @@ async def compare(

different_headers = self.compare_headers(self.baseline.headers, subject_response.headers)
if different_headers:
- log.debug(f"headers were different, no match")
+ log.debug("headers were different, no match")
diff_reasons.append("header")

- if self.compare_body(self.baseline_json, subject_json) == False:
- log.debug(f"difference in HTML body, no match")
+ if self.compare_body(self.baseline_json, subject_json) is False:
+ log.debug("difference in HTML body, no match")

diff_reasons.append("body")

@@ -275,6 +275,6 @@ async def canary_check(self, url, mode, rounds=3):
)

# if a nonsense header "caused" a difference, we need to abort. We also need to abort if our canary was reflected
- if match == False or reflection == True:
+ if match is False or reflection is True:
return False
return True
2 changes: 1 addition & 1 deletion bbot/core/helpers/dns/dns.py
@@ -179,7 +179,7 @@ def _wildcard_prevalidation(self, host):

host = clean_dns_record(host)
# skip check if it's an IP or a plain hostname
- if is_ip(host) or not "." in host:
+ if is_ip(host) or "." not in host:
return False

# skip if query isn't a dns name
2 changes: 1 addition & 1 deletion bbot/core/helpers/dns/engine.py
@@ -640,7 +640,7 @@ async def _connectivity_check(self, interval=5):
self._last_dns_success = time.time()
return True
if time.time() - self._last_connectivity_warning > interval:
- self.log.warning(f"DNS queries are failing, please check your internet connection")
+ self.log.warning("DNS queries are failing, please check your internet connection")
self._last_connectivity_warning = time.time()
self._errors.clear()
return False
2 changes: 1 addition & 1 deletion bbot/core/helpers/files.py
@@ -83,7 +83,7 @@ def _feed_pipe(self, pipe, content, text=True):
for c in content:
p.write(decode_fn(c) + newline)
except BrokenPipeError:
- log.debug(f"Broken pipe in _feed_pipe()")
+ log.debug("Broken pipe in _feed_pipe()")
except ValueError:
log.debug(f"Error _feed_pipe(): {traceback.format_exc()}")
except KeyboardInterrupt:
3 changes: 2 additions & 1 deletion bbot/core/helpers/helper.py
@@ -153,7 +153,8 @@ def temp_filename(self, extension=None):
return self.temp_dir / filename

def clean_old_scans(self):
- _filter = lambda x: x.is_dir() and self.regexes.scan_name_regex.match(x.name)
+ def _filter(x):
+ return x.is_dir() and self.regexes.scan_name_regex.match(x.name)
self.clean_old(self.scans_dir, keep=self.keep_old_scans, filter=_filter)

def make_target(self, *targets, **kwargs):
6 changes: 3 additions & 3 deletions bbot/core/helpers/interactsh.py
@@ -155,7 +155,7 @@ async def register(self, callback=None):
break

if not self.server:
- raise InteractshError(f"Failed to register with an interactsh server")
+ raise InteractshError("Failed to register with an interactsh server")

log.info(
f"Successfully registered to interactsh server {self.server} with correlation_id {self.correlation_id} [{self.domain}]"
@@ -181,7 +181,7 @@ async def deregister(self):
>>> await interactsh_client.deregister()
"""
if not self.server or not self.correlation_id or not self.secret:
- raise InteractshError(f"Missing required information to deregister")
+ raise InteractshError("Missing required information to deregister")

headers = {}
if self.token:
@@ -226,7 +226,7 @@ async def poll(self):
]
"""
if not self.server or not self.correlation_id or not self.secret:
- raise InteractshError(f"Missing required information to poll")
+ raise InteractshError("Missing required information to poll")

headers = {}
if self.token:
4 changes: 1 addition & 3 deletions bbot/core/helpers/libmagic.py
@@ -59,9 +59,7 @@ def get_compression(mime_type):
"application/x-xar": "xar", # XAR archive
"application/x-ace": "ace", # ACE archive
"application/x-zoo": "zoo", # Zoo archive
- "application/x-arc": "arc", # ARC archive
- "application/x-zstd-compressed-tar": "zstd", # Zstandard compressed Tar archive
- "application/x-lz4-compressed-tar": "lz4", # LZ4 compressed Tar archive
+ "application/x-arc": "arc", # LZ4 compressed Tar archive
"application/vnd.comicbook-rar": "rar", # Comic book archive (RAR)
}

4 changes: 2 additions & 2 deletions bbot/core/helpers/misc.py
@@ -391,7 +391,7 @@ def url_parents(u):
parent_list = []
while 1:
parent = parent_url(u)
- if parent == None:
+ if parent is None:
return parent_list
elif parent not in parent_list:
parent_list.append(parent)
@@ -512,7 +512,7 @@ def domain_stem(domain):
- Utilizes the `tldextract` function for domain parsing.
"""
parsed = tldextract(str(domain))
- return f".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".")
+ return ".".join(parsed.subdomain.split(".") + parsed.domain.split(".")).strip(".")


def ip_network_parents(i, include_self=False):
2 changes: 1 addition & 1 deletion bbot/core/helpers/validators.py
@@ -132,7 +132,7 @@ def validate_host(host: Union[str, ipaddress.IPv4Address, ipaddress.IPv6Address]
@validator
def validate_severity(severity: str):
severity = str(severity).strip().upper()
- if not severity in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"):
+ if severity not in ("UNKNOWN", "INFO", "LOW", "MEDIUM", "HIGH", "CRITICAL"):
raise ValueError(f"Invalid severity: {severity}")
return severity

2 changes: 1 addition & 1 deletion bbot/core/helpers/web/client.py
@@ -56,7 +56,7 @@ def __init__(self, *args, **kwargs):

# timeout
http_timeout = self._web_config.get("http_timeout", 20)
- if not "timeout" in kwargs:
+ if "timeout" not in kwargs:
kwargs["timeout"] = http_timeout

# headers
2 changes: 1 addition & 1 deletion bbot/core/helpers/web/engine.py
@@ -138,7 +138,7 @@ async def stream_request(self, url, **kwargs):
if max_size is not None:
max_size = human_to_bytes(max_size)
kwargs["follow_redirects"] = follow_redirects
- if not "method" in kwargs:
+ if "method" not in kwargs:
kwargs["method"] = "GET"
try:
total_size = 0
4 changes: 2 additions & 2 deletions bbot/core/helpers/web/web.py
@@ -262,7 +262,7 @@ async def wordlist(self, path, lines=None, **kwargs):
"""
if not path:
raise WordlistError(f"Invalid wordlist: {path}")
- if not "cache_hrs" in kwargs:
+ if "cache_hrs" not in kwargs:
kwargs["cache_hrs"] = 720
if self.parent_helper.is_url(path):
filename = await self.download(str(path), **kwargs)
@@ -351,7 +351,7 @@ async def curl(self, *args, **kwargs):
headers[hk] = hv

# add the timeout
- if not "timeout" in kwargs:
+ if "timeout" not in kwargs:
timeout = http_timeout

curl_command.append("-m")
8 changes: 4 additions & 4 deletions bbot/core/helpers/wordcloud.py
@@ -111,15 +111,15 @@ def mutations(
results = set()
for word in words:
h = hash(word)
- if not h in results:
+ if h not in results:
results.add(h)
yield (word,)
if numbers > 0:
if substitute_numbers:
for word in words:
for number_mutation in self.get_number_mutations(word, n=numbers, padding=number_padding):
h = hash(number_mutation)
- if not h in results:
+ if h not in results:
results.add(h)
yield (number_mutation,)
for word in words:
@@ -322,7 +322,7 @@ def json(self, limit=None):

@property
def default_filename(self):
- return self.parent_helper.preset.scan.home / f"wordcloud.tsv"
+ return self.parent_helper.preset.scan.home / "wordcloud.tsv"

def save(self, filename=None, limit=None):
"""
@@ -357,7 +357,7 @@ def save(self, filename=None, limit=None):
log.debug(f"Saved word cloud ({len(self):,} words) to {filename}")
return True, filename
else:
- log.debug(f"No words to save")
+ log.debug("No words to save")
except Exception as e:
import traceback

10 changes: 5 additions & 5 deletions bbot/core/modules.py
@@ -153,7 +153,7 @@ def preload(self, module_dirs=None):
else:
log.debug(f"Preloading {module_name} from disk")
if module_dir.name == "modules":
- namespace = f"bbot.modules"
+ namespace = "bbot.modules"
else:
namespace = f"bbot.modules.{module_dir.name}"
try:
@@ -401,10 +401,10 @@ def preload_module(self, module_file):
deps_common.append(dep_common.value)

for task in ansible_tasks:
- if not "become" in task:
+ if "become" not in task:
task["become"] = False
# don't sudo brew
- elif os_platform() == "darwin" and ("package" in task and task.get("become", False) == True):
+ elif os_platform() == "darwin" and ("package" in task and task.get("become", False) is True):
task["become"] = False

preloaded_data = {
@@ -437,8 +437,8 @@ def preload_module(self, module_file):
f'Error while preloading module "{module_file}": No shared dependency named "{dep_common}" (choices: {common_choices})'
)
for ansible_task in ansible_task_list:
- if any(x == True for x in search_dict_by_key("become", ansible_task)) or any(
- x == True for x in search_dict_by_key("ansible_become", ansible_tasks)
+ if any(x is True for x in search_dict_by_key("become", ansible_task)) or any(
+ x is True for x in search_dict_by_key("ansible_become", ansible_tasks)
):
preloaded_data["sudo"] = True
return preloaded_data
