diff --git a/poetry.lock b/poetry.lock index 7a56416a6..7cb06a69b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -911,6 +911,16 @@ files = [ {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:f698de3fd0c4e6972b92290a45bd9b1536bffe8c6759c62471efaa8acb4c37bc"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:aa57bd9cf8ae831a362185ee444e15a93ecb2e344c8e52e4d721ea3ab6ef1823"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffcc3f7c66b5f5b7931a5aa68fc9cecc51e685ef90282f4a82f0f5e9b704ad11"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47d4f1c5f80fc62fdd7777d0d40a2e9dda0a05883ab11374334f6c4de38adffd"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1f67c7038d560d92149c060157d623c542173016c4babc0c1913cca0564b9939"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9aad3c1755095ce347e26488214ef77e0485a3c34a50c5a5e2471dff60b9dd9c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:14ff806850827afd6b07a5f32bd917fb7f45b046ba40c57abdb636674a8b559c"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8f9293864fe09b8149f0cc42ce56e3f0e54de883a9de90cd427f191c346eb2e1"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win32.whl", hash = "sha256:715d3562f79d540f251b99ebd6d8baa547118974341db04f5ad06d5ea3eb8007"}, + {file = "MarkupSafe-2.1.3-cp312-cp312-win_amd64.whl", hash = "sha256:1b8dd8c3fd14349433c79fa8abeb573a55fc0fdd769133baac1f5e07abf54aeb"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, @@ -1323,6 +1333,32 @@ files = [ dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] +[[package]] +name = "psutil" +version = "5.9.4" +description = "Cross-platform lib for process and system monitoring in Python." 
+optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "psutil-5.9.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c1ca331af862803a42677c120aff8a814a804e09832f166f226bfd22b56feee8"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:68908971daf802203f3d37e78d3f8831b6d1014864d7a85937941bb35f09aefe"}, + {file = "psutil-5.9.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3ff89f9b835100a825b14c2808a106b6fdcc4b15483141482a12c725e7f78549"}, + {file = "psutil-5.9.4-cp27-cp27m-win32.whl", hash = "sha256:852dd5d9f8a47169fe62fd4a971aa07859476c2ba22c2254d4a1baa4e10b95ad"}, + {file = "psutil-5.9.4-cp27-cp27m-win_amd64.whl", hash = "sha256:9120cd39dca5c5e1c54b59a41d205023d436799b1c8c4d3ff71af18535728e94"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:6b92c532979bafc2df23ddc785ed116fced1f492ad90a6830cf24f4d1ea27d24"}, + {file = "psutil-5.9.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:efeae04f9516907be44904cc7ce08defb6b665128992a56957abc9b61dca94b7"}, + {file = "psutil-5.9.4-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:54d5b184728298f2ca8567bf83c422b706200bcbbfafdc06718264f9393cfeb7"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16653106f3b59386ffe10e0bad3bb6299e169d5327d3f187614b1cb8f24cf2e1"}, + {file = "psutil-5.9.4-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:54c0d3d8e0078b7666984e11b12b88af2db11d11249a8ac8920dd5ef68a66e08"}, + {file = "psutil-5.9.4-cp36-abi3-win32.whl", hash = "sha256:149555f59a69b33f056ba1c4eb22bb7bf24332ce631c44a319cec09f876aaeff"}, + {file = "psutil-5.9.4-cp36-abi3-win_amd64.whl", hash = "sha256:fd8522436a6ada7b4aad6638662966de0d61d241cb821239b2ae7013d41a43d4"}, + {file = "psutil-5.9.4-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:6001c809253a29599bc0dfd5179d9f8a5779f9dffea1da0f13c53ee568115e1e"}, + {file = "psutil-5.9.4.tar.gz", hash = "sha256:3d7f9739eb435d4b1338944abe23f49584bde5395f27487d2ee25ad9a8774a62"}, +] + +[package.extras] +test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] + [[package]] name = "pycparser" version = "2.21" @@ -2163,6 +2199,17 @@ files = [ {file = "types_aiofiles-23.2.0.0-py3-none-any.whl", hash = "sha256:5d6719e8148cb2a9c4ea46dad86d50d3b675c46a940adca698533a8d2216d53d"}, ] +[[package]] +name = "types-psutil" +version = "5.9.5.10" +description = "Typing stubs for psutil" +optional = false +python-versions = "*" +files = [ + {file = "types-psutil-5.9.5.10.tar.gz", hash = "sha256:26206682efe011bc84f15f8d6539c714f3d598843e2b4c88ef371ecb1a34829b"}, + {file = "types_psutil-5.9.5.10-py3-none-any.whl", hash = "sha256:04c54f95f95d7175649f21cec4528f0b0313f0c4d103cc4d675bb66a25ac0bca"}, +] + [[package]] name = "types-tabulate" version = "0.9.0.2" @@ -2504,4 +2551,4 @@ cffi = ["cffi (>=1.11)"] [metadata] lock-version = "2.0" python-versions = ">=3.10,<3.12" -content-hash = "94084ddd38d8b16ec74ea3a002aa995c662f5a88e27b0410c048f7c8bcf33c77" +content-hash = "22e2acba90c7a3e12f458c663531b90a7a0e649e2e68a663fdb98e89ce4cb42d" diff --git a/pyproject.toml b/pyproject.toml index 55d49bc4e..a1e146c53 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,6 +48,8 @@ pydantic = "^2.0" pygit2 = "^1.10" platformdirs = ">=2.6,<4.0" exitcode = "^0.1.0" +psutil = "^5.9.4" +types-psutil = "^5.9.5.10" [tool.poetry.group.dev.dependencies] black = ">=22.10,<24.0" diff --git 
a/src/gallia/commands/__init__.py b/src/gallia/commands/__init__.py index 21975caa9..f7281a276 100644 --- a/src/gallia/commands/__init__.py +++ b/src/gallia/commands/__init__.py @@ -3,8 +3,8 @@ # SPDX-License-Identifier: Apache-2.0 from gallia.command.base import BaseCommand +from gallia.commands.discover.doip import DoIPDiscoverer from gallia.commands.discover.find_xcp import FindXCP -from gallia.commands.discover.uds.doip import DoIPDiscoverer from gallia.commands.discover.uds.isotp import IsotpDiscoverer from gallia.commands.fuzz.uds.pdu import PDUFuzzer from gallia.commands.primitive.generic.pdu import GenericPDUPrimitive diff --git a/src/gallia/commands/discover/doip.py b/src/gallia/commands/discover/doip.py new file mode 100644 index 000000000..d11a0738b --- /dev/null +++ b/src/gallia/commands/discover/doip.py @@ -0,0 +1,547 @@ +# SPDX-FileCopyrightText: AISEC Pentesting Team +# +# SPDX-License-Identifier: Apache-2.0 + +import asyncio +import socket +from argparse import Namespace +from collections.abc import Iterable +from itertools import chain, product +from urllib.parse import parse_qs, urlparse + +import aiofiles +import psutil + +from gallia.command import AsyncScript +from gallia.services.uds.core.service import ( + DiagnosticSessionControlRequest, + DiagnosticSessionControlResponse, +) +from gallia.transports.doip import ( + DiagnosticMessage, + DiagnosticMessageNegativeAckCodes, + DoIPConnection, + RoutingActivationRequestTypes, + RoutingActivationResponseCodes, + TimingAndCommunicationParameters, +) + + +class DoIPDiscoverer(AsyncScript): + """This script scans for active DoIP endpoints and automatically enumerates allowed + RoutingActivationTypes and known SourceAddresses. Once valid endpoints are acquired, + the script continues to discover valid TargetAddresses that are accepted and respond + to UDS DiagnosticSessionControl requests.""" + + GROUP = "discover" + COMMAND = "doip" + SHORT_HELP = "zero-knowledge DoIP enumeration scanner" + HAS_ARTIFACTS_DIR = True + + def configure_parser(self) -> None: + self.parser.add_argument( + "--start", + metavar="INT", + type=lambda x: int(x, 0), + default=0x00, + help="set start address of TargetAddress search range", + ) + self.parser.add_argument( + "--stop", + metavar="INT", + type=lambda x: int(x, 0), + default=0xFFFF, + help="set stop address of TargetAddress search range", + ) + self.parser.add_argument( + "--target", + metavar="", + type=str, + default=None, + help="The more you give, the more automatic detection will be skipped: IP, Port, RoutingActivationType, SourceAddress", + ) + self.parser.add_argument( + "--timeout", + metavar="SECONDS (FLOAT)", + type=float, + default=None, + help="This flag overrides the default timeout of DiagnosticMessages, which can be used to fine-tune classification of unresponsive ECUs or broadcast detection", + ) + + # This is an ugly hack to circumvent AsyncScript's shortcomings regarding return codes + def run(self, args: Namespace) -> int: + return asyncio.run(self.main2(args)) + + async def main(self, args: Namespace) -> None: + pass + + async def main2(self, args: Namespace) -> int: + self.logger.notice("[πŸ‘‹] Welcome to @realDoIP-Discovery powered by MoarMemes…") + + target = urlparse(args.target) if args.target is not None else None + if target is not None and target.scheme != "doip": + self.logger.error("[🫣] --target must be doip://…") + return 2 + + # Discover Hostname and Port + tgt_hostname: str + tgt_port: int + if ( + target is not None + and target.hostname is not None + and 
target.port is not None + ): + self.logger.notice( + "[πŸ“‹] Skipping host/port discovery because given by --target" + ) + tgt_hostname = target.hostname + tgt_port = target.port + else: + self.logger.notice("[πŸ”] Discovering Host and Port via UDP Broadcast") + + hosts = await self.run_udp_discovery() + + if len(hosts) != 1: + self.logger.error( + "[πŸƒ] Can only continue with a single DoIP host! Give me a --target!" + ) + return 11 + + tgt_hostname, tgt_port = hosts[0] + + # Find correct RoutingActivationType + rat_success: list[int] = [] + rat_wrong_source: list[int] = [] + if target is not None and "activation_type" in parse_qs(target.query): + self.logger.notice( + "[πŸ“‹] Skipping RoutingActivationType discovery because given by --target" + ) + rat_success = [int(parse_qs(target.query)["activation_type"][0], 0)] + else: + self.logger.notice("[πŸ”] Enumerating all RoutingActivationTypes") + + ( + rat_success, + rat_wrong_source, + ) = await self.enumerate_routing_activation_types( + tgt_hostname, + tgt_port, + int(parse_qs(target.query)["src_addr"][0], 0) + if target is not None and "src_addr" in parse_qs(target.query) + else 0xE00, + ) + + if len(rat_success) == 0 and len(rat_wrong_source) == 0: + self.logger.error( + "[πŸ₯Ύ] Damn son, didn't find a single routing activation type with unknown source?! OUTTA HERE!" + ) + return 10 + + # Discovering correct source address for suitable RoutingActivationRequests + if target is not None and "src_addr" in parse_qs(target.query): + self.logger.notice( + "[πŸ“‹] Skipping SourceAddress discovery because given by --target" + ) + targets = [ + f"doip://{tgt_hostname}:{tgt_port}?activation_type={rat:#x}&src_addr={parse_qs(target.query)['src_addr'][0]}" + for rat in rat_success + ] + + else: + self.logger.notice("[πŸ”] Enumerating all SourceAddresses") + targets = await self.enumerate_source_addresses( + tgt_hostname, + tgt_port, + chain(rat_success, rat_wrong_source), + ) + + if len(targets) != 1: + self.logger.error( + f"[πŸ’£] I found {len(targets)} valid RoutingActivationType/SourceAddress combos, but can only continue with exactly one; choose your weapon with --target!" + ) + return 20 + + # Enumerate valid TargetAddresses + if target is not None and "target_addr" in parse_qs(target.query): + self.logger.error( + "[😡] Why do you give me a target_addr in --target? Am I useless to you??? GOODBYE!" + ) + return 3 + + self.logger.notice( + f"[πŸ”] Enumerating all TargetAddresses from {args.start:#x} to {args.stop:#x}" + ) + + target = urlparse(targets[0]) + tgt_src = int(parse_qs(target.query)["src_addr"][0], 0) + tgt_rat = int(parse_qs(target.query)["activation_type"][0], 0) + + await self.enumerate_target_addresses( + tgt_hostname, + tgt_port, + tgt_rat, + tgt_src, + args.start, + args.stop, + args.timeout, + ) + + self.logger.notice("[πŸ›©οΈ] All done, thanks for flying with us!") + return 0 + + async def enumerate_routing_activation_types( + self, + tgt_hostname: str, + tgt_port: int, + src_addr: int, + ) -> tuple[list[int], list[int]]: + rat_not_unsupported: list[int] = [] + rat_success: list[int] = [] + rat_wrong_source: list[int] = [] + for routing_activation_type in range(0x100): + try: + conn = await DoIPConnection.connect( + tgt_hostname, + tgt_port, + src_addr, + 0xAFFE, + ) + except OSError as e: + self.logger.error(f"[🚨] Mr. 
Stark I don't feel so good: {e}") + return rat_success, rat_wrong_source + + try: + await conn.write_routing_activation_request(routing_activation_type) + rat_success.append(routing_activation_type) + self.logger.info( + f"[🀯] Holy moly, it actually worked for activation_type {routing_activation_type:#x} and src_addr {src_addr:#x}!!!" + ) + except ConnectionAbortedError as e: + # Let's utilize Gallia's excellent error handling + error = RoutingActivationResponseCodes[str(e).split(" ")[-1]] + self.logger.info( + f"[🌟] splendid, {routing_activation_type:#x} yields a {error.name}" + ) + + if error != RoutingActivationResponseCodes.UnsupportedActivationType: + rat_not_unsupported.append(routing_activation_type) + + if error == RoutingActivationResponseCodes.UnknownSourceAddress: + rat_wrong_source.append(routing_activation_type) + + finally: + try: + await conn.close() + except ConnectionResetError as e: + # This triggers when the connection is closed already, as conn.close() is not handling this + self.logger.warn(f"[β›”] Could not close connection: {e}") + + self.logger.notice( + f"[πŸ’Ž] Look what RoutingActivationTypes I've found that are not 'unsupported': {', '.join([f'{x:#x}' for x in rat_not_unsupported])}" + ) + return rat_success, rat_wrong_source + + async def enumerate_target_addresses( # noqa: PLR0913 + self, + tgt_hostname: str, + tgt_port: int, + correct_rat: int, + correct_src: int, + start: int, + stop: int, + timeout: None | float = None, + ) -> None: + known_targets = [] + responsive_targets = [] + search_space = range(start, stop + 1) + + conn = await self.create_DoIP_conn( + tgt_hostname, tgt_port, correct_rat, correct_src, 0xAFFE + ) + + for target_addr in search_space: + self.logger.info(f"[🚧] Attempting connection to {target_addr:#02x}") + + conn.target_addr = target_addr + + try: + req = DiagnosticSessionControlRequest(0x01) + await conn.write_diag_request(req.pdu) + + # If we reach this, the request was not denied due to unknown TargetAddress + known_targets.append( + f"doip://{tgt_hostname}:{tgt_port}?activation_type={correct_rat:#x}&src_addr={correct_src:#x}&target_addr={target_addr:#x}" + ) + self.logger.notice( + f"[πŸ₯‡] HEUREKA: target address {target_addr:#x} is valid! " + ) + async with aiofiles.open( + self.artifacts_dir.joinpath("3_valid_targets.txt"), "a" + ) as f: + await f.write(f"{known_targets[-1]}\n") + + # Hardcoded loop to detect potential broadcasts + while True: + pot_broadcast, data = await asyncio.wait_for( + self.read_diag_request_custom(conn), + TimingAndCommunicationParameters.DiagnosticMessageMessageTimeout + / 1000 + if timeout is None + else timeout, + ) + if pot_broadcast is None: + break + + self.logger.notice( + f"[πŸ€‘] B-B-B-B-B-B-BROADCAST at TargetAddress {target_addr:#x}! 
Got reply from {pot_broadcast:#x}" + ) + async with aiofiles.open( + self.artifacts_dir.joinpath("6_unsolicited_replies.txt"), "a" + ) as f: + await f.write( + f"target_addr={target_addr:#x} yielded reply from {pot_broadcast:#x}; could also be late answer triggered by previous address!\n" + ) + + resp = DiagnosticSessionControlResponse.parse_static(data) + self.logger.notice( + f"[πŸ₯³] It cannot get nicer: {target_addr:#x} responded: {resp}" + ) + responsive_targets.append(known_targets[-1]) + async with aiofiles.open( + self.artifacts_dir.joinpath("4_responsive_targets.txt"), "a" + ) as f: + await f.write(f"{known_targets[-1]}\n") + + except ( + BrokenPipeError + ) as e: # Though it's obvious: this error is raised when a DoIP NACK is received + error = DiagnosticMessageNegativeAckCodes(int(str(e).split(" ")[-1], 0)) + if error == DiagnosticMessageNegativeAckCodes.UnknownTargetAddress: + self.logger.info( + f"[πŸ«₯] {target_addr:#x} is an unknown target address" + ) + continue + else: + self.logger.warning( + f"[🀷] {target_addr:#x} is behaving strangely: {error.name}" + ) + async with aiofiles.open( + self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a" + ) as f: + await f.write(f"{target_addr:#x}: {error.name}\n") + continue + + except ( + asyncio.TimeoutError + ): # This triggers when no ACK is received, or ACK but no UDS reply + self.logger.info( + f"[πŸ™Š] Presumably no active ECU on target address {target_addr:#x}" + ) + async with aiofiles.open( + self.artifacts_dir.joinpath("5_unresponsive_targets.txt"), "a" + ) as f: + await f.write(f"{known_targets[-1]}\n") + continue + + except (ConnectionError, ConnectionResetError) as e: + # This triggers on unexpected connection errors; sometimes the connection is closed by the peer, not by us + self.logger.warn( + f"[🫦] Sexy, but unexpected: {target_addr:#x} triggered {e}" + ) + async with aiofiles.open( + self.artifacts_dir.joinpath("7_targets_with_errors.txt"), "a" + ) as f: + await f.write(f"{target_addr:#x}: {e}\n") + # Re-establish DoIP connection + await conn.close() + conn = await self.create_DoIP_conn( + tgt_hostname, tgt_port, correct_rat, correct_src, 0xAFFE + ) + continue + + try: + await conn.close() + except ConnectionResetError as e: + # This triggers when the connection is closed already, as conn.close() is not handling this + self.logger.warn(f"[β›”] could not close connection: {e}") + + self.logger.notice( + "[βš”οΈ] It's dangerous to test alone, take one of these known targets:" + ) + for item in known_targets: + self.logger.notice(item) + + self.logger.notice( + "[πŸ’°] For even more profit, try targets that actually responded:" + ) + for item in responsive_targets: + self.logger.notice(item) + + self.logger.notice( + f"[🧭] Check out the content of the log files at {self.artifacts_dir} as well!" 
+ ) + + async def create_DoIP_conn( # noqa: PLR0913 + self, + hostname: str, + port: int, + routing_activation_type: int, + src_addr: int, + target_addr: int, + ) -> DoIPConnection: + while True: + try: + conn = await DoIPConnection.connect( + hostname, + port, + src_addr, + target_addr, + ) + self.logger.info("[πŸ“«] Sending RoutingActivationRequest") + await conn.write_routing_activation_request( + RoutingActivationRequestTypes(routing_activation_type) + ) + except Exception as e: # TODO this probably is too broad + self.logger.warning( + f"[🫨] Got me some good errors when it should be working (dis an infinite loop): {e}" + ) + continue + return conn + + async def read_diag_request_custom( + self, conn: DoIPConnection + ) -> tuple[int | None, bytes]: + while True: + hdr, payload = await conn.read_frame() + if not isinstance(payload, DiagnosticMessage): + raise BrokenPipeError(f"[🧨] Unexpected DoIP message: {hdr} {payload}") + if payload.SourceAddress != conn.target_addr: + return payload.SourceAddress, payload.UserData + if payload.TargetAddress != conn.src_addr: + self.logger.warning( + f"[🀌] You talking to me?! Unexpected DoIP target address: {payload.TargetAddress:#04x}" + ) + continue + return None, payload.UserData + + async def enumerate_source_addresses( + self, + tgt_hostname: str, + tgt_port: int, + valid_routing_activation_types: Iterable[int], + ) -> list[str]: + known_sourceAddresses: list[int] = [] + denied_sourceAddresses: list[int] = [] + targets: list[str] = [] + for routing_activation_type, source_address in product( + valid_routing_activation_types, range(0x0000, 0x10000) + ): + try: + conn = await DoIPConnection.connect( + tgt_hostname, + tgt_port, + source_address, + 0xAFFE, + ) + except OSError as e: + self.logger.error(f"[🚨] Mr. Stark I don't feel so good: {e}") + return [] + + try: + await conn.write_routing_activation_request(routing_activation_type) + except ConnectionAbortedError as e: + # Let's utilize Gallia's excellent error handling + error = RoutingActivationResponseCodes[str(e).split(" ")[-1]] + self.logger.info( + f"[🌟] splendid, {source_address:#x} yields a {error.name}" + ) + + if error != RoutingActivationResponseCodes.UnknownSourceAddress: + denied_sourceAddresses.append(source_address) + async with aiofiles.open( + self.artifacts_dir.joinpath("2_denied_src_addresses.txt"), "a" + ) as f: + await f.write( + f"activation_type={routing_activation_type:#x},src_addr={source_address:#x}: {error.name}\n" + ) + + continue + + finally: + try: + await conn.close() + except ConnectionResetError as e: + # This triggers when the connection is closed already, as conn.close() is not handling this + self.logger.warn(f"[β›”] could not close connection: {e}") + + self.logger.info( + f"[🀯] Holy moly, it actually worked for activation_type {routing_activation_type:#x} and src_addr {source_address:#x}!!!" 
+ ) + known_sourceAddresses.append(source_address) + targets.append( + f"doip://{tgt_hostname}:{tgt_port}?activation_type={routing_activation_type:#x}&src_addr={source_address:#x}" + ) + async with aiofiles.open( + self.artifacts_dir.joinpath("1_valid_src_addresses.txt"), "a" + ) as f: + await f.write(f"{targets[-1]}\n") + + # Print valid SourceAddresses and suitable target string for config + self.logger.notice( + f"[πŸ’€] Look what SourceAddresses got denied: {', '.join([f'{x:#x}' for x in denied_sourceAddresses])}" + ) + self.logger.notice( + f"[πŸ’Ž] Look what valid SourceAddresses I've found: {', '.join([f'{x:#x}' for x in known_sourceAddresses])}" + ) + self.logger.notice("[βš”οΈ] It's dangerous to test alone, take one of these:") + for item in targets: + self.logger.notice(item) + return targets + + async def run_udp_discovery(self) -> list[tuple[str, int]]: + all_ips = [] + found = [] + + for iface in psutil.net_if_addrs().values(): + for ip in iface: + # we only work with broadcastable IPv4 + if ip.family != socket.AF_INET or ip.broadcast is None: + continue + all_ips.append(ip) + + for ip in all_ips: + self.logger.info( + f"[πŸ’Œ] Sending DoIP VehicleIdentificationRequest to {ip.broadcast}" + ) + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) + sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) + sock.settimeout(2) + sock.bind((ip.address, 0)) + + sock.sendto(b"\xff\x00\x00\x01\x00\x00\x00\x00", (ip.broadcast, 13400)) + try: + data, addr = sock.recvfrom(1024) + except TimeoutError: + self.logger.info("[πŸ’”] no response") + continue + finally: + sock.close() + + # Hardcoded slices + vin = data[8 : 8 + 17] + target_addr = int.from_bytes(data[25:27], "big") + self.logger.notice( + f"[πŸ’]: {addr} responded with VIN {vin.decode('ascii')} and target_addr {target_addr:#x}" + ) + found.append(addr) + + self.logger.notice("[πŸ’Ž] Look what valid hosts I've found:") + for item in found: + url = f"doip://{item[0]}:{item[1]}" + self.logger.notice(url) + async with aiofiles.open( + self.artifacts_dir.joinpath("0_valid_hosts.txt"), "a" + ) as f: + await f.write(f"{url}\n") + + return found diff --git a/src/gallia/commands/discover/uds/doip.py b/src/gallia/commands/discover/uds/doip.py deleted file mode 100644 index d45dea552..000000000 --- a/src/gallia/commands/discover/uds/doip.py +++ /dev/null @@ -1,160 +0,0 @@ -# SPDX-FileCopyrightText: AISEC Pentesting Team -# -# SPDX-License-Identifier: Apache-2.0 - -import asyncio -from argparse import Namespace - -from gallia.command import UDSDiscoveryScanner -from gallia.services.uds.core.service import ( - DiagnosticSessionControlRequest, - DiagnosticSessionControlResponse, -) -from gallia.services.uds.helpers import raise_for_mismatch -from gallia.transports.base import TargetURI -from gallia.transports.doip import ( - DiagnosticMessage, - DoIPConnection, - GenericHeader, - PayloadTypes, - ProtocolVersions, - RoutingActivationRequest, - RoutingActivationRequestTypes, -) -from gallia.utils import auto_int, write_target_list - - -class DoIPDiscoverer(UDSDiscoveryScanner): - """ECU and routing discovery scanner for DoIP""" - - SUBGROUP = "uds" - COMMAND = "doip" - SHORT_HELP = "DoIP enumeration scanner" - - def configure_parser(self) -> None: - self.parser.add_argument( - "--reversed", - action="store_true", - help="scan in reversed order", - ) - self.parser.add_argument( - "-r", - "--request-type", - default=RoutingActivationRequestTypes.WWH_OBD, - help="specify the routing request type", - ) - 
self.parser.add_argument( - "--src-addr", - type=auto_int, - default=0x0E00, - help="DoIP source address", - ) - self.parser.add_argument( - "--start", - metavar="INT", - type=auto_int, - default=0x00, - help="set start address", - ) - self.parser.add_argument( - "--stop", - metavar="INT", - type=auto_int, - default=0xFFFF, - help="set end address", - ) - - async def probe( # noqa: PLR0913 - self, - conn: DoIPConnection, - host: str, - port: int, - src_addr: int, - target_addr: int, - activation_type: RoutingActivationRequestTypes, - timeout: float, - ) -> TargetURI: - hdr = GenericHeader( - ProtocolVersion=ProtocolVersions.ISO_13400_2_2012, - PayloadType=PayloadTypes.RoutingActivationRequest, - PayloadLength=7, - PayloadTypeSpecificMessageContent=b"", - ) - routing_req = RoutingActivationRequest( - SourceAddress=src_addr, - ActivationType=activation_type, - Reserved=0x00, - ) - await conn.write_request_raw(hdr, routing_req) - - req = DiagnosticSessionControlRequest(0x01) - data = req.pdu - - hdr = GenericHeader( - ProtocolVersion=ProtocolVersions.ISO_13400_2_2012, - PayloadType=PayloadTypes.DiagnosticMessage, - PayloadLength=len(data) + 4, - PayloadTypeSpecificMessageContent=b"", - ) - payload = DiagnosticMessage( - SourceAddress=src_addr, - TargetAddress=target_addr, - UserData=data, - ) - await asyncio.wait_for(conn.write_request_raw(hdr, payload), timeout) - - _, diag_msg = await asyncio.wait_for(conn.read_diag_request_raw(), timeout) - - resp = DiagnosticSessionControlResponse.parse_static(diag_msg.UserData) - raise_for_mismatch(req, resp) - - return TargetURI.from_parts( - "doip", - host, - port, - { - "src_addr": hex(diag_msg.TargetAddress), - "dst_addr": hex(diag_msg.SourceAddress), - "activation_type": activation_type.value, - }, - ) - - async def main(self, args: Namespace) -> None: - found = [] - src_gen = ( - range(args.stop + 1, args.start) - if args.reversed - else range(args.start, args.stop + 1) - ) - - for target_addr in src_gen: - self.logger.info(f"testing target {target_addr:#02x}") - conn = await DoIPConnection.connect( - args.target.hostname, - args.target.port, - args.src_addr, - target_addr, - ) - - try: - target = await self.probe( - conn, - args.target.hostname, - args.target.port, - args.src_addr, - target_addr, - args.request_type, - args.timeout, - ) - except (ConnectionError, asyncio.TimeoutError): - continue - finally: - await conn.close() - - self.logger.info(f"found {target_addr:#02x}") - found.append(target) - - self.logger.result(f"Found {len(found)} targets") - ecus_file = self.artifacts_dir.joinpath("ECUs.txt") - self.logger.result(f"Writing urls to file: {ecus_file}") - await write_target_list(ecus_file, found, self.db_handler)
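Note on the UDP discovery added in this patch: run_udp_discovery() builds the VehicleIdentificationRequest by hand and pulls the VIN and logical address out of the reply with hardcoded slices (data[8:25] and data[25:27]). The sketch below spells out the byte layout those slices assume. It is a minimal standalone illustration only: parse_vehicle_announcement, the constant name, and the fabricated example datagram are hypothetical and appear nowhere in the diff; the offsets mirror the patch's slices and the ISO 13400-2 VehicleAnnouncementMessage layout.

# Illustrative sketch only, not part of the patch: decodes the same layout
# that run_udp_discovery() relies on via data[8 : 8 + 17] and data[25:27].
import struct

# The broadcast probe sent by the scanner: protocol version 0xFF ("default"),
# inverse version 0x00, payload type 0x0001 (VehicleIdentificationRequest),
# payload length 0.
VEHICLE_IDENTIFICATION_REQUEST = b"\xff\x00\x00\x01\x00\x00\x00\x00"


def parse_vehicle_announcement(data: bytes) -> tuple[str, int]:
    """Return (VIN, logical address) from a DoIP VehicleAnnouncementMessage."""
    # Generic DoIP header: version, inverse version, payload type, payload length.
    version, inv_version, payload_type, payload_length = struct.unpack_from("!BBHI", data, 0)
    if version ^ 0xFF != inv_version or payload_type != 0x0004:
        raise ValueError(f"unexpected DoIP header: {data[:8].hex()}")
    if payload_length < 17 + 2 or len(data) < 8 + payload_length:
        raise ValueError("truncated VehicleAnnouncementMessage")
    vin = data[8 : 8 + 17].decode("ascii")  # bytes 8..24: VIN, same slice as the patch
    logical_address = int.from_bytes(data[25:27], "big")  # bytes 25..26: logical address
    return vin, logical_address


if __name__ == "__main__":
    # Fabricated example datagram (8 byte header + 33 byte payload), not a captured response.
    example = (
        bytes([0x02, 0xFD, 0x00, 0x04, 0x00, 0x00, 0x00, 0x21])  # version 0x02, type 0x0004
        + b"WAUZZZ8V9KA000000"  # 17 byte VIN
        + (0x0E80).to_bytes(2, "big")  # logical address of the announcing entity
        + bytes(14)  # EID, GID, further action byte, sync status
    )
    print(parse_vehicle_announcement(example))  # ('WAUZZZ8V9KA000000', 3712)

If a reply does not match this layout, the scanner's hardcoded slices would silently decode garbage, which is why the sketch validates the header and payload length first.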