Skip to content

Commit

Permalink
Fixed a problem when a journal bud is not mapped
Browse files Browse the repository at this point in the history
Fixed a problem when a corrupted inode with invalid ino_size field (e.g. ludicrously huge size) is being dumped
Fixed a problem caused when decompressing data fails
  • Loading branch information
matthias-deu committed Jul 16, 2023
1 parent 0000cba commit 21a9350
Show file tree
Hide file tree
Showing 4 changed files with 25 additions and 16 deletions.
7 changes: 2 additions & 5 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -231,8 +231,5 @@ fabric.properties
# libreoffice files
**/.~lock*

# ocrmypdf docker container
data/ocrmypdf.tar

# protege tempfiles
data/catalog-v001.xml
# local files
/utils/
2 changes: 1 addition & 1 deletion ubift/cli/renderer.py
Original file line number Diff line number Diff line change
Expand Up @@ -364,7 +364,7 @@ def write_to_file(inode: UBIFS_INO_NODE, data_nodes: List[UBIFS_DATA_NODE], abs_

accu_size += len(data_node.decompressed_data)

if inode.ino_size > accu_size:
if inode.ino_size > accu_size and accu_size > 0:
ubiftlog.warning(
f"[!] Size from inode field 'size' ({inode.ino_size}) is more than written bytes {accu_size}. Filling bytes with zeroes.")
f.seek(inode.ino_size)
Expand Down
29 changes: 19 additions & 10 deletions ubift/framework/compression.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,9 @@
import lzo

from ubift import exception
from ubift.framework.structs.ubifs_structs import UBIFS_COMPRESSION_TYPE
from ubift.logging import ubiftlog


def decompress(data: bytes, compr_type: int, size: int = None) -> bytes:
    """
    Decompresses data that is compressed with one of the compression types used by UBIFS.

    :param data: Compressed data
    :param compr_type: Compression type, see UBIFS_COMPRESSION_TYPE
    :param size: Size of buffer length that will fit output, needed by LZO-compression, for other compression methods this value does not matter. Value for this can be found in UBIFS_DATA_NODE
    :return: Uncompressed data, or empty bytes if decompression of corrupted data fails
    :raises UBIFTException: If compr_type is not a known compression type
    """
    if compr_type == 0:  # UBIFS_COMPRESSION_TYPE.UBIFS_COMPR_NONE
        # Uncompressed payload, nothing can fail here.
        return data

    # Reject unknown types *before* entering the try-block. Raising inside the
    # try would let the generic handler swallow the UBIFTException, and the
    # handler's log line would then crash with a ValueError because
    # UBIFS_COMPRESSION_TYPE(compr_type) cannot be built from an unknown value.
    if compr_type not in (1, 2, 3):
        raise exception.UBIFTException(f"Data is compressed with unknown type. ({compr_type})")

    try:
        if compr_type == 1:  # UBIFS_COMPRESSION_TYPE.UBIFS_COMPR_LZO
            return lzo.decompress(data, False, size)
        elif compr_type == 2:  # UBIFS_COMPRESSION_TYPE.UBIFS_COMPR_ZLIB
            # Negative wbits: raw deflate stream without zlib header/checksum.
            return zlib.decompress(data, -zlib.MAX_WBITS)
        else:  # compr_type == 3, UBIFS_COMPRESSION_TYPE.UBIFS_COMPR_ZSTD
            return zstandard.decompress(data, size)
    except Exception as e:
        # Corrupted data must not abort the whole dump; log the failure and
        # return an empty payload instead. 'warning' replaces the deprecated
        # 'warn' alias. compr_type is known-valid here, so building the enum
        # for the log message cannot raise.
        ubiftlog.warning(
            f"[-] Error while decompressing data using {UBIFS_COMPRESSION_TYPE(compr_type).name}: {e}")
        return bytes()

3 changes: 3 additions & 0 deletions ubift/framework/ubifs.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,9 @@ def _parse_bud(self, jhead: int) -> list:
ubiftlog.info(f"[!] Parsing bud {UBIFS_JOURNAL_HEADS(jhead)}")

ref_node = self._jheads[jhead]
if ref_node.lnum not in self._ubifs.ubi_volume.lebs:
ubiftlog.info(f"[-] Cannot parse bud {UBIFS_JOURNAL_HEADS(jhead)} because referenced LEB {ref_node.lnum} is not mapped.")
return []
leb = self._ubifs.ubi_volume.lebs[ref_node.lnum]
leb_offs = ref_node.offs

Expand Down

0 comments on commit 21a9350

Please sign in to comment.