Skip to content

Commit 7e2ae39

Browse files
authored (author name lost in page extraction)
Merge pull request #78 from mrexodia/module-parsing
Rewrite module parsing to be more robust
2 parents bde6186 + f937c7a commit 7e2ae39

File tree

2 files changed

+42
-30
lines changed

2 files changed

+42
-30
lines changed

src/dumpulator/dumpulator.py

Lines changed: 39 additions & 30 deletions
Original file line numberDiff line numberDiff line change
@@ -799,38 +799,47 @@ def _setup_modules(self):
799799
size = minidump_module.size
800800
path = minidump_module.name
801801

802-
# Parse the header to dump the sections from memory
803-
header = self.read(base, PAGE_SIZE)
804-
pe = PE(data=header, fast_load=True)
805-
image_size = pe.OPTIONAL_HEADER.SizeOfImage
806-
section_alignment = pe.OPTIONAL_HEADER.SectionAlignment
807-
mapped_data = bytearray(header)
808-
mapped_data += b"\0" * (image_size - len(header))
809-
for section in pe.sections:
810-
name = section.Name.rstrip(b"\0").decode()
811-
mask = section_alignment - 1
812-
rva = (section.VirtualAddress + mask) & ~mask
813-
size = self.memory.align_page(section.Misc_VirtualSize)
814-
va = base + rva
815-
for page in range(va, va + size, PAGE_SIZE):
816-
region = self.memory.find_commit(page)
817-
if region is not None:
818-
region.info = name
819-
try:
820-
data = self.read(va, size)
821-
mapped_data[rva:size] = data
822-
except IndexError:
823-
self.error(f"Failed to read section {name} from module {path}")
824-
# Load the PE dumped from memory
825-
pe = PE(data=mapped_data, fast_load=True)
826-
# Hack to adjust pefile to accept in-memory modules
827-
for section in pe.sections:
828-
# Potentially interesting members: Misc_PhysicalAddress, Misc_VirtualSize, SizeOfRawData
829-
section.PointerToRawData = section.VirtualAddress
830-
section.PointerToRawData_adj = section.VirtualAddress
802+
# Read as much data from the module memory as possible
803+
try:
804+
mapped_data = self.read(base, size)
805+
except IndexError:
806+
# HACK: modules with holes between sections need to be read in chunks
807+
mapped_data = bytearray(size)
808+
ptr = base
809+
while ptr < base + size:
810+
region = self.memory.query(ptr)
811+
if region.state == MemoryState.MEM_COMMIT:
812+
data = self.read(region.base, region.region_size)
813+
index = region.base - base
814+
mapped_data[index:index + len(data)] = data
815+
ptr += region.region_size
816+
assert len(mapped_data) == size
817+
818+
try:
819+
# Load the PE dumped from memory
820+
pe = PE(data=mapped_data, fast_load=True)
821+
section_alignment = pe.OPTIONAL_HEADER.SectionAlignment
822+
for section in pe.sections:
823+
# Set the section in the memory region
824+
name = section.Name.rstrip(b"\0").decode(encoding="ascii", errors="backslashreplace")
825+
mask = section_alignment - 1
826+
index = (section.VirtualAddress + mask) & ~mask
827+
section_size = self.memory.align_page(section.Misc_VirtualSize)
828+
va = base + index
829+
for page in range(va, va + section_size, PAGE_SIZE):
830+
region = self.memory.find_commit(page)
831+
if region is not None:
832+
region.info = name
833+
# HACK: adjust pefile to accept in-memory modules
834+
# Potentially interesting members: Misc_PhysicalAddress, Misc_VirtualSize, SizeOfRawData
835+
section.PointerToRawData = section.VirtualAddress
836+
section.PointerToRawData_adj = section.VirtualAddress
837+
except pefile.PEFormatError as e:
838+
self.error(f"Failed to parse module {hex(base)}[{hex(size)}]: {path}")
839+
831840
# Do not trust these values from memory
832841
pe.OPTIONAL_HEADER.ImageBase = base
833-
pe.OPTIONAL_HEADER.ImageSize = size
842+
pe.OPTIONAL_HEADER.SizeOfImage = size
834843
self.modules.add(pe, path)
835844

836845
def _setup_syscalls(self):

src/dumpulator/memory.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -412,3 +412,6 @@ def set_region_info(self, addr: int, info: Any, *, size=0):
412412
return False
413413
region.info = info
414414
return True
415+
416+
def __repr__(self):
417+
return f"MemoryManager(regions={len(self._regions)}, committed={len(self._committed)})"

0 commit comments

Comments (0)