Merge branch 'main' into feature/unix-dhcp-lease
Horofic authored Dec 11, 2024
2 parents 8e750fb + 334e6a4 commit 516f15f
Showing 7 changed files with 115 additions and 48 deletions.
15 changes: 9 additions & 6 deletions dissect/target/filesystems/dir.py
@@ -27,12 +27,7 @@ def __repr__(self) -> str:
def _detect(fh: BinaryIO) -> bool:
raise TypeError("Detect is not allowed on DirectoryFilesystem class")

def get(self, path: str) -> FilesystemEntry:
path = path.strip("/")

if not path:
return DirectoryFilesystemEntry(self, "/", self.base_path)

def _resolve_path(self, path: str) -> Path:
if not self.case_sensitive:
searchpath = self.base_path

@@ -48,6 +43,14 @@ def get(self, path: str) -> FilesystemEntry:
else:
entry = self.base_path.joinpath(path.strip("/"))

return entry

def get(self, path: str) -> FilesystemEntry:
if not (path := path.strip("/")):
return DirectoryFilesystemEntry(self, "/", self.base_path)

entry = self._resolve_path(path)

try:
entry.lstat()
return DirectoryFilesystemEntry(self, path, entry)
5 changes: 4 additions & 1 deletion dissect/target/filesystems/zip.py
@@ -56,14 +56,17 @@ def __init__(
if not mname.startswith(self.base) or mname == ".":
continue

rel_name = fsutil.normpath(mname[len(self.base) :], alt_separator=self.alt_separator)
rel_name = self._resolve_path(mname)
self._fs.map_file_entry(rel_name, ZipFilesystemEntry(self, rel_name, member))

@staticmethod
def _detect(fh: BinaryIO) -> bool:
"""Detect a zip file on a given file-like object."""
return zipfile.is_zipfile(fh)

def _resolve_path(self, path: str) -> str:
return fsutil.normpath(path[len(self.base) :], alt_separator=self.alt_separator)

def get(self, path: str, relentry: FilesystemEntry = None) -> FilesystemEntry:
"""Returns a ZipFilesystemEntry object corresponding to the given path."""
return self._fs.get(path, relentry=relentry)
16 changes: 13 additions & 3 deletions dissect/target/loaders/dir.py
@@ -36,13 +36,23 @@ def find_entry_path(path: Path) -> str | None:
return prefix


def map_dirs(target: Target, dirs: list[Path | tuple[str, Path]], os_type: str, **kwargs) -> None:
def map_dirs(
target: Target,
dirs: list[Path | tuple[str, Path]],
os_type: str,
*,
dirfs: type[DirectoryFilesystem] = DirectoryFilesystem,
zipfs: type[ZipFilesystem] = ZipFilesystem,
**kwargs,
) -> None:
"""Map directories as filesystems into the given target.
Args:
target: The target to map into.
dirs: The directories to map as filesystems. If a list member is a tuple, the first element is the drive letter.
os_type: The operating system type, used to determine how the filesystem should be mounted.
dirfs: The filesystem class to use for directory filesystems.
zipfs: The filesystem class to use for ZIP filesystems.
"""
alt_separator = ""
case_sensitive = True
@@ -59,9 +69,9 @@ def map_dirs(target: Target, dirs: list[Path | tuple[str, Path]], os_type: str,
drive_letter = path.name[0]

if isinstance(path, zipfile.Path):
dfs = ZipFilesystem(path.root.fp, path.at, alt_separator=alt_separator, case_sensitive=case_sensitive)
dfs = zipfs(path.root.fp, path.at, alt_separator=alt_separator, case_sensitive=case_sensitive)
else:
dfs = DirectoryFilesystem(path, alt_separator=alt_separator, case_sensitive=case_sensitive)
dfs = dirfs(path, alt_separator=alt_separator, case_sensitive=case_sensitive)

drive_letter_map[drive_letter].append(dfs)

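The new keyword-only dirfs and zipfs parameters let a loader substitute its own filesystem classes without touching the mapping logic; the Velociraptor loader below uses exactly this hook. A minimal sketch of the pattern, where MyCaseFoldingFilesystem and map_my_dirs are hypothetical names and not part of this commit:

from __future__ import annotations

from pathlib import Path

from dissect.target import Target
from dissect.target.filesystems.dir import DirectoryFilesystem
from dissect.target.loaders.dir import map_dirs
from dissect.target.plugin import OperatingSystem


class MyCaseFoldingFilesystem(DirectoryFilesystem):
    """Hypothetical subclass that tweaks how requested paths are resolved."""

    def _resolve_path(self, path: str) -> Path:
        # Adjust the requested path before the stock DirectoryFilesystem lookup runs.
        return super()._resolve_path(path.lower())


def map_my_dirs(target: Target, dirs: list[Path]) -> None:
    # map_dirs() instantiates the dirfs class for every collected directory,
    # so the subclass transparently replaces DirectoryFilesystem for this loader.
    map_dirs(target, dirs, OperatingSystem.UNIX, dirfs=MyCaseFoldingFilesystem)
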
50 changes: 35 additions & 15 deletions dissect/target/loaders/velociraptor.py
@@ -4,7 +4,11 @@
import zipfile
from pathlib import Path
from typing import TYPE_CHECKING
from urllib.parse import quote, unquote

from dissect.target.filesystems.dir import DirectoryFilesystem
from dissect.target.filesystems.zip import ZipFilesystem
from dissect.target.helpers.fsutil import basename, dirname, join
from dissect.target.loaders.dir import DirLoader, find_dirs, map_dirs
from dissect.target.plugin import OperatingSystem

@@ -87,11 +91,13 @@ def __init__(self, path: Path, **kwargs):
super().__init__(path)

if path.suffix == ".zip":
log.warning(
f"Velociraptor target {path!r} is compressed, which will slightly affect performance. "
"Consider uncompressing the archive and passing the uncompressed folder to Dissect."
)
self.root = zipfile.Path(path.open("rb"))
if self.root.root.getinfo("uploads.json").compress_type > 0:
log.warning(
"Velociraptor target '%s' is compressed, which will slightly affect performance. "
"Consider uncompressing the archive and passing the uncompressed folder to Dissect.",
path,
)
else:
self.root = path

@@ -116,14 +122,28 @@ def detect(path: Path) -> bool:

def map(self, target: Target) -> None:
os_type, dirs = find_fs_directories(self.root)
if os_type == OperatingSystem.WINDOWS:
# Velociraptor doesn't have the correct filenames for the paths "$J" and "$Secure:$SDS"
map_dirs(
target,
dirs,
os_type,
usnjrnl_path="$Extend/$UsnJrnl%3A$J",
sds_path="$Secure%3A$SDS",
)
else:
map_dirs(target, dirs, os_type)

# Velociraptor URL-encodes paths before storing them in a collection, which leaves plugins unable to find
# these paths. To work around this, for a zip file the path names are URL-decoded before mapping into the
# VFS, and for a directory the paths are URL-encoded at lookup time.
map_dirs(
target,
dirs,
os_type,
dirfs=VelociraptorDirectoryFilesystem,
zipfs=VelociraptorZipFilesystem,
)


class VelociraptorDirectoryFilesystem(DirectoryFilesystem):
def _resolve_path(self, path: str) -> Path:
path = quote(path, safe="$/% ")
if (fname := basename(path)).startswith("."):
path = join(dirname(path), fname.replace(".", "%2E", 1))

return super()._resolve_path(path)


class VelociraptorZipFilesystem(ZipFilesystem):
def _resolve_path(self, path: str) -> str:
return unquote(super()._resolve_path(path))
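
For reference, the round trip these two subclasses rely on, using the standard-library quote/unquote helpers and names taken from the tests below. This is an illustration only and simplifies the basename handling in VelociraptorDirectoryFilesystem:

from urllib.parse import quote, unquote

# A zip member name as Velociraptor writes it; VelociraptorZipFilesystem
# unquotes member names up front, so the VFS exposes the real NTFS path.
stored = "%5C%5C.%5CC%3A"
print(unquote(stored))  # \\.\C:

# A path a plugin asks for; VelociraptorDirectoryFilesystem re-quotes it at
# lookup time so it matches the URL-encoded directory name on disk.
requested = ".TEST"
encoded = quote(requested, safe="$/% ")
if encoded.startswith("."):
    # quote() leaves "." untouched, so a leading dot is encoded explicitly,
    # matching the "%2ETEST" entry created by Velociraptor.
    encoded = encoded.replace(".", "%2E", 1)
print(encoded)  # %2ETEST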
2 changes: 1 addition & 1 deletion pyproject.toml
@@ -80,7 +80,7 @@ full = [
"zstandard",
]
dev = [
"dissect.target[full,mqtt,yara]",
"dissect.target[full,yara]",
"dissect.archive[dev]>=1.0.dev,<2.0.dev",
"dissect.btrfs[dev]>=1.0.dev,<2.0.dev",
"dissect.cim[dev]>=3.0.dev,<4.0.dev",
36 changes: 28 additions & 8 deletions tests/loaders/test_mqtt.py
@@ -1,22 +1,23 @@
from __future__ import annotations

import argparse
import sys
import time
from dataclasses import dataclass
from struct import pack
from typing import Iterator
from unittest.mock import MagicMock, patch

import paho.mqtt.client as mqtt
import pytest

from dissect.target import Target
from dissect.target.loaders.mqtt import Broker, MQTTConnection, case


class MQTTMock(MagicMock):
disks = []
hostname = ""
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.disks: list = []
self.hostname: str = ""

def fill_disks(self, sizes: list[int]) -> None:
self.disks = []
@@ -51,6 +52,17 @@ def publish(self, topic: str, *args) -> None:
self.on_message(self, None, response)


@pytest.fixture
def mock_paho(monkeypatch: pytest.MonkeyPatch) -> Iterator[MagicMock]:
with monkeypatch.context() as m:
mock_paho = MagicMock()
m.setitem(sys.modules, "paho", mock_paho)
m.setitem(sys.modules, "paho.mqtt", mock_paho.mqtt)
m.setitem(sys.modules, "paho.mqtt.client", mock_paho.mqtt.client)

yield mock_paho


@dataclass
class MockSeekMessage:
data: bytes = b""
@@ -87,7 +99,7 @@ def mock_broker() -> Iterator[MockBroker]:
@patch.object(time, "sleep") # improve speed during test, no need to wait for peers
def test_remote_loader_stream(
time: MagicMock,
monkeypatch: pytest.MonkeyPatch,
mock_paho: MagicMock,
alias: str,
hosts: list[str],
disks: list[int],
@@ -96,7 +108,9 @@ def test_remote_loader_stream(
read: int,
expected: bytes,
) -> None:
monkeypatch.setattr(mqtt, "Client", MQTTMock)
mock_paho.mqtt.client.Client.return_value = MQTTMock()

from dissect.target.loaders.mqtt import Broker

broker = Broker("0.0.0.0", "1884", "key", "crt", "ca", "case1", "user", "pass")
broker.connect()
@@ -116,7 +130,9 @@ def test_mqtt_loader_prefetch(mock_broker: MockBroker) -> None:
assert data == expected


def test_mqtt_loader_prefetch(mock_broker: MockBroker) -> None:
def test_mqtt_loader_prefetch(mock_broker: MockBroker, mock_paho: MagicMock) -> None:
from dissect.target.loaders.mqtt import MQTTConnection

connection = MQTTConnection(mock_broker, "")
connection.prefetch_factor_inc = 10
assert connection.factor == 1
@@ -155,7 +171,11 @@ def test_mqtt_loader_prefetch(mock_broker: MockBroker) -> None:
"invalid_case_empty",
],
)
def test_case(case_name, parse_result: str | pytest.RaisesContext[argparse.ArgumentTypeError]) -> None:
def test_case(
case_name, parse_result: str | pytest.RaisesContext[argparse.ArgumentTypeError], mock_paho: MagicMock
) -> None:
from dissect.target.loaders.mqtt import case

if isinstance(parse_result, str):
assert case(case_name) == parse_result
else:
39 changes: 25 additions & 14 deletions tests/loaders/test_velociraptor.py
@@ -17,12 +17,15 @@ def create_root(sub_dir: str, tmp_path: Path) -> Path:
f"uploads/{sub_dir}/%5C%5C%3F%5CGLOBALROOT%5CDevice%5CHarddiskVolumeShadowCopy1/",
f"uploads/{sub_dir}/%5C%5C%3F%5CGLOBALROOT%5CDevice%5CHarddiskVolumeShadowCopy1/$Extend",
f"uploads/{sub_dir}/%5C%5C%3F%5CGLOBALROOT%5CDevice%5CHarddiskVolumeShadowCopy1/windows/system32",
f"uploads/{sub_dir}/%5C%5C.%5CC%3A/%2ETEST",
]
root = tmp_path
mkdirs(root, paths)

(root / "uploads.json").write_bytes(b"{}")
(root / f"uploads/{sub_dir}/%5C%5C.%5CC%3A/C-DRIVE.txt").write_bytes(b"{}")
(root / f"uploads/{sub_dir}/%5C%5C.%5CC%3A/Microsoft-Windows-Windows Defender%254WHC.evtx").write_bytes(b"{}")
(root / f"uploads/{sub_dir}/%5C%5C.%5CC%3A/other.txt").write_text("my first file")

with open(absolute_path("_data/plugins/filesystem/ntfs/mft/mft.raw"), "rb") as fh:
mft = fh.read(10 * 1025)
@@ -54,8 +57,6 @@ def create_root(sub_dir: str, tmp_path: Path) -> Path:
)
def test_windows_ntfs(sub_dir: str, other_dir: str, target_bare: Target, tmp_path: Path) -> None:
root = create_root(sub_dir, tmp_path)
root.joinpath(f"uploads/{other_dir}/C%3A").mkdir(parents=True, exist_ok=True)
root.joinpath(f"uploads/{other_dir}/C%3A/other.txt").write_text("my first file")

assert VelociraptorLoader.detect(root) is True

@@ -72,8 +73,11 @@ def test_windows_ntfs(sub_dir: str, other_dir: str, target_bare: Target, tmp_pat
usnjrnl_records += len(list(fs.ntfs.usnjrnl.records()))
assert usnjrnl_records == 2
assert len(target_bare.filesystems) == 4

assert target_bare.fs.path("sysvol/C-DRIVE.txt").exists()
assert target_bare.fs.path("sysvol/other.txt").read_text() == "my first file"
assert target_bare.fs.path("sysvol/.TEST").exists()
assert target_bare.fs.path("sysvol/Microsoft-Windows-Windows Defender%254WHC.evtx").exists()


@pytest.mark.parametrize(
@@ -102,17 +106,19 @@ def test_windows_ntfs_zip(sub_dir: str, target_bare: Target, tmp_path: Path) ->
assert usnjrnl_records == 2
assert len(target_bare.filesystems) == 4
assert target_bare.fs.path("sysvol/C-DRIVE.txt").exists()
assert target_bare.fs.path("sysvol/.TEST").exists()
assert target_bare.fs.path("sysvol/Microsoft-Windows-Windows Defender%4WHC.evtx").exists()


@pytest.mark.parametrize(
"paths",
[
(["uploads/file/etc", "uploads/file/var"]),
(["uploads/auto/etc", "uploads/auto/var"]),
(["uploads/file/etc", "uploads/file/var", "uploads/file/opt"]),
(["uploads/auto/etc", "uploads/auto/var", "uploads/auto/opt"]),
(["uploads/file/Library", "uploads/file/Applications"]),
(["uploads/auto/Library", "uploads/auto/Applications"]),
(["uploads/file/etc", "uploads/file/var", "uploads/file/%2ETEST"]),
(["uploads/auto/etc", "uploads/auto/var", "uploads/auto/%2ETEST"]),
(["uploads/file/etc", "uploads/file/var", "uploads/file/opt", "uploads/file/%2ETEST"]),
(["uploads/auto/etc", "uploads/auto/var", "uploads/auto/opt", "uploads/auto/%2ETEST"]),
(["uploads/file/Library", "uploads/file/Applications", "uploads/file/%2ETEST"]),
(["uploads/auto/Library", "uploads/auto/Applications", "uploads/auto/%2ETEST"]),
],
)
def test_unix(paths: list[str], target_bare: Target, tmp_path: Path) -> None:
@@ -125,19 +131,22 @@ def test_unix(paths: list[str], target_bare: Target, tmp_path: Path) -> None:

loader = VelociraptorLoader(root)
loader.map(target_bare)
target_bare.apply()

assert len(target_bare.filesystems) == 1
assert target_bare.fs.path("/.TEST").exists()


@pytest.mark.parametrize(
"paths",
[
(["uploads/file/etc", "uploads/file/var"]),
(["uploads/auto/etc", "uploads/auto/var"]),
(["uploads/file/etc", "uploads/file/var", "uploads/file/opt"]),
(["uploads/auto/etc", "uploads/auto/var", "uploads/auto/opt"]),
(["uploads/file/Library", "uploads/file/Applications"]),
(["uploads/auto/Library", "uploads/auto/Applications"]),
(["uploads/file/etc", "uploads/file/var", "uploads/file/%2ETEST"]),
(["uploads/file/etc", "uploads/file/var", "uploads/file/%2ETEST"]),
(["uploads/auto/etc", "uploads/auto/var", "uploads/auto/%2ETEST"]),
(["uploads/file/etc", "uploads/file/var", "uploads/file/opt", "uploads/file/%2ETEST"]),
(["uploads/auto/etc", "uploads/auto/var", "uploads/auto/opt", "uploads/auto/%2ETEST"]),
(["uploads/file/Library", "uploads/file/Applications", "uploads/file/%2ETEST"]),
(["uploads/auto/Library", "uploads/auto/Applications", "uploads/auto/%2ETEST"]),
],
)
def test_unix_zip(paths: list[str], target_bare: Target, tmp_path: Path) -> None:
@@ -153,5 +162,7 @@ def test_unix_zip(paths: list[str], target_bare: Target, tmp_path: Path) -> None

loader = VelociraptorLoader(zip_path)
loader.map(target_bare)
target_bare.apply()

assert len(target_bare.filesystems) == 1
assert target_bare.fs.path("/.TEST").exists()
