refactor: implement dataclass-based parser🧼
lxndrblz authored Jan 10, 2024
2 parents 0f6bd02 + f347ae2 commit dc2571d
Showing 7 changed files with 248 additions and 340 deletions.
21 changes: 14 additions & 7 deletions .github/workflows/build.yaml
@@ -22,16 +22,23 @@ jobs:
run: pyinstaller "main.spec"
- name: Run conversion ↩️
run: |
.\dist\ms_teams_parser.exe -f ".\forensicsim-data\jane_doe_v_1_4_00_11161\IndexedDB\https_teams.microsoft.com_0.indexeddb.leveldb" -o "jane_doe.json"
.\dist\ms_teams_parser.exe -f ".\forensicsim-data\john_doe_v_1_4_00_11161\IndexedDB\https_teams.microsoft.com_0.indexeddb.leveldb" -o "john_doe.json"
.\dist\ms_teams_parser.exe -f ".\forensicsim-data\jane_doe_v_1_4_00_11161\IndexedDB\https_teams.microsoft.com_0.indexeddb.leveldb" -o "jane_doe_v_1_4_00_11161.json"
.\dist\ms_teams_parser.exe -f ".\forensicsim-data\john_doe_v_1_4_00_11161\IndexedDB\https_teams.microsoft.com_0.indexeddb.leveldb" -o "john_doe_v_1_4_00_11161.json"
- name: Upload results📲
uses: actions/upload-artifact@v4
with:
name: generated-outputs
path: |
jane_doe_v_1_4_00_11161.json
john_doe_v_1_4_00_11161.json
retention-days: 1
- name: Test calling script 📞
run: |
python tools/main.py --help
python tools/dump_leveldb.py --help
python tools/dump_localstorage.py --help
python tools/dump_sessionstorage.py --help
# python utils/populate_teams.py --help
# python utils/populate_teams_2.py --help
# python utils/populate_skype.py --help
# - name: Calculate diff 👽
# run: git diff --no-index --word-diff expected_output/john_doe.json current_output.json
- name: Calculate diff 👽
run: |
git diff --no-index --word-diff .\forensicsim-data\expected-result\jane_doe_v_1_4_00_11161.json jane_doe_v_1_4_00_11161.json
git diff --no-index --word-diff .\forensicsim-data\expected-result\john_doe_v_1_4_00_11161.json john_doe_v_1_4_00_11161.json
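The "Calculate diff" step above compares the generated output against the checked-in baseline textually with `git diff --no-index`. For a quick local sanity check, the same comparison can be done on the parsed JSON instead (a minimal sketch; the paths mirror the workflow step and may need adjusting to your checkout layout):

```python
import json
from pathlib import Path

# Compare the freshly generated output against the checked-in expected result.
# Paths are illustrative and follow the CI step above.
expected = json.loads(
    Path("forensicsim-data/expected-result/jane_doe_v_1_4_00_11161.json").read_text(encoding="utf-8")
)
actual = json.loads(Path("jane_doe_v_1_4_00_11161.json").read_text(encoding="utf-8"))
print("outputs match" if expected == actual else "outputs differ")
```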
2 changes: 2 additions & 0 deletions .gitignore
@@ -129,3 +129,5 @@ dmypy.json

venv/
env/

test/
8 changes: 5 additions & 3 deletions pyproject.toml
@@ -17,6 +17,7 @@ dependencies = [
"beautifulsoup4",
"click",
"chromedb @ git+https://github.com/karelze/ccl_chrome_indexeddb@master",
"dataclasses-json",
"pause",
"pyautogui",
"pywinauto"
@@ -50,12 +51,13 @@ select = [
"F", # pyflakes
"FURB", # refurb
"I", # isort
"PIE", # misc lints
"PT", # pytest
"PGH", # pygrep
"PIE", # misc lints
# "PTH", # flake8-use-pathlib
"RET", # return
"RUF", # ruff-specific rules
"UP", # pyupgrade
"SIM", # flake8-simplify
"UP", # pyupgrade
]

include = ["*.py", "*.pyi", "**/pyproject.toml"]
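The new `dataclasses-json` dependency is what backs the dataclass-based parser named in the commit title. A minimal sketch of how the library is typically used (the `Message` fields here are hypothetical and not the parser's actual schema):

```python
from dataclasses import dataclass
from typing import Optional

from dataclasses_json import dataclass_json


@dataclass_json
@dataclass
class Message:
    # Hypothetical record shape, for illustration only.
    client_message_id: Optional[str] = None
    content: Optional[str] = None
    created_time: Optional[str] = None


# from_dict/to_json are added by the @dataclass_json decorator.
record = Message.from_dict({"content": "hello", "created_time": "2024-01-10"})
print(record.to_json())
```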
16 changes: 8 additions & 8 deletions src/forensicsim/backend.py
@@ -25,6 +25,7 @@
import io
import json
import os
from pathlib import Path

from chromedb import (
ccl_blink_value_deserializer,
@@ -121,7 +122,7 @@ def fetch_data(self):
(
objstore_id,
varint_raw,
) = ccl_chromium_indexeddb.le_varint_from_bytes(
) = ccl_chromium_indexeddb.custom_le_varint_from_bytes(
record.key[len(prefix_objectstore) :]
)
except TypeError:
@@ -191,7 +192,7 @@ def iterate_records(self, do_not_filter=False):
(
_value_version,
varint_raw,
) = ccl_chromium_indexeddb.le_varint_from_bytes(
) = ccl_chromium_indexeddb.custom_le_varint_from_bytes(
record.value
)
val_idx = len(varint_raw)
@@ -202,9 +203,9 @@
val_idx += 1

(
_blink_version,
_,
varint_raw,
) = ccl_chromium_indexeddb.le_varint_from_bytes(
) = ccl_chromium_indexeddb.custom_le_varint_from_bytes(
record.value[val_idx:]
)

@@ -247,7 +248,7 @@ def parse_localstorage(filepath):
extracted_values = []
for record in local_store.iter_all_records():
try:
extracted_values.append(json.loads(record.value))
extracted_values.append(json.loads(record.value, strict=False))
except json.decoder.JSONDecodeError:
continue
return extracted_values
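The switch to `json.loads(record.value, strict=False)` relaxes the decoder so raw control characters inside string values no longer abort parsing, which presumably matters for values carved out of LevelDB. A small illustration:

```python
import json

raw = '{"text": "line one\nline two"}'  # literal newline inside the string value

# strict=True (the default) raises "Invalid control character";
# strict=False accepts the embedded control character.
print(json.loads(raw, strict=False))  # {'text': 'line one\nline two'}
```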
@@ -288,8 +289,7 @@ def write_results_to_json(data, outputpath):
def parse_json():
# read data from a file. This is only for testing purpose.
try:
with open("teams.json") as json_file:
data = json.load(json_file)
return data
with Path("teams.json").open() as json_file:
return json.load(json_file)
except OSError as e:
print(e)
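The `custom_le_varint_from_bytes` calls above unpack a `(value, varint_raw)` pair from the record key and value bytes. As background, a little-endian base-128 varint decoder looks roughly like this (a generic sketch, not the library's actual implementation):

```python
def decode_le_varint(data: bytes) -> tuple[int, bytes]:
    """Decode a little-endian base-128 varint; return (value, raw bytes consumed)."""
    value = 0
    for i, byte in enumerate(data):
        value |= (byte & 0x7F) << (7 * i)  # lower 7 bits carry payload, least-significant first
        if not byte & 0x80:  # high bit clear marks the final byte
            return value, data[: i + 1]
    raise ValueError("truncated varint")


assert decode_le_varint(b"\xac\x02rest") == (300, b"\xac\x02")
```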
18 changes: 9 additions & 9 deletions src/forensicsim/consts.py
@@ -1,16 +1,16 @@
XTRACT_HEADER = """
_____ _ _
| ___|__ _ __ ___ _ __ ___(_) ___ ___ (_)_ __ ___
| |_ / _ \| '__/ _ \ '_ \/ __| |/ __/ __| | | '_ ` _ \\
| _| (_) | | | __/ | | \__ \ | (__\__ \_| | | | | | |
|_| \___/|_| \___|_| |_|___/_|\___|___(_)_|_| |_| |_|
| |_ / _ \\| '__/ _ \\ '_ \\/ __| |/ __/ __| | | '_ ` _ \\
| _| (_) | | | __/ | | \\__ \\ | (__\\__ \\_| | | | | | |
|_| \\___/|_| \\___|_| |_|___/_|\\___|___(_)_|_| |_| |_|
__ ___ _ _____ _
\ \/ / |_ _ __ __ _ ___| |_ |_ _|__ ___ | |
\ /| __| '__/ _` |/ __| __| | |/ _ \ / _ \| |
/ \| |_| | | (_| | (__| |_ | | (_) | (_) | |
/_/\_\\\\__|_| \__,_|\___|\__| |_|\___/ \___/|_|
\\ \\/ / |_ _ __ __ _ ___| |_ |_ _|__ ___ | |
\\ /| __| '__/ _` |/ __| __| | |/ _ \\ / _ \\| |
/ \\| |_| | | (_| | (__| |_ | | (_) | (_) | |
/_/\\_\\\\__|_| \\__,_|\\___|\\__| |_|\\___/ \\___/|_|
"""
UTIL_HEADER = """
UTIL_HEADER = r"""
_____ _ _ _ _ _ _ _
| ___|__ _ __ ___ _ __ ___(_) ___ ___ (_)_ __ ___ | | | | |_(_) |
| |_ / _ \| '__/ _ \ '_ \/ __| |/ __/ __| | | '_ ` _ \ | | | | __| | |
@@ -19,7 +19,7 @@
"""

DUMP_HEADER = """
DUMP_HEADER = r"""
_____ _ _
| ___|__ _ __ ___ _ __ ___(_) ___ ___ (_)_ __ ___
| |_ / _ \| '__/ _ \ '_ \/ __| |/ __/ __| | | '_ ` _ \
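The consts.py changes either double the backslashes or add an `r` prefix so the banner art survives Python string escaping; both approaches yield the same text. A tiny illustrative check:

```python
escaped = " /_/\\_\\ "  # backslashes doubled in a normal string
raw = r" /_/\_\ "       # raw string: backslashes taken literally
assert escaped == raw
```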