Skip to content

Commit

Permalink
Merge pull request #756 from onekey-sec/754-duplicate-entries
Browse files Browse the repository at this point in the history
fix(fs): duplicate entries handling in FileSystem API.
  • Loading branch information
qkaiser authored Mar 25, 2024
2 parents 00de4c4 + 1ba3e04 commit 695b59f
Show file tree
Hide file tree
Showing 8 changed files with 22 additions and 1 deletion.
Git LFS file not shown
Git LFS file not shown
Git LFS file not shown
Git LFS file not shown
Git LFS file not shown
2 changes: 1 addition & 1 deletion unblob/file_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -628,7 +628,7 @@ def open( # noqa: A003

def unlink(self, path):
    """Delete a file within the extraction path.

    The path is first sanitized via ``_get_extraction_path`` so the
    deletion cannot escape the extraction directory, then removed with
    ``missing_ok=True`` so unlinking a non-existent entry is a no-op
    (callers may unlink preemptively, e.g. to handle duplicate archive
    entries).
    """
    # NOTE: the scraped diff showed both the old and new debug call;
    # only the post-commit line (with _verbosity=3 to reduce log noise)
    # is kept here.
    logger.debug("unlink file", file_path=path, _verbosity=3)
    safe_path = self._get_extraction_path(path, "unlink")

    safe_path.unlink(missing_ok=True)
5 changes: 5 additions & 0 deletions unblob/handlers/archive/cpio.py
Original file line number Diff line number Diff line change
Expand Up @@ -210,6 +210,11 @@ def dump_entries(self, fs: FileSystem):
if entry.path.name in ("", "."):
continue

# There are cases where CPIO archives have duplicated entries
# We then unlink the files to overwrite them and avoid an error.
if not stat.S_ISDIR(entry.mode):
fs.unlink(entry.path)

if stat.S_ISREG(entry.mode):
fs.carve(entry.path, self.file, entry.start_offset, entry.size)
elif stat.S_ISDIR(entry.mode):
Expand Down

0 comments on commit 695b59f

Please sign in to comment.