From 51cbdb465d94b35d0558766584d31559ccffac32 Mon Sep 17 00:00:00 2001 From: ckxckx Date: Tue, 29 Oct 2024 04:06:56 +0800 Subject: [PATCH 1/9] Fix attaching to a gdbserver with tuple `gdb.attach(('0.0.0.0',12345))` (#2291) * Patch for #issues/2290 * Return listening process on `pidof(("0.0.0.0", 1234))` Instead of returning the process which is connected to port 1234, return the process which is listening on that port. * Update CHANGELOG --------- Co-authored-by: Peace-Maker --- CHANGELOG.md | 2 ++ pwnlib/gdb.py | 26 ++++++++++++++++++++++++++ pwnlib/util/net.py | 12 ++++++------ pwnlib/util/proc.py | 9 +++++++-- 4 files changed, 41 insertions(+), 8 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 553b16e99..c10b09691 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -84,6 +84,7 @@ The table below shows which release corresponds to each branch, and what date th - [#2482][2482] Throw error when using `sni` and setting `server_hostname` manually in `remote` - [#2478][2478] libcdb-cli: add `--offline-only`, refactor unstrip and add fetch parser for download libc-database - [#2484][2484] Allow to disable caching +- [#2291][2291] Fix attaching to a gdbserver with tuple `gdb.attach(('0.0.0.0',12345))` [2471]: https://github.com/Gallopsled/pwntools/pull/2471 [2358]: https://github.com/Gallopsled/pwntools/pull/2358 @@ -96,6 +97,7 @@ The table below shows which release corresponds to each branch, and what date th [2482]: https://github.com/Gallopsled/pwntools/pull/2482 [2478]: https://github.com/Gallopsled/pwntools/pull/2478 [2484]: https://github.com/Gallopsled/pwntools/pull/2484 +[2291]: https://github.com/Gallopsled/pwntools/pull/2291 ## 4.14.0 (`beta`) diff --git a/pwnlib/gdb.py b/pwnlib/gdb.py index 1d90a9cbb..08c319bc4 100644 --- a/pwnlib/gdb.py +++ b/pwnlib/gdb.py @@ -950,6 +950,8 @@ def attach(target, gdbscript = '', exe = None, gdb_args = None, ssh = None, sysr Process name. The youngest process is selected. :obj:`tuple` Host, port pair of a listening ``gdbserver`` + Tries to look up the target exe from the ``gdbserver`` commandline, + requires explicit ``exe`` argument if the target exe is not in the commandline. :class:`.process` Process to connect to :class:`.sock` @@ -1034,6 +1036,30 @@ def attach(target, gdbscript = '', exe = None, gdb_args = None, ssh = None, sysr >>> io.sendline(b'echo Hello from bash && exit') >>> io.recvall() b'Hello from bash\n' + >>> server.close() + + Attach to a gdbserver / gdbstub running on the local machine + by specifying the host and port tuple it is listening on. + (gdbserver always listens on 0.0.0.0) + + >>> gdbserver = process(['gdbserver', '1.2.3.4:12345', '/bin/bash']) + >>> gdbserver.recvline_contains(b'Listening on port', timeout=10) + b'Listening on port 12345' + >>> pid = gdb.attach(('0.0.0.0', 12345), gdbscript=''' + ... tbreak main + ... commands + ... call puts("Hello from gdbserver debugger!") + ... continue + ... end + ... 
''') + >>> gdbserver.recvline(timeout=10) # doctest: +ELLIPSIS + b'Remote debugging from host 127.0.0.1, ...\n' + >>> gdbserver.recvline(timeout=10) + b'Hello from gdbserver debugger!\n' + >>> gdbserver.sendline(b'echo Hello from bash && exit') + >>> gdbserver.recvline(timeout=10) + b'Hello from bash\n' + >>> gdbserver.close() Attach to processes running on a remote machine via an SSH :class:`.ssh` process diff --git a/pwnlib/util/net.py b/pwnlib/util/net.py index fab1dacbb..df8cd5662 100644 --- a/pwnlib/util/net.py +++ b/pwnlib/util/net.py @@ -259,17 +259,17 @@ def sockinfos(addr, f, t): infos |= set(socket.getaddrinfo(sockaddr[0], sockaddr[1], socket.AF_INET6, t, proto, socket.AI_V4MAPPED)) return infos - if local is not None: - local = sockinfos(local, fam, typ) - remote = sockinfos(remote, fam, typ) + local = sockinfos(local, fam, typ) + if remote is not None: + remote = sockinfos(remote, fam, typ) def match(c): laddrs = sockinfos(c.laddr, c.family, c.type) raddrs = sockinfos(c.raddr, c.family, c.type) - if not (raddrs & remote): + if not (laddrs & local): return False - if local is None: + if remote is None: return True - return bool(laddrs & local) + return bool(raddrs & remote) return match diff --git a/pwnlib/util/proc.py b/pwnlib/util/proc.py index 8ebe5d221..9889bb697 100644 --- a/pwnlib/util/proc.py +++ b/pwnlib/util/proc.py @@ -27,6 +27,11 @@ def pidof(target): - :class:`pwnlib.tubes.sock.sock`: singleton list of the PID at the remote end of `target` if it is running on the host. Otherwise an empty list. + - :class:`pwnlib.tubes.ssh.ssh_channel`: singleton list of the PID of + `target` on the remote system. + - :class:`tuple`: singleton list of the PID at the local end of the + connection to `target` if it is running on the host. Otherwise an + empty list. Arguments: target(object): The target whose PID(s) to find. @@ -38,7 +43,7 @@ def pidof(target): >>> l = tubes.listen.listen() >>> p = process(['curl', '-s', 'http://127.0.0.1:%d'%l.lport]) - >>> pidof(p) == pidof(l) == pidof(('127.0.0.1', l.lport)) + >>> pidof(p) == pidof(l) == pidof(('127.0.0.1', l.rport)) True """ if isinstance(target, tubes.ssh.ssh_channel): @@ -51,7 +56,7 @@ def pidof(target): return [c.pid for c in psutil.net_connections() if match(c)] elif isinstance(target, tuple): - match = sock_match(None, target) + match = sock_match(target, None) return [c.pid for c in psutil.net_connections() if match(c)] elif isinstance(target, tubes.process.process): From 6f0793eebcf78eab4ac86bb8fcbb1ffee3e59fa6 Mon Sep 17 00:00:00 2001 From: peace-maker Date: Mon, 28 Oct 2024 21:11:36 +0100 Subject: [PATCH 2/9] Add `tube.upload_manually` to upload files in chunks (#2410) * Add `tube.upload_manually` Upload data in chunks when having a tube connected to a shell. This is useful when doing kernel or qemu challenges where you can't use the ssh tube's file upload features. 
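A minimal usage sketch of the new helper (not part of the patch itself): the host, port, prompt and file names below are hypothetical, and the only assumption is a plain shell reachable over the tube.

```
from pwn import *

# Hypothetical target: a netcat-style shell (e.g. inside a qemu VM) that
# prints "$ " as its prompt.
io = remote('127.0.0.1', 4444)

with open('exploit', 'rb') as f:
    payload = f.read()

# upload_manually() waits for the prompt before each command, splits the data
# into chunk_size pieces, base64-encodes every piece, reassembles and
# (optionally) decompresses it on the target, then chmods the result.
# Only echo, base64, chmod and, if available, gzip or xz are needed remotely.
io.upload_manually(payload, target_path='./exploit', prompt=b'$ ',
                   chunk_size=0x200, chmod_flags='u+x', compression='auto')

io.sendline(b'./exploit')
io.interactive()
```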
* Add tests and fix corner cases * Update CHANGELOG * Fix character escaping in tests * Fix gzip compression on Python 2 --- CHANGELOG.md | 2 + pwnlib/tubes/tube.py | 127 +++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 129 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index c10b09691..033971e16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -85,6 +85,7 @@ The table below shows which release corresponds to each branch, and what date th - [#2478][2478] libcdb-cli: add `--offline-only`, refactor unstrip and add fetch parser for download libc-database - [#2484][2484] Allow to disable caching - [#2291][2291] Fix attaching to a gdbserver with tuple `gdb.attach(('0.0.0.0',12345))` +- [#2410][2410] Add `tube.upload_manually` to upload files in chunks [2471]: https://github.com/Gallopsled/pwntools/pull/2471 [2358]: https://github.com/Gallopsled/pwntools/pull/2358 @@ -98,6 +99,7 @@ The table below shows which release corresponds to each branch, and what date th [2478]: https://github.com/Gallopsled/pwntools/pull/2478 [2484]: https://github.com/Gallopsled/pwntools/pull/2484 [2291]: https://github.com/Gallopsled/pwntools/pull/2291 +[2410]: https://github.com/Gallopsled/pwntools/pull/2410 ## 4.14.0 (`beta`) diff --git a/pwnlib/tubes/tube.py b/pwnlib/tubes/tube.py index a14e2d286..84798314f 100644 --- a/pwnlib/tubes/tube.py +++ b/pwnlib/tubes/tube.py @@ -21,6 +21,8 @@ from pwnlib.log import Logger from pwnlib.timeout import Timeout from pwnlib.tubes.buffer import Buffer +from pwnlib.util import fiddling +from pwnlib.util import iters from pwnlib.util import misc from pwnlib.util import packing @@ -1077,6 +1079,131 @@ def clean_and_log(self, timeout = 0.05): with context.local(log_level='debug'): return cached_data + self.clean(timeout) + def upload_manually(self, data, target_path = './payload', prompt = b'$', chunk_size = 0x200, chmod_flags = 'u+x', compression='auto', end_marker = 'PWNTOOLS_DONE'): + """upload_manually(data, target_path = './payload', prompt = b'$', chunk_size = 0x200, chmod_flags = 'u+x', compression='auto', end_marker = 'PWNTOOLS_DONE') + + Upload a file manually using base64 encoding and compression. + This can be used when the tube is connected to a shell. + + The file is uploaded in base64-encoded chunks by appending to a file + and then decompressing it: + + ``` + loop: + echo | base64 -d >> . + -d -f . + chmod + ``` + + It is assumed that a `base64` command is available on the target system. + When ``compression`` is ``auto`` the best compression utility available + between ``gzip`` and ``xz`` is chosen with a fallback to uncompressed + upload. + + Arguments: + + data(bytes): The data to upload. + target_path(str): The path to upload the data to. + prompt(bytes): The shell prompt to wait for. + chunk_size(int): The size of each chunk to upload. + chmod_flags(str): The flags to use with chmod. ``""`` to ignore. + compression(str): The compression to use. ``auto`` to automatically choose the best compression or ``gzip`` or ``xz``. + end_marker(str): The marker to use to detect the end of the output. Only used when prompt is not set. 
+ + Examples: + + >>> l = listen() + >>> l.spawn_process('/bin/sh') + >>> r = remote('127.0.0.1', l.lport) + >>> r.upload_manually(b'some\\xca\\xfedata\\n', prompt=b'', chmod_flags='') + >>> r.sendline(b'cat ./payload') + >>> r.recvline() + b'some\\xca\\xfedata\\n' + + >>> r.upload_manually(cyclic(0x1000), target_path='./cyclic_pattern', prompt=b'', chunk_size=0x10, compression='gzip') + >>> r.sendline(b'sha256sum ./cyclic_pattern') + >>> r.recvlineS(keepends=False).startswith(sha256sumhex(cyclic(0x1000))) + True + + >>> blob = ELF.from_assembly(shellcraft.echo('Hello world!\\n') + shellcraft.exit(0)) + >>> r.upload_manually(blob.data, prompt=b'') + >>> r.sendline(b'./payload') + >>> r.recvline() + b'Hello world!\\n' + >>> r.close() + >>> l.close() + """ + echo_end = "" + if not prompt: + echo_end = "; echo {}".format(end_marker) + end_markerb = end_marker.encode() + else: + end_markerb = prompt + + # Detect available compression utility, fallback to uncompressed upload. + compression_mode = None + possible_compression = ['gzip'] + if six.PY3: + possible_compression.insert(0, 'xz') + if not prompt: + self.sendline("echo {}".format(end_marker).encode()) + if compression == 'auto': + for utility in possible_compression: + self.sendlineafter(end_markerb, "command -v {} && echo YEP || echo NOPE{}".format(utility, echo_end).encode()) + result = self.recvuntil([b'YEP', b'NOPE']) + if b'YEP' in result: + compression_mode = utility + break + elif compression in possible_compression: + compression_mode = compression + else: + self.error('Invalid compression mode: %s, has to be one of %s', compression, possible_compression) + + self.debug('Manually uploading using compression mode: %s', compression_mode) + + compressed_data = b'' + if compression_mode == 'xz': + import lzma + compressed_data = lzma.compress(data, format=lzma.FORMAT_XZ, preset=9) + compressed_path = target_path + '.xz' + elif compression_mode == 'gzip': + import gzip + from six import BytesIO + f = BytesIO() + with gzip.GzipFile(fileobj=f, mode='wb', compresslevel=9) as g: + g.write(data) + compressed_data = f.getvalue() + compressed_path = target_path + '.gz' + else: + compressed_path = target_path + + # Don't compress if it doesn't reduce the size. + if len(compressed_data) >= len(data): + compression_mode = None + compressed_path = target_path + else: + data = compressed_data + + # Upload data in `chunk_size` chunks. Assume base64 is available. + with self.progress('Uploading payload') as p: + for idx, chunk in enumerate(iters.group(chunk_size, data)): + if None in chunk: + chunk = chunk[:chunk.index(None)] + if idx == 0: + self.sendlineafter(end_markerb, "echo {} | base64 -d > {}{}".format(fiddling.b64e(bytearray(chunk)), compressed_path, echo_end).encode()) + else: + self.sendlineafter(end_markerb, "echo {} | base64 -d >> {}{}".format(fiddling.b64e(bytearray(chunk)), compressed_path, echo_end).encode()) + p.status('{}/{} {}'.format(idx+1, len(data)//chunk_size+1, misc.size(idx*chunk_size + len(chunk)))) + p.success(misc.size(len(data))) + + # Decompress the file and set the permissions. 
+ if compression_mode is not None: + self.sendlineafter(end_markerb, '{} -d -f {}{}'.format(compression_mode, compressed_path, echo_end).encode()) + if chmod_flags: + self.sendlineafter(end_markerb, 'chmod {} {}{}'.format(chmod_flags, target_path, echo_end).encode()) + if not prompt: + self.recvuntil(end_markerb + b'\n') + def connect_input(self, other): """connect_input(other) From 96d98cf192cf1e9bc5d6bbeff5311e8961e58439 Mon Sep 17 00:00:00 2001 From: Leopold Date: Fri, 1 Nov 2024 21:36:30 +0800 Subject: [PATCH 3/9] fix fromsocket to deal with ipv6 socket (#2497) * fix fromsocket to deal with ipv6 socket * add remote.fromsocket ipv6 test and update CHANGELOG.md * skip fromsocket ipv6 test --- CHANGELOG.md | 6 ++++++ pwnlib/tubes/remote.py | 9 ++++++++- 2 files changed, 14 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 37b77a9fe..4c5c96021 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -129,6 +129,12 @@ The table below shows which release corresponds to each branch, and what date th [2435]: https://github.com/Gallopsled/pwntools/pull/2435 [2437]: https://github.com/Gallopsled/pwntools/pull/2437 +## 4.13.2 + +- [#2497][2497] Fix remote.fromsocket() to handle AF_INET6 socket + +[2497]: https://github.com/Gallopsled/pwntools/pull/2497 + ## 4.13.1 (`stable`) - [#2445][2445] Fix parsing the PLT on Windows diff --git a/pwnlib/tubes/remote.py b/pwnlib/tubes/remote.py index 58008194c..4c6d9dcd2 100644 --- a/pwnlib/tubes/remote.py +++ b/pwnlib/tubes/remote.py @@ -53,6 +53,13 @@ class remote(sock): >>> r = remote.fromsocket(s) >>> r.recvn(4) b'HTTP' + >>> s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM) #doctest: +SKIP + >>> s.connect(('2606:4700:4700::1111', 80)) #doctest: +SKIP + >>> s.send(b'GET ' + b'\r\n'*2) #doctest: +SKIP + 8 + >>> r = remote.fromsocket(s) #doctest: +SKIP + >>> r.recvn(4) #doctest: +SKIP + b'HTTP' """ def __init__(self, host, port, @@ -139,7 +146,7 @@ def fromsocket(cls, socket): Instance of pwnlib.tubes.remote.remote. """ s = socket - host, port = s.getpeername() + host, port = s.getpeername()[:2] return remote(host, port, fam=s.family, typ=s.type, sock=s) class tcp(remote): From 55ac6e11fc15a2720cfe7c44cb9ea90f23069711 Mon Sep 17 00:00:00 2001 From: Justin Applegate <70449145+Legoclones@users.noreply.github.com> Date: Sun, 8 Dec 2024 11:33:32 -0700 Subject: [PATCH 4/9] Update documentation for format strings (#2501) * Update fmtstr.py documentation Clarified that the `value` in the `writes` dict (for `fmtstr_payload()`) should be a bytestring if the value to be written is shorter than a long * Update fmtstr.py documentation for FmtStr class Updated example and definition for `write()` function in `FmtStr` class to include bytes as the value. * Fix code formatting --------- Co-authored-by: Peace-Maker --- pwnlib/fmtstr.py | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/pwnlib/fmtstr.py b/pwnlib/fmtstr.py index 2b8508806..97a534916 100644 --- a/pwnlib/fmtstr.py +++ b/pwnlib/fmtstr.py @@ -841,6 +841,12 @@ def fmtstr_payload(offset, writes, numbwritten=0, write_size='byte', write_size_ The overflows argument is a format-string-length to output-amount tradeoff: Larger values for ``overflows`` produce shorter format strings that generate more output at runtime. + The writes argument is a dictionary with address/value pairs like ``{addr: value, addr2: value2}``. 
+ If the value is an ``int`` datatype, it will be automatically casted into a bytestring with the length of a ``long`` (8 bytes in 64-bit, 4 bytes in 32-bit). + If a specific number of bytes is intended to be written (such as only a single byte, single short, or single int and not an entire long), + then provide a bytestring like ``b'\x37\x13'`` or ``p16(0x1337)``. + Note that the ``write_size`` argument does not determine **total** bytes written, only the size of each consecutive write. + Arguments: offset(int): the first formatter's offset you control writes(dict): dict with addr, value ``{addr: value, addr2: value2}`` @@ -857,6 +863,8 @@ def fmtstr_payload(offset, writes, numbwritten=0, write_size='byte', write_size_ >>> context.clear(arch = 'amd64') >>> fmtstr_payload(1, {0x0: 0x1337babe}, write_size='int') b'%322419390c%4$llnaaaabaa\x00\x00\x00\x00\x00\x00\x00\x00' + >>> fmtstr_payload(1, {0x0: p32(0x1337babe)}, write_size='int') + b'%322419390c%3$na\x00\x00\x00\x00\x00\x00\x00\x00' >>> fmtstr_payload(1, {0x0: 0x1337babe}, write_size='short') b'%47806c%5$lln%22649c%6$hnaaaabaa\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00' >>> fmtstr_payload(1, {0x0: 0x1337babe}, write_size='byte') @@ -872,6 +880,8 @@ def fmtstr_payload(offset, writes, numbwritten=0, write_size='byte', write_size_ b'%19c%12$hhn%36c%13$hhn%131c%14$hhn%4c%15$hhn\x03\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00' >>> fmtstr_payload(1, {0x0: 0x00000001}, write_size='byte') b'c%3$naaa\x00\x00\x00\x00' + >>> fmtstr_payload(1, {0x0: b'\x01'}, write_size='byte') + b'c%3$hhna\x00\x00\x00\x00' >>> fmtstr_payload(1, {0x0: b"\xff\xff\x04\x11\x00\x00\x00\x00"}, write_size='short') b'%327679c%7$lln%18c%8$hhn\x00\x00\x00\x00\x03\x00\x00\x00' >>> fmtstr_payload(10, {0x404048 : 0xbadc0ffe, 0x40403c : 0xdeadbeef}, no_dollars=True) @@ -999,7 +1009,7 @@ def write(self, addr, data): Arguments: addr(int): the address where you want to write - data(int): the data that you want to write ``addr`` + data(int or bytes): the data that you want to write ``addr`` Returns: None @@ -1013,6 +1023,10 @@ def write(self, addr, data): >>> f.write(0x08040506, 0x1337babe) >>> f.execute_writes() b'%19c%16$hhn%36c%17$hhn%131c%18$hhn%4c%19$hhn\t\x05\x04\x08\x08\x05\x04\x08\x07\x05\x04\x08\x06\x05\x04\x08' + >>> f2 = FmtStr(send_fmt_payload, offset=5) + >>> f2.write(0x08040506, p16(0x1337)) + >>> f2.execute_writes() + b'%19c%11$hhn%36c%12$hhnaa\x07\x05\x04\x08\x06\x05\x04\x08' """ self.writes[addr] = data From ec262d8d0141c8bcab6de72013e029ba18a98d5b Mon Sep 17 00:00:00 2001 From: peace-maker Date: Sun, 8 Dec 2024 21:16:39 +0100 Subject: [PATCH 5/9] Update sphinx for Python 3.13 support (#2503) * Fix sphinx warnings in docstrings * Update sphinx for Python 3.13 support Could not import extension sphinx.builders.epub3 (exception: No module named 'imghdr') `imghdr` was removed in Python 3.13. 
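For context, a two-line illustration of the root cause behind the quoted error (an editorial aside, not part of the patch): the stdlib module that the old Sphinx epub3 builder imports no longer exists on Python 3.13.

```
# On Python 3.13 this raises ModuleNotFoundError: imghdr was removed from the
# standard library (PEP 594), which is what sphinx.builders.epub3 trips over.
import imghdr
```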
--- docs/requirements.txt | 5 +++-- docs/source/protocols.rst | 9 +++++++++ docs/source/shellcraft/riscv64.rst | 6 +++--- docs/source/windbg.rst | 2 +- pwnlib/elf/corefile.py | 8 ++++---- pwnlib/elf/elf.py | 6 +++--- pwnlib/libcdb.py | 6 +++--- pwnlib/tubes/process.py | 25 ++++++++++++++----------- pwnlib/tubes/tube.py | 12 ++++++------ 9 files changed, 46 insertions(+), 33 deletions(-) diff --git a/docs/requirements.txt b/docs/requirements.txt index 9b9003604..f9da2e525 100755 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -2,7 +2,8 @@ capstone coverage[toml] python-dateutil doc2dash -docutils<0.18 +docutils<0.18; python_version<'3' +docutils>=0.18; python_version>='3' intervaltree isort mako>=1.0.0 @@ -18,6 +19,6 @@ psutil requests>=2.5.1 ropgadget>=5.3 sphinx==1.8.6; python_version<'3' -sphinx>=4.5.0; python_version>='3' +sphinx>=7.0.0; python_version>='3' sphinx_rtd_theme sphinxcontrib-autoprogram<=0.1.5 diff --git a/docs/source/protocols.rst b/docs/source/protocols.rst index 8b1378917..f4ed4099e 100644 --- a/docs/source/protocols.rst +++ b/docs/source/protocols.rst @@ -1 +1,10 @@ +.. testsetup:: * + from pwn import * + + +:mod:`pwnlib.protocols.adb` --- Protocol implementations +======================================================== + +.. automodule:: pwnlib.protocols.adb + :members: \ No newline at end of file diff --git a/docs/source/shellcraft/riscv64.rst b/docs/source/shellcraft/riscv64.rst index 47b484af7..6e4a01148 100644 --- a/docs/source/shellcraft/riscv64.rst +++ b/docs/source/shellcraft/riscv64.rst @@ -4,16 +4,16 @@ context.clear(arch='riscv64') :mod:`pwnlib.shellcraft.riscv64` --- Shellcode for RISCV64 -=========================================================== +========================================================== :mod:`pwnlib.shellcraft.riscv64` -------------------------------- +-------------------------------- .. automodule:: pwnlib.shellcraft.riscv64 :members: :mod:`pwnlib.shellcraft.riscv64.linux` ---------------------------------------- +-------------------------------------- .. automodule:: pwnlib.shellcraft.riscv64.linux :members: diff --git a/docs/source/windbg.rst b/docs/source/windbg.rst index e08397205..3a713f09a 100644 --- a/docs/source/windbg.rst +++ b/docs/source/windbg.rst @@ -3,7 +3,7 @@ from pwn import * :mod:`pwnlib.windbg` --- Working with WinDbg -====================================== +============================================ .. automodule:: pwnlib.windbg :members: \ No newline at end of file diff --git a/pwnlib/elf/corefile.py b/pwnlib/elf/corefile.py index 1cb8823fa..8db885d9c 100644 --- a/pwnlib/elf/corefile.py +++ b/pwnlib/elf/corefile.py @@ -238,8 +238,8 @@ class Corefile(ELF): Registers can be accessed directly, e.g. via ``core_obj.eax`` and enumerated via :data:`Corefile.registers`. - Memory can be accessed directly via :meth:`.read` or :meth:`.write`, and also - via :meth:`.pack` or :meth:`.unpack` or even :meth:`.string`. + Memory can be accessed directly via :meth:`pwnlib.elf.elf.ELF.read` or :meth:`pwnlib.elf.elf.ELF.write`, and also + via :meth:`pwnlib.elf.elf.ELF.pack` or :meth:`pwnlib.elf.elf.ELF.unpack` or even :meth:`.string`. Arguments: core: Path to the core file. Alternately, may be a :class:`.process` instance, @@ -376,8 +376,8 @@ class Corefile(ELF): >>> core.exe.data[0:4] b'\x7fELF' - It also supports all of the features of :class:`ELF`, so you can :meth:`.read` - or :meth:`.write` or even the helpers like :meth:`.pack` or :meth:`.unpack`. 
+ It also supports all of the features of :class:`ELF`, so you can :meth:`pwnlib.elf.elf.ELF.read` + or :meth:`pwnlib.elf.elf.ELF.write` or even the helpers like :meth:`pwnlib.elf.elf.ELF.pack` or :meth:`pwnlib.elf.elf.ELF.unpack`. Don't forget to call :meth:`.ELF.save` to save the changes to disk. diff --git a/pwnlib/elf/elf.py b/pwnlib/elf/elf.py index 02668f0a9..f5cab6d80 100644 --- a/pwnlib/elf/elf.py +++ b/pwnlib/elf/elf.py @@ -2350,7 +2350,7 @@ def disable_nx(self): @staticmethod def set_runpath(exepath, runpath): - r"""set_runpath(str, str) -> ELF + r"""set_runpath(exepath, runpath) -> ELF Patches the RUNPATH of the ELF to the given path using the `patchelf utility `_. @@ -2385,7 +2385,7 @@ def set_runpath(exepath, runpath): @staticmethod def set_interpreter(exepath, interpreter_path): - r"""set_interpreter(str, str) -> ELF + r"""set_interpreter(exepath, interpreter_path) -> ELF Patches the interpreter of the ELF to the given binary using the `patchelf utility `_. @@ -2419,7 +2419,7 @@ def set_interpreter(exepath, interpreter_path): @staticmethod def patch_custom_libraries(exe_path, custom_library_path, create_copy=True, suffix='_remotelibc'): - r"""patch_custom_libraries(str, str, bool, str) -> ELF + r"""patch_custom_libraries(exe_path, custom_library_path, create_copy=True, suffix='_remotelibc') -> ELF Looks for the interpreter binary in the given path and patches the binary to use it if available. Also patches the RUNPATH to the given path using the `patchelf utility `_. diff --git a/pwnlib/libcdb.py b/pwnlib/libcdb.py index 90dde91ce..b1e969e34 100644 --- a/pwnlib/libcdb.py +++ b/pwnlib/libcdb.py @@ -766,12 +766,12 @@ def search_by_libs_id(libs_id, unstrip=True, offline_only=False): Arguments: libs_id(str): - Libs ID (e.g. 'libc6_...') of the library + Libs ID (e.g. ``'libc6_...'``) of the library unstrip(bool): Try to fetch debug info for the libc and apply it to the downloaded file. offline_only(bool): - When pass `offline_only=True`, restricts search mode to offline sources only, - disable online lookup. Defaults to `False`, and enable both offline and online providers. + When pass ``offline_only=True``, restricts search mode to offline sources only, + disable online lookup. Defaults to :const:`False`, and enable both offline and online providers. Returns: Path to the downloaded library on disk, or :const:`None`. diff --git a/pwnlib/tubes/process.py b/pwnlib/tubes/process.py index 73c378771..71dcbca39 100644 --- a/pwnlib/tubes/process.py +++ b/pwnlib/tubes/process.py @@ -888,8 +888,10 @@ def maps(self): """maps() -> [mapping] Returns a list of process mappings. + A mapping object has the following fields: addr, address (addr alias), start (addr alias), end, size, perms, path, rss, pss, shared_clean, shared_dirty, private_clean, private_dirty, referenced, anonymous, swap + perms is a permissions object, with the following fields: read, write, execute, private, shared, string @@ -917,24 +919,25 @@ def maps(self): >>> checker_arr == [True] * len(proc_maps) * 5 True - """ - - """ Useful information about this can be found at: https://man7.org/linux/man-pages/man5/proc.5.html specifically the /proc/pid/maps section. - memory_maps() returns a list of pmmap_ext objects + memory_maps() returns a list of pmmap_ext objects. The definition (from psutil/_pslinux.py) is: + + .. 
code-block:: python - The definition (from psutil/_pslinux.py) is: - pmmap_grouped = namedtuple( - 'pmmap_grouped', - ['path', 'rss', 'size', 'pss', 'shared_clean', 'shared_dirty', - 'private_clean', 'private_dirty', 'referenced', 'anonymous', 'swap']) - pmmap_ext = namedtuple( - 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) + pmmap_grouped = namedtuple( + 'pmmap_grouped', + ['path', 'rss', 'size', 'pss', 'shared_clean', 'shared_dirty', + 'private_clean', 'private_dirty', 'referenced', 'anonymous', 'swap']) + pmmap_ext = namedtuple( + 'pmmap_ext', 'addr perms ' + ' '.join(pmmap_grouped._fields)) Here is an example of a pmmap_ext entry: + + .. code-block:: python + pmmap_ext(addr='15555551c000-155555520000', perms='r--p', path='[vvar]', rss=0, size=16384, pss=0, shared_clean=0, shared_dirty=0, private_clean=0, private_dirty=0, referenced=0, anonymous=0, swap=0) """ diff --git a/pwnlib/tubes/tube.py b/pwnlib/tubes/tube.py index 84798314f..1a82f145c 100644 --- a/pwnlib/tubes/tube.py +++ b/pwnlib/tubes/tube.py @@ -1088,12 +1088,12 @@ def upload_manually(self, data, target_path = './payload', prompt = b'$', chunk_ The file is uploaded in base64-encoded chunks by appending to a file and then decompressing it: - ``` - loop: - echo | base64 -d >> . - -d -f . - chmod - ``` + .. code-block:: + + loop: + echo | base64 -d >> . + -d -f . + chmod It is assumed that a `base64` command is available on the target system. When ``compression`` is ``auto`` the best compression utility available From 24d217cfbdc9efc0ac0c29d1ce792b3b87603a07 Mon Sep 17 00:00:00 2001 From: Peace-Maker Date: Mon, 9 Dec 2024 13:09:10 +0100 Subject: [PATCH 6/9] Docs: Fix link to source for class properties The property has setters and getters. Link to the getter if it's available. This adds line numbers to the source link for properties. Fixes #2401 --- docs/source/conf.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/source/conf.py b/docs/source/conf.py index 6d72a01af..d908e2436 100755 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -360,6 +360,9 @@ def linkcode_resolve(domain, info): else: filename = info['module'].replace('.', '/') + '.py' + if isinstance(val, property): + val = val.fget + if isinstance(val, (types.ModuleType, types.MethodType, types.FunctionType, types.TracebackType, types.FrameType, types.CodeType) + six.class_types): try: lines, first = inspect.getsourcelines(val) From a0ddbf5e1648d567b23f8a44408aa9e85397aac7 Mon Sep 17 00:00:00 2001 From: Peace-Maker Date: Tue, 10 Dec 2024 20:34:39 +0100 Subject: [PATCH 7/9] Tests: Revert timeout changes from gdb tests There were additional `timeout=X` additions in #2382 which caused tests to fail randomly when a timeout was reached. The tests run through occationally, so it's not an infinite loop. But flakey tests are annoying and I don't see a reason for the timeout additions to the doctests for the core patch of that PR other than failing early during manual tests of new functionality. CI running smoothly is more important than fast failing during development on the gdb module imo. 
--- pwnlib/gdb.py | 32 ++++++++++++++++---------------- 1 file changed, 16 insertions(+), 16 deletions(-) diff --git a/pwnlib/gdb.py b/pwnlib/gdb.py index 1315408d0..690e360a4 100644 --- a/pwnlib/gdb.py +++ b/pwnlib/gdb.py @@ -195,7 +195,7 @@ def debug_assembly(asm, gdbscript=None, vma=None, api=False): >>> assembly = shellcraft.echo("Hello world!\n") >>> io = gdb.debug_assembly(assembly) - >>> io.recvline(timeout=1) + >>> io.recvline() b'Hello world!\n' """ tmp_elf = make_elf_from_assembly(asm, vma=vma, extract=False) @@ -230,7 +230,7 @@ def debug_shellcode(data, gdbscript=None, vma=None, api=False): >>> assembly = shellcraft.echo("Hello world!\n") >>> shellcode = asm(assembly) >>> io = gdb.debug_shellcode(shellcode) - >>> io.recvline(timeout=1) + >>> io.recvline() b'Hello world!\n' """ if isinstance(data, six.text_type): @@ -490,12 +490,12 @@ def debug(args, gdbscript=None, gdb_args=None, exe=None, ssh=None, env=None, por Send a command to Bash >>> io.sendline(b"echo hello") - >>> io.recvline(timeout=30) + >>> io.recvline() b'hello\n' Interact with the process - >>> io.interactive(timeout=1) # doctest: +SKIP + >>> io.interactive() # doctest: +SKIP >>> io.close() Create a new process, and stop it at '_start' @@ -514,7 +514,7 @@ def debug(args, gdbscript=None, gdb_args=None, exe=None, ssh=None, env=None, por Send a command to Bash >>> io.sendline(b"echo hello") - >>> io.recvline(timeout=10) + >>> io.recvline() b'hello\n' Interact with the process @@ -526,19 +526,19 @@ def debug(args, gdbscript=None, gdb_args=None, exe=None, ssh=None, env=None, por >>> io = gdb.debug(args=[b'\xde\xad\xbe\xef'], gdbscript='continue', exe="/bin/sh") >>> io.sendline(b"echo $0") - >>> io.recvline(timeout=10) + >>> io.recvline() b'\xde\xad\xbe\xef\n' >>> io.close() Demonstrate that LD_PRELOAD is respected >>> io = process(["grep", "libc.so.6", "/proc/self/maps"]) - >>> real_libc_path = io.recvline(timeout=1).split()[-1] + >>> real_libc_path = io.recvline().split()[-1] >>> io.close() >>> import shutil >>> local_path = shutil.copy(real_libc_path, "./local-libc.so") # make a copy of libc to demonstrate that it is loaded >>> io = gdb.debug(["grep", "local-libc.so", "/proc/self/maps"], gdbscript="continue", env={"LD_PRELOAD": "./local-libc.so"}) - >>> io.recvline(timeout=1).split()[-1] # doctest: +ELLIPSIS + >>> io.recvline().split()[-1] # doctest: +ELLIPSIS b'.../local-libc.so' >>> io.close() >>> os.remove("./local-libc.so") # cleanup @@ -572,7 +572,7 @@ def debug(args, gdbscript=None, gdb_args=None, exe=None, ssh=None, env=None, por >>> io = gdb.debug(args=[b'\xde\xad\xbe\xef'], gdbscript='continue', exe="/bin/sh", ssh=shell) >>> io.sendline(b"echo $0") - >>> io.recvline(timeout=10) + >>> io.recvline() b'$ \xde\xad\xbe\xef\n' >>> io.close() @@ -580,7 +580,7 @@ def debug(args, gdbscript=None, gdb_args=None, exe=None, ssh=None, env=None, por >>> io = gdb.debug(args=[], gdbscript='continue', exe="/bin/sh", ssh=shell) >>> io.sendline(b"echo $0") - >>> io.recvline(timeout=10) + >>> io.recvline() b'$ \n' >>> io.close() @@ -620,12 +620,12 @@ def debug(args, gdbscript=None, gdb_args=None, exe=None, ssh=None, env=None, por Resume the program >>> io.gdb.continue_nowait() - >>> io.recvline(timeout=1) + >>> io.recvline() b'foo\n' >>> io.close() >>> ssh_io.gdb.continue_nowait() - >>> ssh_io.recvline(timeout=1) + >>> ssh_io.recvline() b'foo\n' >>> ssh_io.close() >>> shell.close() @@ -978,7 +978,7 @@ def attach(target, gdbscript = '', exe = None, gdb_args = None, ssh = None, sysr ... detach ... quit ... 
''') - >>> io.recvline(timeout=10) + >>> io.recvline() b'Hello from process debugger!\n' >>> io.sendline(b'echo Hello from bash && exit') >>> io.recvall() @@ -1005,7 +1005,7 @@ def attach(target, gdbscript = '', exe = None, gdb_args = None, ssh = None, sysr Observe the forced line - >>> io.recvline(timeout=1) + >>> io.recvline() b'Hello from process debugger!\n' Interact with the program in a regular way @@ -1029,7 +1029,7 @@ def attach(target, gdbscript = '', exe = None, gdb_args = None, ssh = None, sysr ... detach ... quit ... ''') - >>> io.recvline(timeout=10) + >>> io.recvline() b'Hello from remote debugger!\n' >>> io.sendline(b'echo Hello from bash && exit') >>> io.recvall() @@ -1048,7 +1048,7 @@ def attach(target, gdbscript = '', exe = None, gdb_args = None, ssh = None, sysr >>> io.recvline(timeout=5) # doctest: +SKIP b'Hello from ssh debugger!\n' >>> io.sendline(b'This will be echoed back') - >>> io.recvline(timeout=1) + >>> io.recvline() b'This will be echoed back\n' >>> io.close() """ From 57b9eb91079d3117e87c4bbe82ace03a22c7440d Mon Sep 17 00:00:00 2001 From: peace-maker Date: Tue, 10 Dec 2024 21:36:33 +0100 Subject: [PATCH 8/9] Fix loading ELF files without valid .dynamic section (#2502) * Fix loading ELF files without valid .dynamic section This allows to load separate debuginfo files and access their symbols. * Update CHANGELOG --- CHANGELOG.md | 2 ++ pwnlib/elf/elf.py | 3 ++- pwnlib/libcdb.py | 9 +++++++++ 3 files changed, 13 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 1fde0d061..222324a28 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -86,6 +86,7 @@ The table below shows which release corresponds to each branch, and what date th - [#2484][2484] Allow to disable caching - [#2291][2291] Fix attaching to a gdbserver with tuple `gdb.attach(('0.0.0.0',12345))` - [#2410][2410] Add `tube.upload_manually` to upload files in chunks +- [#2502][2502] Fix loading ELF files without valid .dynamic section [2471]: https://github.com/Gallopsled/pwntools/pull/2471 [2358]: https://github.com/Gallopsled/pwntools/pull/2358 @@ -100,6 +101,7 @@ The table below shows which release corresponds to each branch, and what date th [2484]: https://github.com/Gallopsled/pwntools/pull/2484 [2291]: https://github.com/Gallopsled/pwntools/pull/2291 [2410]: https://github.com/Gallopsled/pwntools/pull/2410 +[2502]: https://github.com/Gallopsled/pwntools/pull/2502 ## 4.14.0 (`beta`) diff --git a/pwnlib/elf/elf.py b/pwnlib/elf/elf.py index f5cab6d80..d93bfca8b 100644 --- a/pwnlib/elf/elf.py +++ b/pwnlib/elf/elf.py @@ -52,6 +52,7 @@ from elftools.elf.constants import P_FLAGS from elftools.elf.constants import SHN_INDICES from elftools.elf.descriptions import describe_e_type +from elftools.elf.dynamic import DynamicSection from elftools.elf.elffile import ELFFile from elftools.elf.enums import ENUM_GNU_PROPERTY_X86_FEATURE_1_FLAGS from elftools.elf.gnuversions import GNUVerDefSection @@ -1607,7 +1608,7 @@ def dynamic_by_tag(self, tag): dt = None dynamic = self.get_section_by_name('.dynamic') - if not dynamic: + if not dynamic or not isinstance(dynamic, DynamicSection): return None try: diff --git a/pwnlib/libcdb.py b/pwnlib/libcdb.py index b1e969e34..909f5aeaa 100644 --- a/pwnlib/libcdb.py +++ b/pwnlib/libcdb.py @@ -294,6 +294,15 @@ def search_by_hash(search_target, search_type='build_id', unstrip=True, offline_ return cache def _search_debuginfo_by_hash(base_url, hex_encoded_id): + """ + Given a hex-encoded build_id, attempt to download a matching debuginfo from the debuginfod 
server. + + >>> debuginfo_file = _search_debuginfo_by_hash(DEBUGINFOD_SERVERS[0], 'd1704d25fbbb72fa95d517b883131828c0883fe9') + >>> debuginfo_file is not None + True + >>> 'main_arena' in ELF(debuginfo_file).symbols + True + """ # Deferred import because it's slow import requests from six.moves import urllib From 74a300db6c02c0b63ec49f0d5033e92ccf38fa00 Mon Sep 17 00:00:00 2001 From: Jakub Nowak Date: Tue, 10 Dec 2024 22:31:29 +0100 Subject: [PATCH 9/9] Deprecate 'keepends' argument in favor of 'drop' (#2476) * Replace 'keepends' with 'drop' Add 'drop' argument in place of 'keepends' for tubes. 'keepends' still works but raises depreciation warning. * changelog * Ignore line numbers in PyLint check --------- Co-authored-by: peace-maker --- .github/workflows/pylint.yml | 5 +- CHANGELOG.md | 2 + pwnlib/context/__init__.py | 8 +- pwnlib/tubes/tube.py | 146 ++++++++++++++++++++++++----------- 4 files changed, 109 insertions(+), 52 deletions(-) diff --git a/.github/workflows/pylint.yml b/.github/workflows/pylint.yml index ee21b8614..410e1cae5 100644 --- a/.github/workflows/pylint.yml +++ b/.github/workflows/pylint.yml @@ -26,10 +26,11 @@ jobs: set -x pip install pylint pip install --upgrade -e . - pylint --exit-zero --errors-only pwnlib -f parseable | cut -d ' ' -f2- > current.txt + run_pylint() { pylint --exit-zero --errors-only pwnlib -f parseable | cut -d ' ' -f2- | sed 's/line [0-9]\+/line XXXX/g'; } + run_pylint > current.txt git fetch origin git checkout origin/"$GITHUB_BASE_REF" - pylint --exit-zero --errors-only pwnlib -f parseable | cut -d ' ' -f2- > base.txt + run_pylint > base.txt if diff base.txt current.txt | grep '>'; then false fi diff --git a/CHANGELOG.md b/CHANGELOG.md index 222324a28..ff68e4081 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -87,6 +87,7 @@ The table below shows which release corresponds to each branch, and what date th - [#2291][2291] Fix attaching to a gdbserver with tuple `gdb.attach(('0.0.0.0',12345))` - [#2410][2410] Add `tube.upload_manually` to upload files in chunks - [#2502][2502] Fix loading ELF files without valid .dynamic section +- [#2476][2476] Deprecate 'keepends' argument in favor of 'drop' in `tube.recvline*` [2471]: https://github.com/Gallopsled/pwntools/pull/2471 [2358]: https://github.com/Gallopsled/pwntools/pull/2358 @@ -102,6 +103,7 @@ The table below shows which release corresponds to each branch, and what date th [2291]: https://github.com/Gallopsled/pwntools/pull/2291 [2410]: https://github.com/Gallopsled/pwntools/pull/2410 [2502]: https://github.com/Gallopsled/pwntools/pull/2502 +[2476]: https://github.com/Gallopsled/pwntools/pull/2476 ## 4.14.0 (`beta`) diff --git a/pwnlib/context/__init__.py b/pwnlib/context/__init__.py index 670fb138a..a95ec4310 100644 --- a/pwnlib/context/__init__.py +++ b/pwnlib/context/__init__.py @@ -594,9 +594,9 @@ def quiet(self, function=None): ... log.debug("DEBUG") ... log.info("INFO") ... log.warn("WARN") - [DEBUG] DEBUG - [*] INFO - [!] WARN + [...] DEBUG + [...] INFO + [...] WARN """ level = 'error' if context.log_level <= logging.DEBUG: @@ -664,7 +664,7 @@ def verbose(self): information is printed. >>> with context.verbose: func() - [DEBUG] Hello + [...] 
Hello """ return self.local(log_level='debug') diff --git a/pwnlib/tubes/tube.py b/pwnlib/tubes/tube.py index 1a82f145c..532045444 100644 --- a/pwnlib/tubes/tube.py +++ b/pwnlib/tubes/tube.py @@ -46,6 +46,44 @@ def __init__(self, timeout = default, level = None, *a, **kw): self._newline = None atexit.register(self.close) + def _normalize_keepends_drop(self, keepends, drop, drop_default): + ''' + >>> t = tube() + >>> t._normalize_keepends_drop(None, None, True) + True + >>> t._normalize_keepends_drop(None, None, False) + False + >>> t._normalize_keepends_drop(None, True, True) + True + >>> t._normalize_keepends_drop(None, True, False) + True + >>> t._normalize_keepends_drop(True, None, True) + False + >>> t._normalize_keepends_drop(True, None, False) + False + >>> t._normalize_keepends_drop(None, False, True) + False + >>> t._normalize_keepends_drop(None, False, False) + False + >>> t._normalize_keepends_drop(False, None, True) + True + >>> t._normalize_keepends_drop(False, None, False) + True + >>> t._normalize_keepends_drop(False, True, False) + Traceback (most recent call last): + ... + pwnlib.exception.PwnlibException: 'drop' and 'keepends' arguments cannot be used together. + ''' + if keepends is not None: + self.warn_once("'keepends' argument is deprecated. Use 'drop' instead.") + if drop is None and keepends is None: + return drop_default + elif drop is not None: + if keepends is not None: + self.error("'drop' and 'keepends' arguments cannot be used together.") + return drop + return not keepends + @property def newline(self): r'''Character sent with methods like sendline() or used for recvline(). @@ -100,7 +138,7 @@ def recv(self, numb = None, timeout = default): >>> t.recv() == b'Woohoo' True >>> with context.local(log_level='debug'): - ... _ = t.recv() # doctest: +ELLIPSIS + ... _ = t.recv() [...] Received 0xc bytes: b'Hello, world' """ @@ -265,7 +303,7 @@ def recvn(self, numb, timeout = default): >>> t.recv_raw = lambda *a: time.sleep(0.01) or b'a' >>> t.recvn(10, timeout=0.05) b'' - >>> t.recvn(10, timeout=0.06) # doctest: +ELLIPSIS + >>> t.recvn(10, timeout=0.06) b'aaaaaa...' """ # Keep track of how much data has been received @@ -370,8 +408,8 @@ def recvuntil(self, delims, drop=False, timeout=default): return b'' - def recvlines(self, numlines=2**20, keepends=False, timeout=default): - r"""recvlines(numlines, keepends=False, timeout=default) -> list of bytes objects + def recvlines(self, numlines=2**20, keepends=None, drop=None, timeout=default): + r"""recvlines(numlines, drop=True, timeout=default) -> list of bytes objects Receive up to ``numlines`` lines. @@ -383,7 +421,7 @@ def recvlines(self, numlines=2**20, keepends=False, timeout=default): Arguments: numlines(int): Maximum number of lines to receive - keepends(bool): Keep newlines at the end of each line (:const:`False`). + drop(bool): Drop newlines at the end of each line (:const:`True`). timeout(int): Maximum timeout Raises: @@ -404,15 +442,20 @@ def recvlines(self, numlines=2**20, keepends=False, timeout=default): [b'Foo', b'Bar', b'Baz'] >>> t.recvlines(3, True) [b'Foo\n', b'Bar\n', b'Baz\n'] + >>> t.recvlines(3, drop=False) + [b'Foo\n', b'Bar\n', b'Baz\n'] """ + drop = self._normalize_keepends_drop(keepends, drop, True) + del keepends + lines = [] with self.countdown(timeout): for _ in range(numlines): try: - # We must set 'keepends' to True here so that we can + # We must set 'drop' to False here so that we can # restore the original, unmodified data to the buffer # in the event of a timeout. 
- res = self.recvline(keepends=True, timeout=timeout) + res = self.recvline(drop=False, timeout=timeout) except Exception: self.unrecv(b''.join(lines)) raise @@ -422,13 +465,13 @@ def recvlines(self, numlines=2**20, keepends=False, timeout=default): else: break - if not keepends: + if drop: lines = [line.rstrip(self.newline) for line in lines] return lines - def recvlinesS(self, numlines=2**20, keepends=False, timeout=default): - r"""recvlinesS(numlines, keepends=False, timeout=default) -> str list + def recvlinesS(self, numlines=2**20, keepends=None, drop=None, timeout=default): + r"""recvlinesS(numlines, drop=True, timeout=default) -> str list This function is identical to :meth:`recvlines`, but decodes the received bytes into string using :func:`context.encoding`. @@ -444,10 +487,10 @@ def recvlinesS(self, numlines=2**20, keepends=False, timeout=default): >>> t.recvlinesS(3) ['Foo', 'Bar', 'Baz'] """ - return [packing._decode(x) for x in self.recvlines(numlines, keepends, timeout)] + return [packing._decode(x) for x in self.recvlines(numlines, keepends=keepends, drop=drop, timeout=timeout)] - def recvlinesb(self, numlines=2**20, keepends=False, timeout=default): - r"""recvlinesb(numlines, keepends=False, timeout=default) -> bytearray list + def recvlinesb(self, numlines=2**20, keepends=None, drop=None, timeout=default): + r"""recvlinesb(numlines, drop=True, timeout=default) -> bytearray list This function is identical to :meth:`recvlines`, but returns a bytearray. @@ -461,10 +504,10 @@ def recvlinesb(self, numlines=2**20, keepends=False, timeout=default): >>> t.recvlinesb(3) [bytearray(b'Foo'), bytearray(b'Bar'), bytearray(b'Baz')] """ - return [bytearray(x) for x in self.recvlines(numlines, keepends, timeout)] + return [bytearray(x) for x in self.recvlines(numlines, keepends=keepends, drop=drop, timeout=timeout)] - def recvline(self, keepends=True, timeout=default): - r"""recvline(keepends=True, timeout=default) -> bytes + def recvline(self, keepends=None, drop=None, timeout=default): + r"""recvline(drop=False, timeout=default) -> bytes Receive a single line from the tube. @@ -480,7 +523,7 @@ def recvline(self, keepends=True, timeout=default): all data is buffered and an empty byte string (``b''``) is returned. Arguments: - keepends(bool): Keep the line ending (:const:`True`). + drop(bool): Drop the line ending (:const:`False`). timeout(int): Timeout Raises: @@ -503,10 +546,10 @@ def recvline(self, keepends=True, timeout=default): b'Foo\n' >>> t.recvline() b'Bar\r\n' - >>> t.recvline(keepends = False) + >>> t.recvline(False) b'Baz' >>> t.newline = b'\r\n' - >>> t.recvline(keepends = False) + >>> t.recvline(drop=True) b'Foo\nBar' >>> t = tube() >>> def _recv_eof(n): @@ -520,13 +563,16 @@ def recvline(self, keepends=True, timeout=default): b'real line\n' >>> t.recvline() b'trailing data' - >>> t.recvline() # doctest: +ELLIPSIS + >>> t.recvline() Traceback (most recent call last): - ... + ... 
EOFError """ + drop = self._normalize_keepends_drop(keepends, drop, False) + del keepends + try: - return self.recvuntil(self.newline, drop = not keepends, timeout = timeout) + return self.recvuntil(self.newline, drop=drop, timeout=timeout) except EOFError: if not context.throw_eof_on_incomplete_line and self.buffer.size > 0: if context.throw_eof_on_incomplete_line is None: @@ -534,8 +580,8 @@ def recvline(self, keepends=True, timeout=default): return self.buffer.get() raise - def recvline_pred(self, pred, keepends=False, timeout=default): - r"""recvline_pred(pred, keepends=False) -> bytes + def recvline_pred(self, pred, keepends=None, drop=None, timeout=default): + r"""recvline_pred(pred, drop=True, timeout=default) -> bytes Receive data until ``pred(line)`` returns a truthy value. Drop all other data. @@ -546,6 +592,7 @@ def recvline_pred(self, pred, keepends=False, timeout=default): Arguments: pred(callable): Function to call. Returns the line for which this function returns :const:`True`. + drop(bool): Drop the line ending (:const:`True`). Examples: @@ -553,18 +600,22 @@ def recvline_pred(self, pred, keepends=False, timeout=default): >>> t.recv_raw = lambda n: b"Foo\nBar\nBaz\n" >>> t.recvline_pred(lambda line: line == b"Bar\n") b'Bar' - >>> t.recvline_pred(lambda line: line == b"Bar\n", keepends=True) + >>> t.recvline_pred(lambda line: line == b"Bar\n", True) + b'Bar\n' + >>> t.recvline_pred(lambda line: line == b"Bar\n", drop=False) b'Bar\n' >>> t.recvline_pred(lambda line: line == b'Nope!', timeout=0.1) b'' """ + drop = self._normalize_keepends_drop(keepends, drop, True) + del keepends tmpbuf = Buffer() line = b'' with self.countdown(timeout): while self.countdown_active(): try: - line = self.recvline(keepends=True) + line = self.recvline(drop=False) except Exception: self.buffer.unget(tmpbuf) raise @@ -574,22 +625,23 @@ def recvline_pred(self, pred, keepends=False, timeout=default): return b'' if pred(line): - if not keepends: - line = line[:-len(self.newline)] + if drop: + line = line.rstrip(self.newline) return line else: tmpbuf.add(line) return b'' - def recvline_contains(self, items, keepends = False, timeout = default): - r""" + def recvline_contains(self, items, keepends=None, drop=None, timeout=default): + r"""recvline_contains(items, drop=True, timeout=default) -> bytes + Receive lines until one line is found which contains at least one of `items`. Arguments: items(str,tuple): List of strings to search for, or a single string. - keepends(bool): Return lines with newlines if :const:`True` + drop(bool): Drop the line ending (:const:`True`). timeout(int): Timeout, in seconds Examples: @@ -615,10 +667,10 @@ def recvline_contains(self, items, keepends = False, timeout = default): def pred(line): return any(d in line for d in items) - return self.recvline_pred(pred, keepends, timeout) + return self.recvline_pred(pred, keepends=keepends, drop=drop, timeout=timeout) - def recvline_startswith(self, delims, keepends=False, timeout=default): - r"""recvline_startswith(delims, keepends=False, timeout=default) -> bytes + def recvline_startswith(self, delims, keepends=None, drop=None, timeout=default): + r"""recvline_startswith(delims, drop=True, timeout=default) -> bytes Keep receiving lines until one is found that starts with one of `delims`. Returns the last line received. 
@@ -628,7 +680,7 @@ def recvline_startswith(self, delims, keepends=False, timeout=default): Arguments: delims(str,tuple): List of strings to search for, or string of single characters - keepends(bool): Return lines with newlines if :const:`True` + drop(bool): Drop the line ending (:const:`True`). timeout(int): Timeout, in seconds Returns: @@ -640,7 +692,7 @@ def recvline_startswith(self, delims, keepends=False, timeout=default): >>> t.recv_raw = lambda n: b"Hello\nWorld\nXylophone\n" >>> t.recvline_startswith((b'W',b'X',b'Y',b'Z')) b'World' - >>> t.recvline_startswith((b'W',b'X',b'Y',b'Z'), True) + >>> t.recvline_startswith((b'W',b'X',b'Y',b'Z'), drop=False) b'Xylophone\n' >>> t.recvline_startswith(b'Wo') b'World' @@ -652,10 +704,11 @@ def recvline_startswith(self, delims, keepends=False, timeout=default): return self.recvline_pred(lambda line: any(map(line.startswith, delims)), keepends=keepends, + drop=drop, timeout=timeout) - def recvline_endswith(self, delims, keepends=False, timeout=default): - r"""recvline_endswith(delims, keepends=False, timeout=default) -> bytes + def recvline_endswith(self, delims, keepends=None, drop=None, timeout=default): + r"""recvline_endswith(delims, drop=True, timeout=default) -> bytes Keep receiving lines until one is found that ends with one of `delims`. Returns the last line received. @@ -671,7 +724,7 @@ def recvline_endswith(self, delims, keepends=False, timeout=default): >>> t.recv_raw = lambda n: b'Foo\nBar\nBaz\nKaboodle\n' >>> t.recvline_endswith(b'r') b'Bar' - >>> t.recvline_endswith((b'a',b'b',b'c',b'd',b'e'), True) + >>> t.recvline_endswith((b'a',b'b',b'c',b'd',b'e'), drop=False) b'Kaboodle\n' >>> t.recvline_endswith(b'oodle') b'Kaboodle' @@ -684,6 +737,7 @@ def recvline_endswith(self, delims, keepends=False, timeout=default): return self.recvline_pred(lambda line: any(map(line.endswith, delims)), keepends=keepends, + drop=drop, timeout=timeout) def recvregex(self, regex, exact=False, timeout=default, capture=False): @@ -726,8 +780,8 @@ def recvregex(self, regex, exact=False, timeout=default, capture=False): else: return self.recvpred(pred, timeout = timeout) - def recvline_regex(self, regex, exact=False, keepends=False, timeout=default): - """recvline_regex(regex, exact=False, keepends=False, timeout=default) -> bytes + def recvline_regex(self, regex, exact=False, keepends=None, drop=None, timeout=default): + """recvline_regex(regex, exact=False, drop=True, timeout=default) -> bytes Wrapper around :func:`recvline_pred`, which will return when a regex matches a line. @@ -748,7 +802,7 @@ def recvline_regex(self, regex, exact=False, keepends=False, timeout=default): else: pred = regex.search - return self.recvline_pred(pred, keepends = keepends, timeout = timeout) + return self.recvline_pred(pred, keepends=keepends, drop=drop, timeout=timeout) def recvrepeat(self, timeout=default): """recvrepeat(timeout=default) -> bytes @@ -1064,8 +1118,8 @@ def clean_and_log(self, timeout = 0.05): >>> t.connected_raw = lambda d: True >>> t.fileno = lambda: 1234 >>> with context.local(log_level='info'): - ... data = t.clean_and_log() #doctest: +ELLIPSIS - [DEBUG] Received 0xb bytes: + ... data = t.clean_and_log() + [...] Received 0xb bytes: b'hooray_data' >>> data b'hooray_data' @@ -1440,7 +1494,7 @@ def shutdown(self, direction = "send"): send send send - >>> t.shutdown('bad_value') #doctest: +ELLIPSIS + >>> t.shutdown('bad_value') Traceback (most recent call last): ... 
KeyError: "direction must be in ['in', 'out', 'read', 'recv', 'send', 'write']" @@ -1474,7 +1528,7 @@ def connected(self, direction = 'any'): send send send - >>> t.connected('bad_value') #doctest: +ELLIPSIS + >>> t.connected('bad_value') Traceback (most recent call last): ... KeyError: "direction must be in ['any', 'in', 'out', 'read', 'recv', 'send', 'write']"