From f3a0535153e997081c846707c14969aa7e7946a3 Mon Sep 17 00:00:00 2001
From: Munna Shaik
Date: Wed, 4 Dec 2024 13:50:06 +0530
Subject: [PATCH] fix: Python upgrade readiness changes

---
 TA_dataset/lib/requirements.txt |    2 +-
 lib_files/socks.py              |  903 ++++++++
 lib_files/sockshandler.py       |  162 ++
 lib_files/typing_extensions.py  | 3838 +++++++++++++++++++++++++++++++
 scripts/pack.sh                 |    3 +
 5 files changed, 4907 insertions(+), 1 deletion(-)
 create mode 100644 lib_files/socks.py
 create mode 100755 lib_files/sockshandler.py
 create mode 100644 lib_files/typing_extensions.py

diff --git a/TA_dataset/lib/requirements.txt b/TA_dataset/lib/requirements.txt
index f8fed017..7dab2507 100644
--- a/TA_dataset/lib/requirements.txt
+++ b/TA_dataset/lib/requirements.txt
@@ -8,5 +8,5 @@ requests==2.31.0; python_version <= '3.8'
 sniffio==1.3.0; python_version <= '3.8'
 splunk-sdk==1.7.3; python_version <= '3.8'
 splunktaucclib==6.0.7; python_version <= '3.8'
-typing-extensions==4.7.1; python_version <= '3.8'
+typing-extensions==4.12.2; python_version <= '3.8'
 urllib3==1.26.16; python_version <= '3.8'
diff --git a/lib_files/socks.py b/lib_files/socks.py
new file mode 100644
index 00000000..c5e98744
--- /dev/null
+++ b/lib_files/socks.py
@@ -0,0 +1,903 @@
+# -*- coding: utf-8 -*-
+from base64 import b64encode
+
+from six.moves import zip
+
+try:
+    from collections.abc import Callable
+except ImportError:
+    from collections import Callable
+
+import functools
+import logging
+import os
+import socket
+import struct
+import sys
+from errno import EAGAIN, EINVAL, EOPNOTSUPP
+from io import BytesIO
+from os import SEEK_CUR
+
+__version__ = "1.7.1"
+
+
+if os.name == "nt" and sys.version_info < (3, 0):
+    try:
+        import win_inet_pton
+    except ImportError:
+        raise ImportError("To run PySocks on Windows you must install win_inet_pton")
+
+log = logging.getLogger(__name__)
+
+PROXY_TYPE_SOCKS4 = SOCKS4 = 1
+PROXY_TYPE_SOCKS5 = SOCKS5 = 2
+PROXY_TYPE_HTTP = HTTP = 3
+
+PROXY_TYPES = {"SOCKS4": SOCKS4, "SOCKS5": SOCKS5, "HTTP": HTTP}
+PRINTABLE_PROXY_TYPES = dict(
+    list(zip(list(PROXY_TYPES.values()), list(PROXY_TYPES.keys())))
+)
+
+_orgsocket = _orig_socket = socket.socket
+
+
+def set_self_blocking(function):
+    @functools.wraps(function)
+    def wrapper(*args, **kwargs):
+        self = args[0]
+        try:
+            _is_blocking = self.gettimeout()
+            if _is_blocking == 0:
+                self.setblocking(True)
+            return function(*args, **kwargs)
+        except Exception as e:
+            raise
+        finally:
+            # restore the original blocking state
+            if _is_blocking == 0:
+                self.setblocking(False)
+
+    return wrapper
+
+
+class ProxyError(IOError):
+    """Socket_err contains original socket.error exception."""
+
+    def __init__(self, msg, socket_err=None):
+        self.msg = msg
+        self.socket_err = socket_err
+
+        if socket_err:
+            self.msg += ": {}".format(socket_err)
+
+    def __str__(self):
+        return self.msg
+
+
+class GeneralProxyError(ProxyError):
+    pass
+
+
+class ProxyConnectionError(ProxyError):
+    pass
+
+
+class SOCKS5AuthError(ProxyError):
+    pass
+
+
+class SOCKS5Error(ProxyError):
+    pass
+
+
+class SOCKS4Error(ProxyError):
+    pass
+
+
+class HTTPError(ProxyError):
+    pass
+
+
+SOCKS4_ERRORS = {
+    0x5B: "Request rejected or failed",
+    0x5C: (
+        "Request rejected because SOCKS server cannot connect to identd on the client"
+    ),
+    0x5D: (
+        "Request rejected because the client program and identd report"
+        " different user-ids"
+    ),
+}
+
+SOCKS5_ERRORS = {
+    0x01: "General SOCKS server failure",
+    0x02: "Connection not allowed by ruleset",
+    0x03: "Network unreachable",
+    0x04: "Host unreachable",
+    0x05: 
"Connection refused", + 0x06: "TTL expired", + 0x07: "Command not supported, or protocol error", + 0x08: "Address type not supported", +} + +DEFAULT_PORTS = {SOCKS4: 1080, SOCKS5: 1080, HTTP: 8080} + + +def set_default_proxy( + proxy_type=None, addr=None, port=None, rdns=True, username=None, password=None +): + """Sets a default proxy. + + All further socksocket objects will use the default unless explicitly + changed. All parameters are as for socket.set_proxy().""" + socksocket.default_proxy = ( + proxy_type, + addr, + port, + rdns, + username.encode() if username else None, + password.encode() if password else None, + ) + + +def setdefaultproxy(*args, **kwargs): + if "proxytype" in kwargs: + kwargs["proxy_type"] = kwargs.pop("proxytype") + return set_default_proxy(*args, **kwargs) + + +def get_default_proxy(): + """Returns the default proxy, set by set_default_proxy.""" + return socksocket.default_proxy + + +getdefaultproxy = get_default_proxy + + +def wrap_module(module): + """Attempts to replace a module's socket library with a SOCKS socket. + + Must set a default proxy using set_default_proxy(...) first. This will + only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category.""" + if socksocket.default_proxy: + module.socket.socket = socksocket + else: + raise GeneralProxyError("No default proxy specified") + + +wrapmodule = wrap_module + + +def create_connection( + dest_pair, + timeout=None, + source_address=None, + proxy_type=None, + proxy_addr=None, + proxy_port=None, + proxy_rdns=True, + proxy_username=None, + proxy_password=None, + socket_options=None, +): + """create_connection(dest_pair, *[, timeout], **proxy_args) -> socket object + + Like socket.create_connection(), but connects to proxy + before returning the socket object. + + dest_pair - 2-tuple of (IP/hostname, port). + **proxy_args - Same args passed to socksocket.set_proxy() if present. + timeout - Optional socket timeout value, in seconds. + source_address - tuple (host, port) for the socket to bind to as its source + address before connecting (only for compatibility) + """ + # Remove IPv6 brackets on the remote address and proxy address. + remote_host, remote_port = dest_pair + if remote_host.startswith("["): + remote_host = remote_host.strip("[]") + if proxy_addr and proxy_addr.startswith("["): + proxy_addr = proxy_addr.strip("[]") + + err = None + + # Allow the SOCKS proxy to be on IPv4 or IPv6 addresses. 
+ for r in socket.getaddrinfo(proxy_addr, proxy_port, 0, socket.SOCK_STREAM): + family, socket_type, proto, canonname, sa = r + sock = None + try: + sock = socksocket(family, socket_type, proto) + + if socket_options: + for opt in socket_options: + sock.setsockopt(*opt) + + if isinstance(timeout, (int, float)): + sock.settimeout(timeout) + + if proxy_type: + sock.set_proxy( + proxy_type, + proxy_addr, + proxy_port, + proxy_rdns, + proxy_username, + proxy_password, + ) + if source_address: + sock.bind(source_address) + + sock.connect((remote_host, remote_port)) + return sock + + except (socket.error, ProxyError) as e: + err = e + if sock: + sock.close() + sock = None + + if err: + raise err + + raise socket.error("gai returned empty list.") + + +class _BaseSocket(socket.socket): + """Allows Python 2 delegated methods such as send() to be overridden.""" + + def __init__(self, *pos, **kw): + _orig_socket.__init__(self, *pos, **kw) + + self._savedmethods = dict() + for name in self._savenames: + self._savedmethods[name] = getattr(self, name) + delattr(self, name) # Allows normal overriding mechanism to work + + _savenames = list() + + +def _makemethod(name): + return lambda self, *pos, **kw: self._savedmethods[name](*pos, **kw) + + +for name in ("sendto", "send", "recvfrom", "recv"): + method = getattr(_BaseSocket, name, None) + + # Determine if the method is not defined the usual way + # as a function in the class. + # Python 2 uses __slots__, so there are descriptors for each method, + # but they are not functions. + if not isinstance(method, Callable): + _BaseSocket._savenames.append(name) + setattr(_BaseSocket, name, _makemethod(name)) + + +class socksocket(_BaseSocket): + """socksocket([family[, type[, proto]]]) -> socket object + + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET and proto=0. + The "type" argument must be either SOCK_STREAM or SOCK_DGRAM. + """ + + default_proxy = None + + def __init__( + self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, *args, **kwargs + ): + if type not in (socket.SOCK_STREAM, socket.SOCK_DGRAM): + msg = "Socket type must be stream or datagram, not {!r}" + raise ValueError(msg.format(type)) + + super(socksocket, self).__init__(family, type, proto, *args, **kwargs) + self._proxyconn = None # TCP connection to keep UDP relay alive + + if self.default_proxy: + self.proxy = self.default_proxy + else: + self.proxy = (None, None, None, None, None, None) + self.proxy_sockname = None + self.proxy_peername = None + + self._timeout = None + + def _readall(self, file, count): + """Receive EXACTLY the number of bytes requested from the file object. + + Blocks until the required number of bytes have been received.""" + data = b"" + while len(data) < count: + d = file.read(count - len(data)) + if not d: + raise GeneralProxyError("Connection closed unexpectedly") + data += d + return data + + def settimeout(self, timeout): + self._timeout = timeout + try: + # test if we're connected, if so apply timeout + peer = self.get_proxy_peername() + super(socksocket, self).settimeout(self._timeout) + except socket.error: + pass + + def gettimeout(self): + return self._timeout + + def setblocking(self, v): + if v: + self.settimeout(None) + else: + self.settimeout(0.0) + + def set_proxy( + self, + proxy_type=None, + addr=None, + port=None, + rdns=True, + username=None, + password=None, + ): + """Sets the proxy to be used. 
+ + proxy_type - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be performed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided.""" + self.proxy = ( + proxy_type, + addr, + port, + rdns, + username.encode() if username else None, + password.encode() if password else None, + ) + + def setproxy(self, *args, **kwargs): + if "proxytype" in kwargs: + kwargs["proxy_type"] = kwargs.pop("proxytype") + return self.set_proxy(*args, **kwargs) + + def bind(self, *pos, **kw): + """Implements proxy connection for UDP sockets. + + Happens during the bind() phase.""" + (proxy_type, proxy_addr, proxy_port, rdns, username, password) = self.proxy + if not proxy_type or self.type != socket.SOCK_DGRAM: + return _orig_socket.bind(self, *pos, **kw) + + if self._proxyconn: + raise socket.error(EINVAL, "Socket already bound to an address") + if proxy_type != SOCKS5: + msg = "UDP only supported by SOCKS5 proxy type" + raise socket.error(EOPNOTSUPP, msg) + super(socksocket, self).bind(*pos, **kw) + + # Need to specify actual local port because + # some relays drop packets if a port of zero is specified. + # Avoid specifying host address in case of NAT though. + _, port = self.getsockname() + dst = ("0", port) + + self._proxyconn = _orig_socket() + proxy = self._proxy_addr() + self._proxyconn.connect(proxy) + + UDP_ASSOCIATE = b"\x03" + _, relay = self._SOCKS5_request(self._proxyconn, UDP_ASSOCIATE, dst) + + # The relay is most likely on the same host as the SOCKS proxy, + # but some proxies return a private IP address (10.x.y.z) + host, _ = proxy + _, port = relay + super(socksocket, self).connect((host, port)) + super(socksocket, self).settimeout(self._timeout) + self.proxy_sockname = ("0.0.0.0", 0) # Unknown + + def sendto(self, bytes, *args, **kwargs): + if self.type != socket.SOCK_DGRAM: + return super(socksocket, self).sendto(bytes, *args, **kwargs) + if not self._proxyconn: + self.bind(("", 0)) + + address = args[-1] + flags = args[:-1] + + header = BytesIO() + RSV = b"\x00\x00" + header.write(RSV) + STANDALONE = b"\x00" + header.write(STANDALONE) + self._write_SOCKS5_address(address, header) + + sent = super(socksocket, self).send(header.getvalue() + bytes, *flags, **kwargs) + return sent - header.tell() + + def send(self, bytes, flags=0, **kwargs): + if self.type == socket.SOCK_DGRAM: + return self.sendto(bytes, flags, self.proxy_peername, **kwargs) + else: + return super(socksocket, self).send(bytes, flags, **kwargs) + + def recvfrom(self, bufsize, flags=0): + if self.type != socket.SOCK_DGRAM: + return super(socksocket, self).recvfrom(bufsize, flags) + if not self._proxyconn: + self.bind(("", 0)) + + buf = BytesIO(super(socksocket, self).recv(bufsize + 1024, flags)) + buf.seek(2, SEEK_CUR) + frag = buf.read(1) + if ord(frag): + raise NotImplementedError("Received UDP packet fragment") + fromhost, fromport = self._read_SOCKS5_address(buf) + + if self.proxy_peername: + peerhost, peerport = self.proxy_peername + if fromhost != peerhost or peerport not in (0, fromport): + raise 
socket.error(EAGAIN, "Packet filtered") + + return (buf.read(bufsize), (fromhost, fromport)) + + def recv(self, *pos, **kw): + bytes, _ = self.recvfrom(*pos, **kw) + return bytes + + def close(self): + if self._proxyconn: + self._proxyconn.close() + return super(socksocket, self).close() + + def get_proxy_sockname(self): + """Returns the bound IP address and port number at the proxy.""" + return self.proxy_sockname + + getproxysockname = get_proxy_sockname + + def get_proxy_peername(self): + """ + Returns the IP and port number of the proxy. + """ + return self.getpeername() + + getproxypeername = get_proxy_peername + + def get_peername(self): + """Returns the IP address and port number of the destination machine. + + Note: get_proxy_peername returns the proxy.""" + return self.proxy_peername + + getpeername = get_peername + + def _negotiate_SOCKS5(self, *dest_addr): + """Negotiates a stream connection through a SOCKS5 server.""" + CONNECT = b"\x01" + self.proxy_peername, self.proxy_sockname = self._SOCKS5_request( + self, CONNECT, dest_addr + ) + + def _SOCKS5_request(self, conn, cmd, dst): + """ + Send SOCKS5 request with given command (CMD field) and + address (DST field). Returns resolved DST address that was used. + """ + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = conn.makefile("wb") + reader = conn.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # First we'll send the authentication packages we support. + if username and password: + # The username/password details were supplied to the + # set_proxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). + writer.write(b"\x05\x02\x00\x02") + else: + # No username/password were entered, therefore we + # only support connections with no authentication. + writer.write(b"\x05\x01\x00") + + # We'll receive the server's response to determine which + # method was selected + writer.flush() + chosen_auth = self._readall(reader, 2) + + if chosen_auth[0:1] != b"\x05": + # Note: string[i:i+1] is used because indexing of a bytestring + # via bytestring[i] yields an integer in Python 3 + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + # Check the chosen authentication method + + if chosen_auth[1:2] == b"\x02": + # Okay, we need to perform a basic username/password + # authentication. + if not (username and password): + # Although we said we don't support authentication, the + # server may still request basic username/password + # authentication + raise SOCKS5AuthError( + "No username/password supplied. 
" + "Server requested username/password" + " authentication" + ) + + writer.write( + b"\x01" + + chr(len(username)).encode() + + username + + chr(len(password)).encode() + + password + ) + writer.flush() + auth_status = self._readall(reader, 2) + if auth_status[0:1] != b"\x01": + # Bad response + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + if auth_status[1:2] != b"\x00": + # Authentication failed + raise SOCKS5AuthError("SOCKS5 authentication failed") + + # Otherwise, authentication succeeded + + # No authentication is required if 0x00 + elif chosen_auth[1:2] != b"\x00": + # Reaching here is always bad + if chosen_auth[1:2] == b"\xFF": + raise SOCKS5AuthError( + "All offered SOCKS5 authentication methods were rejected" + ) + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + # Now we can request the actual connection + writer.write(b"\x05" + cmd + b"\x00") + resolved = self._write_SOCKS5_address(dst, writer) + writer.flush() + + # Get the response + resp = self._readall(reader, 3) + if resp[0:1] != b"\x05": + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x00: + # Connection failed: server returned an error + error = SOCKS5_ERRORS.get(status, "Unknown error") + raise SOCKS5Error("{:#04x}: {}".format(status, error)) + + # Get the bound address/port + bnd = self._read_SOCKS5_address(reader) + + super(socksocket, self).settimeout(self._timeout) + return (resolved, bnd) + finally: + reader.close() + writer.close() + + def _write_SOCKS5_address(self, addr, file): + """ + Return the host and port packed for the SOCKS5 protocol, + and the resolved address as a tuple object. + """ + host, port = addr + proxy_type, _, _, rdns, username, password = self.proxy + family_to_byte = {socket.AF_INET: b"\x01", socket.AF_INET6: b"\x04"} + + # If the given destination address is an IP address, we'll + # use the IP address request even if remote resolving was specified. + # Detect whether the address is IPv4/6 directly. + for family in (socket.AF_INET, socket.AF_INET6): + try: + addr_bytes = socket.inet_pton(family, host) + file.write(family_to_byte[family] + addr_bytes) + host = socket.inet_ntop(family, addr_bytes) + file.write(struct.pack(">H", port)) + return host, port + except socket.error: + continue + + # Well it's not an IP number, so it's probably a DNS name. + if rdns: + # Resolve remotely + host_bytes = host.encode("idna") + file.write(b"\x03" + chr(len(host_bytes)).encode() + host_bytes) + else: + # Resolve locally + addresses = socket.getaddrinfo( + host, + port, + socket.AF_UNSPEC, + socket.SOCK_STREAM, + socket.IPPROTO_TCP, + socket.AI_ADDRCONFIG, + ) + # We can't really work out what IP is reachable, so just pick the + # first. 
+ target_addr = addresses[0] + family = target_addr[0] + host = target_addr[4][0] + + addr_bytes = socket.inet_pton(family, host) + file.write(family_to_byte[family] + addr_bytes) + host = socket.inet_ntop(family, addr_bytes) + file.write(struct.pack(">H", port)) + return host, port + + def _read_SOCKS5_address(self, file): + atyp = self._readall(file, 1) + if atyp == b"\x01": + addr = socket.inet_ntoa(self._readall(file, 4)) + elif atyp == b"\x03": + length = self._readall(file, 1) + addr = self._readall(file, ord(length)) + elif atyp == b"\x04": + addr = socket.inet_ntop(socket.AF_INET6, self._readall(file, 16)) + else: + raise GeneralProxyError("SOCKS5 proxy server sent invalid data") + + port = struct.unpack(">H", self._readall(file, 2))[0] + return addr, port + + def _negotiate_SOCKS4(self, dest_addr, dest_port): + """Negotiates a connection through a SOCKS4 server.""" + proxy_type, addr, port, rdns, username, password = self.proxy + + writer = self.makefile("wb") + reader = self.makefile("rb", 0) # buffering=0 renamed in Python 3 + try: + # Check if the destination address provided is an IP address + remote_resolve = False + try: + addr_bytes = socket.inet_aton(dest_addr) + except socket.error: + # It's a DNS name. Check where it should be resolved. + if rdns: + addr_bytes = b"\x00\x00\x00\x01" + remote_resolve = True + else: + addr_bytes = socket.inet_aton(socket.gethostbyname(dest_addr)) + + # Construct the request packet + writer.write(struct.pack(">BBH", 0x04, 0x01, dest_port)) + writer.write(addr_bytes) + + # The username parameter is considered userid for SOCKS4 + if username: + writer.write(username) + writer.write(b"\x00") + + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. + if remote_resolve: + writer.write(dest_addr.encode("idna") + b"\x00") + writer.flush() + + # Get the response from the server + resp = self._readall(reader, 8) + if resp[0:1] != b"\x00": + # Bad data + raise GeneralProxyError("SOCKS4 proxy server sent invalid data") + + status = ord(resp[1:2]) + if status != 0x5A: + # Connection failed: server returned an error + error = SOCKS4_ERRORS.get(status, "Unknown error") + raise SOCKS4Error("{:#04x}: {}".format(status, error)) + + # Get the bound address/port + self.proxy_sockname = ( + socket.inet_ntoa(resp[4:]), + struct.unpack(">H", resp[2:4])[0], + ) + if remote_resolve: + self.proxy_peername = socket.inet_ntoa(addr_bytes), dest_port + else: + self.proxy_peername = dest_addr, dest_port + finally: + reader.close() + writer.close() + + def _negotiate_HTTP(self, dest_addr, dest_port): + """Negotiates a connection through an HTTP server. 
+ + NOTE: This currently only supports HTTP CONNECT-style proxies.""" + proxy_type, addr, port, rdns, username, password = self.proxy + + # If we need to resolve locally, we do this now + addr = dest_addr if rdns else socket.gethostbyname(dest_addr) + + http_headers = [ + ( + b"CONNECT " + + addr.encode("idna") + + b":" + + str(dest_port).encode() + + b" HTTP/1.1" + ), + b"Host: " + dest_addr.encode("idna"), + ] + + if username and password: + http_headers.append( + b"Proxy-Authorization: basic " + b64encode(username + b":" + password) + ) + + http_headers.append(b"\r\n") + + self.sendall(b"\r\n".join(http_headers)) + + # We just need the first line to check if the connection was successful + fobj = self.makefile() + status_line = fobj.readline() + fobj.close() + + if not status_line: + raise GeneralProxyError("Connection closed unexpectedly") + + try: + proto, status_code, status_msg = status_line.split(" ", 2) + except ValueError: + raise GeneralProxyError("HTTP proxy server sent invalid response") + + if not proto.startswith("HTTP/"): + raise GeneralProxyError("Proxy server does not appear to be an HTTP proxy") + + try: + status_code = int(status_code) + except ValueError: + raise HTTPError("HTTP proxy server did not return a valid HTTP status") + + if status_code != 200: + error = "{}: {}".format(status_code, status_msg) + if status_code in (400, 403, 405): + # It's likely that the HTTP proxy server does not support the + # CONNECT tunneling method + error += ( + "\n[*] Note: The HTTP proxy server may not be" + " supported by PySocks (must be a CONNECT tunnel" + " proxy)" + ) + raise HTTPError(error) + + self.proxy_sockname = (b"0.0.0.0", 0) + self.proxy_peername = addr, dest_port + + _proxy_negotiators = { + SOCKS4: _negotiate_SOCKS4, + SOCKS5: _negotiate_SOCKS5, + HTTP: _negotiate_HTTP, + } + + @set_self_blocking + def connect(self, dest_pair, catch_errors=None): + """ + Connects to the specified destination through a proxy. + Uses the same API as socket's connect(). + To select the proxy server, use set_proxy(). + + dest_pair - 2-tuple of (IP/hostname, port). + """ + if len(dest_pair) != 2 or dest_pair[0].startswith("["): + # Probably IPv6, not supported -- raise an error, and hope + # Happy Eyeballs (RFC6555) makes sure at least the IPv4 + # connection works... + raise socket.error("PySocks doesn't support IPv6: %s" % str(dest_pair)) + + dest_addr, dest_port = dest_pair + + if self.type == socket.SOCK_DGRAM: + if not self._proxyconn: + self.bind(("", 0)) + dest_addr = socket.gethostbyname(dest_addr) + + # If the host address is INADDR_ANY or similar, reset the peer + # address so that packets are received from any peer + if dest_addr == "0.0.0.0" and not dest_port: + self.proxy_peername = None + else: + self.proxy_peername = (dest_addr, dest_port) + return + + (proxy_type, proxy_addr, proxy_port, rdns, username, password) = self.proxy + + # Do a minimal input check first + if ( + not isinstance(dest_pair, (list, tuple)) + or len(dest_pair) != 2 + or not dest_addr + or not isinstance(dest_port, int) + ): + # Inputs failed, raise an error + raise GeneralProxyError("Invalid destination-connection (host, port) pair") + + # We set the timeout here so that we don't hang in connection or during + # negotiation. 
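+        # (Note: a timeout of None leaves the socket in blocking mode; the
+        #  @set_self_blocking decorator on connect() temporarily re-enables
+        #  blocking for non-blocking sockets and restores it afterwards.)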
+        super(socksocket, self).settimeout(self._timeout)
+
+        if proxy_type is None:
+            # Treat like regular socket object
+            self.proxy_peername = dest_pair
+            super(socksocket, self).settimeout(self._timeout)
+            super(socksocket, self).connect((dest_addr, dest_port))
+            return
+
+        proxy_addr = self._proxy_addr()
+
+        try:
+            # Initial connection to proxy server.
+            super(socksocket, self).connect(proxy_addr)
+
+        except socket.error as error:
+            # Error while connecting to proxy
+            self.close()
+            if not catch_errors:
+                proxy_addr, proxy_port = proxy_addr
+                proxy_server = "{}:{}".format(proxy_addr, proxy_port)
+                printable_type = PRINTABLE_PROXY_TYPES[proxy_type]
+
+                msg = "Error connecting to {} proxy {}".format(
+                    printable_type, proxy_server
+                )
+                log.debug("%s due to: %s", msg, error)
+                raise ProxyConnectionError(msg, error)
+            else:
+                raise error
+
+        else:
+            # Connected to proxy server, now negotiate
+            try:
+                # Calls negotiate_{SOCKS4, SOCKS5, HTTP}
+                negotiate = self._proxy_negotiators[proxy_type]
+                negotiate(self, dest_addr, dest_port)
+            except socket.error as error:
+                if not catch_errors:
+                    # Wrap socket errors
+                    self.close()
+                    raise GeneralProxyError("Socket error", error)
+                else:
+                    raise error
+            except ProxyError:
+                # Protocol error while negotiating with proxy
+                self.close()
+                raise
+
+    @set_self_blocking
+    def connect_ex(self, dest_pair):
+        """https://docs.python.org/3/library/socket.html#socket.socket.connect_ex
+        Like connect(address), but return an error indicator instead of raising an exception for errors returned by the C-level connect() call (other problems, such as "host not found", can still raise exceptions).
+        """
+        try:
+            self.connect(dest_pair, catch_errors=True)
+            return 0
+        except OSError as e:
+            # If the error is numeric (socket errors are numeric), then return the
+            # number, as connect_ex expects. Otherwise raise the error again (e.g. a socket timeout).
+            if e.errno:
+                return e.errno
+            else:
+                raise
+
+    def _proxy_addr(self):
+        """
+        Return the proxy address to connect to as a tuple object.
+        """
+        (proxy_type, proxy_addr, proxy_port, rdns, username, password) = self.proxy
+        proxy_port = proxy_port or DEFAULT_PORTS.get(proxy_type)
+        if not proxy_port:
+            raise GeneralProxyError("Invalid proxy type")
+        return proxy_addr, proxy_port
diff --git a/lib_files/sockshandler.py b/lib_files/sockshandler.py
new file mode 100755
index 00000000..487c7f88
--- /dev/null
+++ b/lib_files/sockshandler.py
@@ -0,0 +1,162 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+"""
+SocksiPy + urllib2 handler
+
+version: 0.3
+author: e
+
+This module provides a Handler which you can use with urllib2 to allow it to tunnel your connection through a socks.socksocket socket, without monkey-patching the original socket...
+"""
+from __future__ import print_function
+
+import socket
+import ssl
+
+# The handler classes below reference the six.moves names directly, so six
+# is a hard dependency of this module and a stdlib-only fallback cannot
+# work. http.client is bound via six.moves under the "httplib" alias that
+# the connection classes below use.
+import six.moves.http_client as httplib
+import six.moves.urllib.error
+import six.moves.urllib.parse
+import six.moves.urllib.request
+
+import socks  # $ pip install PySocks
+
+
+def merge_dict(a, b):
+    d = a.copy()
+    d.update(b)
+    return d
+
+
+def is_ip(s):
+    try:
+        if ":" in s:
+            socket.inet_pton(socket.AF_INET6, s)
+        elif "." 
in s: + socket.inet_aton(s) + else: + return False + except: + return False + else: + return True + + +socks4_no_rdns = set() + + +class SocksiPyConnection(httplib.HTTPConnection): + def __init__( + self, + proxytype, + proxyaddr, + proxyport=None, + rdns=True, + username=None, + password=None, + *args, + **kwargs + ): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPConnection.__init__(self, *args, **kwargs) + + def connect(self): + (proxytype, proxyaddr, proxyport, rdns, username, password) = self.proxyargs + rdns = rdns and proxyaddr not in socks4_no_rdns + while True: + try: + sock = socks.create_connection( + (self.host, self.port), + self.timeout, + None, + proxytype, + proxyaddr, + proxyport, + rdns, + username, + password, + ((socket.IPPROTO_TCP, socket.TCP_NODELAY, 1),), + ) + break + except socks.SOCKS4Error as e: + if rdns and "0x5b" in str(e) and not is_ip(self.host): + # Maybe a SOCKS4 server that doesn't support remote resolving + # Let's try again + rdns = False + socks4_no_rdns.add(proxyaddr) + else: + raise + self.sock = sock + + +class SocksiPyConnectionS(httplib.HTTPSConnection): + def __init__( + self, + proxytype, + proxyaddr, + proxyport=None, + rdns=True, + username=None, + password=None, + *args, + **kwargs + ): + self.proxyargs = (proxytype, proxyaddr, proxyport, rdns, username, password) + httplib.HTTPSConnection.__init__(self, *args, **kwargs) + + def connect(self): + SocksiPyConnection.connect(self) + self.sock = self._context.wrap_socket(self.sock, server_hostname=self.host) + if not self._context.check_hostname and self._check_hostname: + try: + ssl.match_hostname(self.sock.getpeercert(), self.host) + except Exception: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + + +class SocksiPyHandler( + six.moves.urllib.request.HTTPHandler, six.moves.urllib.request.HTTPSHandler +): + def __init__(self, *args, **kwargs): + self.args = args + self.kw = kwargs + six.moves.urllib.request.HTTPHandler.__init__(self) + + def http_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnection( + *self.args, host=host, port=port, timeout=timeout, **kw + ) + return conn + + return self.do_open(build, req) + + def https_open(self, req): + def build(host, port=None, timeout=0, **kwargs): + kw = merge_dict(self.kw, kwargs) + conn = SocksiPyConnectionS( + *self.args, host=host, port=port, timeout=timeout, **kw + ) + return conn + + return self.do_open(build, req) + + +if __name__ == "__main__": + import sys + + try: + port = int(sys.argv[1]) + except (ValueError, IndexError): + port = 9050 + opener = six.moves.urllib.request.build_opener( + SocksiPyHandler(socks.PROXY_TYPE_SOCKS5, "localhost", port) + ) + print("HTTP: " + opener.open("http://httpbin.org/ip").read().decode()) + print("HTTPS: " + opener.open("https://httpbin.org/ip").read().decode()) diff --git a/lib_files/typing_extensions.py b/lib_files/typing_extensions.py new file mode 100644 index 00000000..21068449 --- /dev/null +++ b/lib_files/typing_extensions.py @@ -0,0 +1,3838 @@ +# -*- coding: utf-8 -*- +import abc +import collections +import collections.abc +import contextlib +import functools +import inspect +import operator +import sys +import types as _types +import typing +import warnings + +__all__ = [ + # Super-special typing primitives. 
+ "Any", + "ClassVar", + "Concatenate", + "Final", + "LiteralString", + "ParamSpec", + "ParamSpecArgs", + "ParamSpecKwargs", + "Self", + "Type", + "TypeVar", + "TypeVarTuple", + "Unpack", + # ABCs (from collections.abc). + "Awaitable", + "AsyncIterator", + "AsyncIterable", + "Coroutine", + "AsyncGenerator", + "AsyncContextManager", + "Buffer", + "ChainMap", + # Concrete collection types. + "ContextManager", + "Counter", + "Deque", + "DefaultDict", + "NamedTuple", + "OrderedDict", + "TypedDict", + # Structural checks, a.k.a. protocols. + "SupportsAbs", + "SupportsBytes", + "SupportsComplex", + "SupportsFloat", + "SupportsIndex", + "SupportsInt", + "SupportsRound", + # One-off things. + "Annotated", + "assert_never", + "assert_type", + "clear_overloads", + "dataclass_transform", + "deprecated", + "Doc", + "get_overloads", + "final", + "get_args", + "get_origin", + "get_original_bases", + "get_protocol_members", + "get_type_hints", + "IntVar", + "is_protocol", + "is_typeddict", + "Literal", + "NewType", + "overload", + "override", + "Protocol", + "reveal_type", + "runtime", + "runtime_checkable", + "Text", + "TypeAlias", + "TypeAliasType", + "TypeGuard", + "TypeIs", + "TYPE_CHECKING", + "Never", + "NoReturn", + "ReadOnly", + "Required", + "NotRequired", + # Pure aliases, have always been in typing + "AbstractSet", + "AnyStr", + "BinaryIO", + "Callable", + "Collection", + "Container", + "Dict", + "ForwardRef", + "FrozenSet", + "Generator", + "Generic", + "Hashable", + "IO", + "ItemsView", + "Iterable", + "Iterator", + "KeysView", + "List", + "Mapping", + "MappingView", + "Match", + "MutableMapping", + "MutableSequence", + "MutableSet", + "NoDefault", + "Optional", + "Pattern", + "Reversible", + "Sequence", + "Set", + "Sized", + "TextIO", + "Tuple", + "Union", + "ValuesView", + "cast", + "no_type_check", + "no_type_check_decorator", +] + +# for backward compatibility +PEP_560 = True +GenericMeta = type +_PEP_696_IMPLEMENTED = sys.version_info >= (3, 13, 0, "beta") + +# The functions below are modified copies of typing internal helpers. +# They are needed by _ProtocolMeta and they provide support for PEP 646. + + +class _Sentinel: + def __repr__(self): + return "" + + +_marker = _Sentinel() + + +if sys.version_info >= (3, 10): + + def _should_collect_from_parameters(t): + return isinstance( + t, (typing._GenericAlias, _types.GenericAlias, _types.UnionType) + ) + +elif sys.version_info >= (3, 9): + + def _should_collect_from_parameters(t): + return isinstance(t, (typing._GenericAlias, _types.GenericAlias)) + +else: + + def _should_collect_from_parameters(t): + return isinstance(t, typing._GenericAlias) and not t._special + + +NoReturn = typing.NoReturn + +# Some unconstrained type variables. These are used by the container types. +# (These are not for export.) +T = typing.TypeVar("T") # Any type. +KT = typing.TypeVar("KT") # Key type. +VT = typing.TypeVar("VT") # Value type. +T_co = typing.TypeVar("T_co", covariant=True) # Any type covariant containers. +T_contra = typing.TypeVar("T_contra", contravariant=True) # Ditto contravariant. + + +if sys.version_info >= (3, 11): + from typing import Any +else: + + class _AnyMeta(type): + def __instancecheck__(self, obj): + if self is Any: + raise TypeError( + "typing_extensions.Any cannot be used with isinstance()" + ) + return super().__instancecheck__(obj) + + def __repr__(self): + if self is Any: + return "typing_extensions.Any" + return super().__repr__() + + class Any(metaclass=_AnyMeta): + """Special type indicating an unconstrained type. 
+ - Any is compatible with every type. + - Any assumed to have all methods. + - All values assumed to be instances of Any. + Note that all the above statements are true from the point of view of + static type checkers. At runtime, Any should not be used with instance + checks. + """ + + def __new__(cls, *args, **kwargs): + if cls is Any: + raise TypeError("Any cannot be instantiated") + return super().__new__(cls, *args, **kwargs) + + +ClassVar = typing.ClassVar + + +class _ExtensionsSpecialForm(typing._SpecialForm, _root=True): + def __repr__(self): + return "typing_extensions." + self._name + + +Final = typing.Final + +if sys.version_info >= (3, 11): + final = typing.final +else: + # @final exists in 3.8+, but we backport it for all versions + # before 3.11 to keep support for the __final__ attribute. + # See https://bugs.python.org/issue46342 + def final(f): + """This decorator can be used to indicate to type checkers that + the decorated method cannot be overridden, and decorated class + cannot be subclassed. For example: + + class Base: + @final + def done(self) -> None: + ... + class Sub(Base): + def done(self) -> None: # Error reported by type checker + ... + @final + class Leaf: + ... + class Other(Leaf): # Error reported by type checker + ... + + There is no runtime checking of these properties. The decorator + sets the ``__final__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + """ + try: + f.__final__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return f + + +def IntVar(name): + return typing.TypeVar(name) + + +# A Literal bug was fixed in 3.11.0, 3.10.1 and 3.9.8 +if sys.version_info >= (3, 10, 1): + Literal = typing.Literal +else: + + def _flatten_literal_params(parameters): + """An internal helper for Literal creation: flatten Literals among parameters""" + params = [] + for p in parameters: + if isinstance(p, _LiteralGenericAlias): + params.extend(p.__args__) + else: + params.append(p) + return tuple(params) + + def _value_and_type_iter(params): + for p in params: + yield p, type(p) + + class _LiteralGenericAlias(typing._GenericAlias, _root=True): + def __eq__(self, other): + if not isinstance(other, _LiteralGenericAlias): + return NotImplemented + these_args_deduped = set(_value_and_type_iter(self.__args__)) + other_args_deduped = set(_value_and_type_iter(other.__args__)) + return these_args_deduped == other_args_deduped + + def __hash__(self): + return hash(frozenset(_value_and_type_iter(self.__args__))) + + class _LiteralForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, doc: str): + self._name = "Literal" + self._doc = self.__doc__ = doc + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + + parameters = _flatten_literal_params(parameters) + + val_type_pairs = list(_value_and_type_iter(parameters)) + try: + deduped_pairs = set(val_type_pairs) + except TypeError: + # unhashable parameters + pass + else: + # similar logic to typing._deduplicate on Python 3.9+ + if len(deduped_pairs) < len(val_type_pairs): + new_parameters = [] + for pair in val_type_pairs: + if pair in deduped_pairs: + new_parameters.append(pair[0]) + deduped_pairs.remove(pair) + assert not deduped_pairs, deduped_pairs + parameters = tuple(new_parameters) + + return _LiteralGenericAlias(self, parameters) + + Literal = 
_LiteralForm( + doc="""\ + A type that can be used to indicate to type checkers + that the corresponding value has a value literally equivalent + to the provided parameter. For example: + + var: Literal[4] = 4 + + The type checker understands that 'var' is literally equal to + the value 4 and no other value. + + Literal[...] cannot be subclassed. There is no runtime + checking verifying that the parameter is actually a value + instead of a type.""" + ) + + +_overload_dummy = typing._overload_dummy + + +if hasattr(typing, "get_overloads"): # 3.11+ + overload = typing.overload + get_overloads = typing.get_overloads + clear_overloads = typing.clear_overloads +else: + # {module: {qualname: {firstlineno: func}}} + _overload_registry = collections.defaultdict( + functools.partial(collections.defaultdict, dict) + ) + + def overload(func): + """Decorator for overloaded functions/methods. + + In a stub file, place two or more stub definitions for the same + function in a row, each decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + + In a non-stub file (i.e. a regular .py file), do the same but + follow it with an implementation. The implementation should *not* + be decorated with @overload. For example: + + @overload + def utf8(value: None) -> None: ... + @overload + def utf8(value: bytes) -> bytes: ... + @overload + def utf8(value: str) -> bytes: ... + def utf8(value): + # implementation goes here + + The overloads for a function can be retrieved at runtime using the + get_overloads() function. + """ + # classmethod and staticmethod + f = getattr(func, "__func__", func) + try: + _overload_registry[f.__module__][f.__qualname__][ + f.__code__.co_firstlineno + ] = func + except AttributeError: + # Not a normal function; ignore. + pass + return _overload_dummy + + def get_overloads(func): + """Return all defined overloads for *func* as a sequence.""" + # classmethod and staticmethod + f = getattr(func, "__func__", func) + if f.__module__ not in _overload_registry: + return [] + mod_dict = _overload_registry[f.__module__] + if f.__qualname__ not in mod_dict: + return [] + return list(mod_dict[f.__qualname__].values()) + + def clear_overloads(): + """Clear all overloads in the registry.""" + _overload_registry.clear() + + +# This is not a real generic class. Don't use outside annotations. +Type = typing.Type + +# Various ABCs mimicking those in collections.abc. +# A few are simply re-exported for completeness. 
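+# (On every Python version this module supports, the names below are
+# identical to their typing counterparts, so plain aliasing is sufficient.)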
+Awaitable = typing.Awaitable +Coroutine = typing.Coroutine +AsyncIterable = typing.AsyncIterable +AsyncIterator = typing.AsyncIterator +Deque = typing.Deque +DefaultDict = typing.DefaultDict +OrderedDict = typing.OrderedDict +Counter = typing.Counter +ChainMap = typing.ChainMap +Text = typing.Text +TYPE_CHECKING = typing.TYPE_CHECKING + + +if sys.version_info >= (3, 13, 0, "beta"): + from typing import AsyncContextManager, AsyncGenerator, ContextManager, Generator +else: + + def _is_dunder(attr): + return attr.startswith("__") and attr.endswith("__") + + # Python <3.9 doesn't have typing._SpecialGenericAlias + _special_generic_alias_base = getattr( + typing, "_SpecialGenericAlias", typing._GenericAlias + ) + + class _SpecialGenericAlias(_special_generic_alias_base, _root=True): + def __init__(self, origin, nparams, *, inst=True, name=None, defaults=()): + if _special_generic_alias_base is typing._GenericAlias: + # Python <3.9 + self.__origin__ = origin + self._nparams = nparams + super().__init__(origin, nparams, special=True, inst=inst, name=name) + else: + # Python >= 3.9 + super().__init__(origin, nparams, inst=inst, name=name) + self._defaults = defaults + + def __setattr__(self, attr, val): + allowed_attrs = {"_name", "_inst", "_nparams", "_defaults"} + if _special_generic_alias_base is typing._GenericAlias: + # Python <3.9 + allowed_attrs.add("__origin__") + if _is_dunder(attr) or attr in allowed_attrs: + object.__setattr__(self, attr, val) + else: + setattr(self.__origin__, attr, val) + + @typing._tp_cache + def __getitem__(self, params): + if not isinstance(params, tuple): + params = (params,) + msg = "Parameters to generic types must be types." + params = tuple(typing._type_check(p, msg) for p in params) + if ( + self._defaults + and len(params) < self._nparams + and len(params) + len(self._defaults) >= self._nparams + ): + params = (*params, *self._defaults[len(params) - self._nparams :]) + actual_len = len(params) + + if actual_len != self._nparams: + if self._defaults: + expected = f"at least {self._nparams - len(self._defaults)}" + else: + expected = str(self._nparams) + if not self._nparams: + raise TypeError(f"{self} is not a generic class") + raise TypeError( + f"Too {'many' if actual_len > self._nparams else 'few'}" + f" arguments for {self};" + f" actual {actual_len}, expected {expected}" + ) + return self.copy_with(params) + + _NoneType = type(None) + Generator = _SpecialGenericAlias( + collections.abc.Generator, 3, defaults=(_NoneType, _NoneType) + ) + AsyncGenerator = _SpecialGenericAlias( + collections.abc.AsyncGenerator, 2, defaults=(_NoneType,) + ) + ContextManager = _SpecialGenericAlias( + contextlib.AbstractContextManager, + 2, + name="ContextManager", + defaults=(typing.Optional[bool],), + ) + AsyncContextManager = _SpecialGenericAlias( + contextlib.AbstractAsyncContextManager, + 2, + name="AsyncContextManager", + defaults=(typing.Optional[bool],), + ) + + +_PROTO_ALLOWLIST = { + "collections.abc": [ + "Callable", + "Awaitable", + "Iterable", + "Iterator", + "AsyncIterable", + "Hashable", + "Sized", + "Container", + "Collection", + "Reversible", + "Buffer", + ], + "contextlib": ["AbstractContextManager", "AbstractAsyncContextManager"], + "typing_extensions": ["Buffer"], +} + + +_EXCLUDED_ATTRS = frozenset(typing.EXCLUDED_ATTRIBUTES) | { + "__match_args__", + "__protocol_attrs__", + "__non_callable_proto_members__", + "__final__", +} + + +def _get_protocol_attrs(cls): + attrs = set() + for base in cls.__mro__[:-1]: # without object + if base.__name__ in 
{"Protocol", "Generic"}: + continue + annotations = getattr(base, "__annotations__", {}) + for attr in (*base.__dict__, *annotations): + if not attr.startswith("_abc_") and attr not in _EXCLUDED_ATTRS: + attrs.add(attr) + return attrs + + +def _caller(depth=2): + try: + return sys._getframe(depth).f_globals.get("__name__", "__main__") + except (AttributeError, ValueError): # For platforms without _getframe() + return None + + +# `__match_args__` attribute was removed from protocol members in 3.13, +# we want to backport this change to older Python versions. +if sys.version_info >= (3, 13): + Protocol = typing.Protocol +else: + + def _allow_reckless_class_checks(depth=3): + """Allow instance and class checks for special stdlib modules. + The abc and functools modules indiscriminately call isinstance() and + issubclass() on the whole MRO of a user class, which may contain protocols. + """ + return _caller(depth) in {"abc", "functools", None} + + def _no_init(self, *args, **kwargs): + if type(self)._is_protocol: + raise TypeError("Protocols cannot be instantiated") + + def _type_check_issubclass_arg_1(arg): + """Raise TypeError if `arg` is not an instance of `type` + in `issubclass(arg, )`. + + In most cases, this is verified by type.__subclasscheck__. + Checking it again unnecessarily would slow down issubclass() checks, + so, we don't perform this check unless we absolutely have to. + + For various error paths, however, + we want to ensure that *this* error message is shown to the user + where relevant, rather than a typing.py-specific error message. + """ + if not isinstance(arg, type): + # Same error message as for issubclass(1, int). + raise TypeError("issubclass() arg 1 must be a class") + + # Inheriting from typing._ProtocolMeta isn't actually desirable, + # but is necessary to allow typing.Protocol and typing_extensions.Protocol + # to mix without getting TypeErrors about "metaclass conflict" + class _ProtocolMeta(type(typing.Protocol)): + # This metaclass is somewhat unfortunate, + # but is necessary for several reasons... 
+ # + # NOTE: DO NOT call super() in any methods in this class + # That would call the methods on typing._ProtocolMeta on Python 3.8-3.11 + # and those are slow + def __new__(mcls, name, bases, namespace, **kwargs): + if name == "Protocol" and len(bases) < 2: + pass + elif {Protocol, typing.Protocol} & set(bases): + for base in bases: + if not ( + base in {object, typing.Generic, Protocol, typing.Protocol} + or base.__name__ in _PROTO_ALLOWLIST.get(base.__module__, []) + or is_protocol(base) + ): + raise TypeError( + "Protocols can only inherit from other protocols, " + f"got {base!r}" + ) + return abc.ABCMeta.__new__(mcls, name, bases, namespace, **kwargs) + + def __init__(cls, *args, **kwargs): + abc.ABCMeta.__init__(cls, *args, **kwargs) + if getattr(cls, "_is_protocol", False): + cls.__protocol_attrs__ = _get_protocol_attrs(cls) + + def __subclasscheck__(cls, other): + if cls is Protocol: + return type.__subclasscheck__(cls, other) + if ( + getattr(cls, "_is_protocol", False) + and not _allow_reckless_class_checks() + ): + if not getattr(cls, "_is_runtime_protocol", False): + _type_check_issubclass_arg_1(other) + raise TypeError( + "Instance and class checks can only be used with " + "@runtime_checkable protocols" + ) + if ( + # this attribute is set by @runtime_checkable: + cls.__non_callable_proto_members__ + and cls.__dict__.get("__subclasshook__") is _proto_hook + ): + _type_check_issubclass_arg_1(other) + non_method_attrs = sorted(cls.__non_callable_proto_members__) + raise TypeError( + "Protocols with non-method members don't support issubclass()." + f" Non-method members: {str(non_method_attrs)[1:-1]}." + ) + return abc.ABCMeta.__subclasscheck__(cls, other) + + def __instancecheck__(cls, instance): + # We need this method for situations where attributes are + # assigned in __init__. + if cls is Protocol: + return type.__instancecheck__(cls, instance) + if not getattr(cls, "_is_protocol", False): + # i.e., it's a concrete subclass of a protocol + return abc.ABCMeta.__instancecheck__(cls, instance) + + if ( + not getattr(cls, "_is_runtime_protocol", False) + and not _allow_reckless_class_checks() + ): + raise TypeError( + "Instance and class checks can only be used with" + " @runtime_checkable protocols" + ) + + if abc.ABCMeta.__instancecheck__(cls, instance): + return True + + for attr in cls.__protocol_attrs__: + try: + val = inspect.getattr_static(instance, attr) + except AttributeError: + break + # this attribute is set by @runtime_checkable: + if val is None and attr not in cls.__non_callable_proto_members__: + break + else: + return True + + return False + + def __eq__(cls, other): + # Hack so that typing.Generic.__class_getitem__ + # treats typing_extensions.Protocol + # as equivalent to typing.Protocol + if abc.ABCMeta.__eq__(cls, other) is True: + return True + return cls is Protocol and other is typing.Protocol + + # This has to be defined, or the abc-module cache + # complains about classes with this metaclass being unhashable, + # if we define only __eq__! + def __hash__(cls) -> int: + return type.__hash__(cls) + + @classmethod + def _proto_hook(cls, other): + if not cls.__dict__.get("_is_protocol", False): + return NotImplemented + + for attr in cls.__protocol_attrs__: + for base in other.__mro__: + # Check if the members appears in the class dictionary... + if attr in base.__dict__: + if base.__dict__[attr] is None: + return NotImplemented + break + + # ...or in annotations, if it is a sub-protocol. 
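+                # (An annotation-only member satisfies this hook only when
+                #  the class being checked is itself a protocol; a concrete
+                #  class must actually define the attribute in its dict.)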
+ annotations = getattr(base, "__annotations__", {}) + if ( + isinstance(annotations, collections.abc.Mapping) + and attr in annotations + and is_protocol(other) + ): + break + else: + return NotImplemented + return True + + class Protocol(typing.Generic, metaclass=_ProtocolMeta): + __doc__ = typing.Protocol.__doc__ + __slots__ = () + _is_protocol = True + _is_runtime_protocol = False + + def __init_subclass__(cls, *args, **kwargs): + super().__init_subclass__(*args, **kwargs) + + # Determine if this is a protocol or a concrete subclass. + if not cls.__dict__.get("_is_protocol", False): + cls._is_protocol = any(b is Protocol for b in cls.__bases__) + + # Set (or override) the protocol subclass hook. + if "__subclasshook__" not in cls.__dict__: + cls.__subclasshook__ = _proto_hook + + # Prohibit instantiation for protocol classes + if cls._is_protocol and cls.__init__ is Protocol.__init__: + cls.__init__ = _no_init + + +if sys.version_info >= (3, 13): + runtime_checkable = typing.runtime_checkable +else: + + def runtime_checkable(cls): + """Mark a protocol class as a runtime protocol. + + Such protocol can be used with isinstance() and issubclass(). + Raise TypeError if applied to a non-protocol class. + This allows a simple-minded structural check very similar to + one trick ponies in collections.abc such as Iterable. + + For example:: + + @runtime_checkable + class Closable(Protocol): + def close(self): ... + + assert isinstance(open('/some/file'), Closable) + + Warning: this will check only the presence of the required methods, + not their type signatures! + """ + if not issubclass(cls, typing.Generic) or not getattr( + cls, "_is_protocol", False + ): + raise TypeError( + "@runtime_checkable can be only applied to protocol classes," + f" got {cls!r}" + ) + cls._is_runtime_protocol = True + + # typing.Protocol classes on <=3.11 break if we execute this block, + # because typing.Protocol classes on <=3.11 don't have a + # `__protocol_attrs__` attribute, and this block relies on the + # `__protocol_attrs__` attribute. Meanwhile, typing.Protocol classes on 3.12.2+ + # break if we *don't* execute this block, because *they* assume that all + # protocol classes have a `__non_callable_proto_members__` attribute + # (which this block sets) + if isinstance(cls, _ProtocolMeta) or sys.version_info >= (3, 12, 2): + # PEP 544 prohibits using issubclass() + # with protocols that have non-method members. + # See gh-113320 for why we compute this attribute here, + # rather than in `_ProtocolMeta.__init__` + cls.__non_callable_proto_members__ = set() + for attr in cls.__protocol_attrs__: + try: + is_callable = callable(getattr(cls, attr, None)) + except Exception as e: + raise TypeError( + f"Failed to determine whether protocol member {attr!r} " + "is a method member" + ) from e + else: + if not is_callable: + cls.__non_callable_proto_members__.add(attr) + + return cls + + +# The "runtime" alias exists for backwards compatibility. 
+runtime = runtime_checkable + + +# Our version of runtime-checkable protocols is faster on Python 3.8-3.11 +if sys.version_info >= (3, 12): + SupportsInt = typing.SupportsInt + SupportsFloat = typing.SupportsFloat + SupportsComplex = typing.SupportsComplex + SupportsBytes = typing.SupportsBytes + SupportsIndex = typing.SupportsIndex + SupportsAbs = typing.SupportsAbs + SupportsRound = typing.SupportsRound +else: + + @runtime_checkable + class SupportsInt(Protocol): + """An ABC with one abstract method __int__.""" + + __slots__ = () + + @abc.abstractmethod + def __int__(self) -> int: + pass + + @runtime_checkable + class SupportsFloat(Protocol): + """An ABC with one abstract method __float__.""" + + __slots__ = () + + @abc.abstractmethod + def __float__(self) -> float: + pass + + @runtime_checkable + class SupportsComplex(Protocol): + """An ABC with one abstract method __complex__.""" + + __slots__ = () + + @abc.abstractmethod + def __complex__(self) -> complex: + pass + + @runtime_checkable + class SupportsBytes(Protocol): + """An ABC with one abstract method __bytes__.""" + + __slots__ = () + + @abc.abstractmethod + def __bytes__(self) -> bytes: + pass + + @runtime_checkable + class SupportsIndex(Protocol): + __slots__ = () + + @abc.abstractmethod + def __index__(self) -> int: + pass + + @runtime_checkable + class SupportsAbs(Protocol[T_co]): + """ + An ABC with one abstract method __abs__ that is covariant in its return type. + """ + + __slots__ = () + + @abc.abstractmethod + def __abs__(self) -> T_co: + pass + + @runtime_checkable + class SupportsRound(Protocol[T_co]): + """ + An ABC with one abstract method __round__ that is covariant in its return type. + """ + + __slots__ = () + + @abc.abstractmethod + def __round__(self, ndigits: int = 0) -> T_co: + pass + + +def _ensure_subclassable(mro_entries): + def inner(func): + if sys.implementation.name == "pypy" and sys.version_info < (3, 9): + cls_dict = { + "__call__": staticmethod(func), + "__mro_entries__": staticmethod(mro_entries), + } + t = type(func.__name__, (), cls_dict) + return functools.update_wrapper(t(), func) + else: + func.__mro_entries__ = mro_entries + return func + + return inner + + +# Update this to something like >=3.13.0b1 if and when +# PEP 728 is implemented in CPython +_PEP_728_IMPLEMENTED = False + +if _PEP_728_IMPLEMENTED: + # The standard library TypedDict in Python 3.8 does not store runtime information + # about which (if any) keys are optional. See https://bugs.python.org/issue38834 + # The standard library TypedDict in Python 3.9.0/1 does not honour the "total" + # keyword with old-style TypedDict(). See https://bugs.python.org/issue42059 + # The standard library TypedDict below Python 3.11 does not store runtime + # information about optional and required keys when using Required or NotRequired. + # Generic TypedDicts are also impossible using typing.TypedDict on Python <3.11. + # Aaaand on 3.12 we add __orig_bases__ to TypedDict + # to enable better runtime introspection. + # On 3.13 we deprecate some odd ways of creating TypedDicts. + # Also on 3.13, PEP 705 adds the ReadOnly[] qualifier. + # PEP 728 (still pending) makes more changes. 
+ TypedDict = typing.TypedDict + _TypedDictMeta = typing._TypedDictMeta + is_typeddict = typing.is_typeddict +else: + # 3.10.0 and later + _TAKES_MODULE = "module" in inspect.signature(typing._type_check).parameters + + def _get_typeddict_qualifiers(annotation_type): + while True: + annotation_origin = get_origin(annotation_type) + if annotation_origin is Annotated: + annotation_args = get_args(annotation_type) + if annotation_args: + annotation_type = annotation_args[0] + else: + break + elif annotation_origin is Required: + yield Required + (annotation_type,) = get_args(annotation_type) + elif annotation_origin is NotRequired: + yield NotRequired + (annotation_type,) = get_args(annotation_type) + elif annotation_origin is ReadOnly: + yield ReadOnly + (annotation_type,) = get_args(annotation_type) + else: + break + + class _TypedDictMeta(type): + def __new__(cls, name, bases, ns, *, total=True, closed=False): + """Create new typed dict class object. + + This method is called when TypedDict is subclassed, + or when TypedDict is instantiated. This way + TypedDict supports all three syntax forms described in its docstring. + Subclasses and instances of TypedDict return actual dictionaries. + """ + for base in bases: + if type(base) is not _TypedDictMeta and base is not typing.Generic: + raise TypeError( + "cannot inherit from both a TypedDict type " + "and a non-TypedDict base class" + ) + + if any(issubclass(b, typing.Generic) for b in bases): + generic_base = (typing.Generic,) + else: + generic_base = () + + # typing.py generally doesn't let you inherit from plain Generic, unless + # the name of the class happens to be "Protocol" + tp_dict = type.__new__( + _TypedDictMeta, "Protocol", (*generic_base, dict), ns + ) + tp_dict.__name__ = name + if tp_dict.__qualname__ == "Protocol": + tp_dict.__qualname__ = name + + if not hasattr(tp_dict, "__orig_bases__"): + tp_dict.__orig_bases__ = bases + + annotations = {} + if "__annotations__" in ns: + own_annotations = ns["__annotations__"] + elif "__annotate__" in ns: + # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated + own_annotations = ns["__annotate__"](1) + else: + own_annotations = {} + msg = "TypedDict('Name', {f0: t0, f1: t1, ...}); each t must be a type" + if _TAKES_MODULE: + own_annotations = { + n: typing._type_check(tp, msg, module=tp_dict.__module__) + for n, tp in list(own_annotations.items()) + } + else: + own_annotations = { + n: typing._type_check(tp, msg) + for n, tp in list(own_annotations.items()) + } + required_keys = set() + optional_keys = set() + readonly_keys = set() + mutable_keys = set() + extra_items_type = None + + for base in bases: + base_dict = base.__dict__ + + annotations.update(base_dict.get("__annotations__", {})) + required_keys.update(base_dict.get("__required_keys__", ())) + optional_keys.update(base_dict.get("__optional_keys__", ())) + readonly_keys.update(base_dict.get("__readonly_keys__", ())) + mutable_keys.update(base_dict.get("__mutable_keys__", ())) + base_extra_items_type = base_dict.get("__extra_items__", None) + if base_extra_items_type is not None: + extra_items_type = base_extra_items_type + + if closed and extra_items_type is None: + extra_items_type = Never + if closed and "__extra_items__" in own_annotations: + annotation_type = own_annotations.pop("__extra_items__") + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + if Required in qualifiers: + raise TypeError( + "Special key __extra_items__ does not support Required" + ) + if NotRequired in qualifiers: + 
raise TypeError( + "Special key __extra_items__ does not support NotRequired" + ) + extra_items_type = annotation_type + + annotations.update(own_annotations) + for annotation_key, annotation_type in list(own_annotations.items()): + qualifiers = set(_get_typeddict_qualifiers(annotation_type)) + + if Required in qualifiers: + required_keys.add(annotation_key) + elif NotRequired in qualifiers: + optional_keys.add(annotation_key) + elif total: + required_keys.add(annotation_key) + else: + optional_keys.add(annotation_key) + if ReadOnly in qualifiers: + mutable_keys.discard(annotation_key) + readonly_keys.add(annotation_key) + else: + mutable_keys.add(annotation_key) + readonly_keys.discard(annotation_key) + + tp_dict.__annotations__ = annotations + tp_dict.__required_keys__ = frozenset(required_keys) + tp_dict.__optional_keys__ = frozenset(optional_keys) + tp_dict.__readonly_keys__ = frozenset(readonly_keys) + tp_dict.__mutable_keys__ = frozenset(mutable_keys) + if not hasattr(tp_dict, "__total__"): + tp_dict.__total__ = total + tp_dict.__closed__ = closed + tp_dict.__extra_items__ = extra_items_type + return tp_dict + + __call__ = dict # static method + + def __subclasscheck__(cls, other): + # Typed dicts are only for static structural subtyping. + raise TypeError("TypedDict does not support instance and class checks") + + __instancecheck__ = __subclasscheck__ + + _TypedDict = type.__new__(_TypedDictMeta, "TypedDict", (), {}) + + @_ensure_subclassable(lambda bases: (_TypedDict,)) + def TypedDict(typename, fields=_marker, /, *, total=True, closed=False, **kwargs): + """A simple typed namespace. At runtime it is equivalent to a plain dict. + + TypedDict creates a dictionary type such that a type checker will expect all + instances to have a certain set of keys, where each key is + associated with a value of a consistent type. This expectation + is not checked at runtime. + + Usage:: + + class Point2D(TypedDict): + x: int + y: int + label: str + + a: Point2D = {'x': 1, 'y': 2, 'label': 'good'} # OK + b: Point2D = {'z': 3, 'label': 'bad'} # Fails type check + + assert Point2D(x=1, y=2, label='first') == dict(x=1, y=2, label='first') + + The type info can be accessed via the Point2D.__annotations__ dict, and + the Point2D.__required_keys__ and Point2D.__optional_keys__ frozensets. + TypedDict supports an additional equivalent form:: + + Point2D = TypedDict('Point2D', {'x': int, 'y': int, 'label': str}) + + By default, all keys must be present in a TypedDict. It is possible + to override this by specifying totality:: + + class Point2D(TypedDict, total=False): + x: int + y: int + + This means that a Point2D TypedDict can have any of the keys omitted. A type + checker is only expected to support a literal False or True as the value of + the total argument. True is the default, and makes all items defined in the + class body be required. + + The Required and NotRequired special forms can also be used to mark + individual keys as being required or not required:: + + class Point2D(TypedDict): + x: int # the "x" key must always be present (Required is the default) + y: NotRequired[int] # the "y" key can be omitted + + See PEP 655 for more details on Required and NotRequired. 
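+
+        This backport also accepts the experimental ``closed`` keyword
+        argument and the special ``__extra_items__`` key from the draft
+        PEP 728. A sketch (``Movie`` is illustrative only)::
+
+            class Movie(TypedDict, closed=True):
+                name: str
+                __extra_items__: bool
+
+        Here ``Movie.__closed__`` is ``True``, ``Movie.__extra_items__`` is
+        ``bool``, and the special key is removed from ``__annotations__``.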
+ """ + if fields is _marker or fields is None: + if fields is _marker: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + + example = f"`{typename} = TypedDict({typename!r}, {{}})`" + deprecation_msg = ( + f"{deprecated_thing} is deprecated and will be disallowed in " + "Python 3.15. To create a TypedDict class with 0 fields " + "using the functional syntax, pass an empty dictionary, e.g. " + + example + + "." + ) + warnings.warn(deprecation_msg, DeprecationWarning, stacklevel=2) + if closed is not False and closed is not True: + kwargs["closed"] = closed + closed = False + fields = kwargs + elif kwargs: + raise TypeError( + "TypedDict takes either a dict or keyword arguments, but not both" + ) + if kwargs: + if sys.version_info >= (3, 13): + raise TypeError("TypedDict takes no keyword arguments") + warnings.warn( + "The kwargs-based syntax for TypedDict definitions is deprecated " + "in Python 3.11, will be removed in Python 3.13, and may not be " + "understood by third-party type checkers.", + DeprecationWarning, + stacklevel=2, + ) + + ns = {"__annotations__": dict(fields)} + module = _caller() + if module is not None: + # Setting correct module is necessary to make typed dict classes pickleable. + ns["__module__"] = module + + td = _TypedDictMeta(typename, (), ns, total=total, closed=closed) + td.__orig_bases__ = (TypedDict,) + return td + + if hasattr(typing, "_TypedDictMeta"): + _TYPEDDICT_TYPES = (typing._TypedDictMeta, _TypedDictMeta) + else: + _TYPEDDICT_TYPES = (_TypedDictMeta,) + + def is_typeddict(tp): + """Check if an annotation is a TypedDict class + + For example:: + class Film(TypedDict): + title: str + year: int + + is_typeddict(Film) # => True + is_typeddict(Union[list, str]) # => False + """ + # On 3.8, this would otherwise return True + if hasattr(typing, "TypedDict") and tp is typing.TypedDict: + return False + return isinstance(tp, _TYPEDDICT_TYPES) + + +if hasattr(typing, "assert_type"): + assert_type = typing.assert_type + +else: + + def assert_type(val, typ, /): + """Assert (to the type checker) that the value is of the given type. + + When the type checker encounters a call to assert_type(), it + emits an error if the value is not of the specified type:: + + def greet(name: str) -> None: + assert_type(name, str) # ok + assert_type(name, int) # type checker error + + At runtime this returns the first argument unchanged and otherwise + does nothing. 
+ """ + return val + + +if hasattr(typing, "ReadOnly"): # 3.13+ + get_type_hints = typing.get_type_hints +else: # <=3.13 + # replaces _strip_annotations() + def _strip_extras(t): + """Strips Annotated, Required and NotRequired from a given type.""" + if isinstance(t, _AnnotatedAlias): + return _strip_extras(t.__origin__) + if hasattr(t, "__origin__") and t.__origin__ in ( + Required, + NotRequired, + ReadOnly, + ): + return _strip_extras(t.__args__[0]) + if isinstance(t, typing._GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return t.copy_with(stripped_args) + if hasattr(_types, "GenericAlias") and isinstance(t, _types.GenericAlias): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return _types.GenericAlias(t.__origin__, stripped_args) + if hasattr(_types, "UnionType") and isinstance(t, _types.UnionType): + stripped_args = tuple(_strip_extras(a) for a in t.__args__) + if stripped_args == t.__args__: + return t + return functools.reduce(operator.or_, stripped_args) + + return t + + def get_type_hints(obj, globalns=None, localns=None, include_extras=False): + """Return type hints for an object. + + This is often the same as obj.__annotations__, but it handles + forward references encoded as string literals, adds Optional[t] if a + default value equal to None is set and recursively replaces all + 'Annotated[T, ...]', 'Required[T]' or 'NotRequired[T]' with 'T' + (unless 'include_extras=True'). + + The argument may be a module, class, method, or function. The annotations + are returned as a dictionary. For classes, annotations include also + inherited members. + + TypeError is raised if the argument is not of a type that can contain + annotations, and an empty dictionary is returned if no annotations are + present. + + BEWARE -- the behavior of globalns and localns is counterintuitive + (unless you are familiar with how eval() and exec() work). The + search order is locals first, then globals. + + - If no dict arguments are passed, an attempt is made to use the + globals from obj (or the respective module's globals for classes), + and these are also used as the locals. If the object does not appear + to have globals, an empty dictionary is used. + + - If one dict argument is passed, it is used for both globals and + locals. + + - If two dict arguments are passed, they specify globals and + locals, respectively. + """ + if hasattr(typing, "Annotated"): # 3.9+ + hint = typing.get_type_hints( + obj, globalns=globalns, localns=localns, include_extras=True + ) + else: # 3.8 + hint = typing.get_type_hints(obj, globalns=globalns, localns=localns) + if include_extras: + return hint + return {k: _strip_extras(t) for k, t in list(hint.items())} + + +# Python 3.9+ has PEP 593 (Annotated) +if hasattr(typing, "Annotated"): + Annotated = typing.Annotated + # Not exported and not a public API, but needed for get_origin() and get_args() + # to work. + _AnnotatedAlias = typing._AnnotatedAlias +# 3.8 +else: + + class _AnnotatedAlias(typing._GenericAlias, _root=True): + """Runtime representation of an annotated type. + + At its core 'Annotated[t, dec1, dec2, ...]' is an alias for the type 't' + with extra annotations. The alias behaves like a normal typing alias, + instantiating is the same as instantiating the underlying type, binding + it to types is also the same. 
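+
+        A small sketch of the runtime attributes this alias exposes (the
+        metadata value is arbitrary)::
+
+            alias = Annotated[int, "metadata"]
+            alias.__origin__ is int
+            alias.__metadata__ == ("metadata",)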
+ """ + + def __init__(self, origin, metadata): + if isinstance(origin, _AnnotatedAlias): + metadata = origin.__metadata__ + metadata + origin = origin.__origin__ + super().__init__(origin, origin) + self.__metadata__ = metadata + + def copy_with(self, params): + assert len(params) == 1 + new_type = params[0] + return _AnnotatedAlias(new_type, self.__metadata__) + + def __repr__(self): + return ( + f"typing_extensions.Annotated[{typing._type_repr(self.__origin__)}, " + f"{', '.join(repr(a) for a in self.__metadata__)}]" + ) + + def __reduce__(self): + return operator.getitem, (Annotated, (self.__origin__, *self.__metadata__)) + + def __eq__(self, other): + if not isinstance(other, _AnnotatedAlias): + return NotImplemented + if self.__origin__ != other.__origin__: + return False + return self.__metadata__ == other.__metadata__ + + def __hash__(self): + return hash((self.__origin__, self.__metadata__)) + + class Annotated: + """Add context specific metadata to a type. + + Example: Annotated[int, runtime_check.Unsigned] indicates to the + hypothetical runtime_check module that this type is an unsigned int. + Every other consumer of this type can ignore this metadata and treat + this type as int. + + The first argument to Annotated must be a valid type (and will be in + the __origin__ field), the remaining arguments are kept as a tuple in + the __extra__ field. + + Details: + + - It's an error to call `Annotated` with less than two arguments. + - Nested Annotated are flattened:: + + Annotated[Annotated[T, Ann1, Ann2], Ann3] == Annotated[T, Ann1, Ann2, Ann3] + + - Instantiating an annotated type is equivalent to instantiating the + underlying type:: + + Annotated[C, Ann1](5) == C(5) + + - Annotated can be used as a generic type alias:: + + Optimized = Annotated[T, runtime.Optimize()] + Optimized[int] == Annotated[int, runtime.Optimize()] + + OptimizedList = Annotated[List[T], runtime.Optimize()] + OptimizedList[int] == Annotated[List[int], runtime.Optimize()] + """ + + __slots__ = () + + def __new__(cls, *args, **kwargs): + raise TypeError("Type Annotated cannot be instantiated.") + + @typing._tp_cache + def __class_getitem__(cls, params): + if not isinstance(params, tuple) or len(params) < 2: + raise TypeError( + "Annotated[...] should be used " + "with at least two arguments (a type and an " + "annotation)." + ) + allowed_special_forms = (ClassVar, Final) + if get_origin(params[0]) in allowed_special_forms: + origin = params[0] + else: + msg = "Annotated[t, ...]: t must be a type." + origin = typing._type_check(params[0], msg) + metadata = tuple(params[1:]) + return _AnnotatedAlias(origin, metadata) + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError(f"Cannot subclass {cls.__module__}.Annotated") + + +# Python 3.8 has get_origin() and get_args() but those implementations aren't +# Annotated-aware, so we can't use those. Python 3.9's versions don't support +# ParamSpecArgs and ParamSpecKwargs, so only Python 3.10's versions will do. +if sys.version_info[:2] >= (3, 10): + get_origin = typing.get_origin + get_args = typing.get_args +# 3.8-3.9 +else: + try: + # 3.9+ + from typing import _BaseGenericAlias + except ImportError: + _BaseGenericAlias = typing._GenericAlias + try: + # 3.9+ + from typing import GenericAlias as _typing_GenericAlias + except ImportError: + _typing_GenericAlias = typing._GenericAlias + + def get_origin(tp): + """Get the unsubscripted version of a type. + + This supports generic types, Callable, Tuple, Union, Literal, Final, ClassVar + and Annotated. 
Return None for unsupported types. Examples:: + + get_origin(Literal[42]) is Literal + get_origin(int) is None + get_origin(ClassVar[int]) is ClassVar + get_origin(Generic) is Generic + get_origin(Generic[T]) is Generic + get_origin(Union[T, int]) is Union + get_origin(List[Tuple[T, T]][int]) == list + get_origin(P.args) is P + """ + if isinstance(tp, _AnnotatedAlias): + return Annotated + if isinstance( + tp, + ( + typing._GenericAlias, + _typing_GenericAlias, + _BaseGenericAlias, + ParamSpecArgs, + ParamSpecKwargs, + ), + ): + return tp.__origin__ + if tp is typing.Generic: + return typing.Generic + return None + + def get_args(tp): + """Get type arguments with all substitutions performed. + + For unions, basic simplifications used by Union constructor are performed. + Examples:: + get_args(Dict[str, int]) == (str, int) + get_args(int) == () + get_args(Union[int, Union[T, int], str][int]) == (int, str) + get_args(Union[int, Tuple[T, int]][str]) == (int, Tuple[str, int]) + get_args(Callable[[], T][int]) == ([], int) + """ + if isinstance(tp, _AnnotatedAlias): + return (tp.__origin__, *tp.__metadata__) + if isinstance(tp, (typing._GenericAlias, _typing_GenericAlias)): + if getattr(tp, "_special", False): + return () + res = tp.__args__ + if get_origin(tp) is collections.abc.Callable and res[0] is not Ellipsis: + res = (list(res[:-1]), res[-1]) + return res + return () + + +# 3.10+ +if hasattr(typing, "TypeAlias"): + TypeAlias = typing.TypeAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + + @_ExtensionsSpecialForm + def TypeAlias(self, parameters): + """Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. + + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example above. + """ + raise TypeError(f"{self} is not subscriptable") + + +# 3.8 +else: + TypeAlias = _ExtensionsSpecialForm( + "TypeAlias", + doc="""Special marker indicating that an assignment should + be recognized as a proper type alias definition by type + checkers. 
+ + For example:: + + Predicate: TypeAlias = Callable[..., bool] + + It's invalid when used anywhere except as in the example + above.""", + ) + + +if hasattr(typing, "NoDefault"): + NoDefault = typing.NoDefault +else: + + class NoDefaultTypeMeta(type): + def __setattr__(cls, attr, value): + # TypeError is consistent with the behavior of NoneType + raise TypeError( + f"cannot set {attr!r} attribute of immutable type {cls.__name__!r}" + ) + + class NoDefaultType(metaclass=NoDefaultTypeMeta): + """The type of the NoDefault singleton.""" + + __slots__ = () + + def __new__(cls): + return globals().get("NoDefault") or object.__new__(cls) + + def __repr__(self): + return "typing_extensions.NoDefault" + + def __reduce__(self): + return "NoDefault" + + NoDefault = NoDefaultType() + del NoDefaultType, NoDefaultTypeMeta + + +def _set_default(type_param, default): + type_param.has_default = lambda: default is not NoDefault + type_param.__default__ = default + + +def _set_module(typevarlike): + # for pickling: + def_mod = _caller(depth=3) + if def_mod != "typing_extensions": + typevarlike.__module__ = def_mod + + +class _DefaultMixin: + """Mixin for TypeVarLike defaults.""" + + __slots__ = () + __init__ = _set_default + + +# Classes using this metaclass must provide a _backported_typevarlike ClassVar +class _TypeVarLikeMeta(type): + def __instancecheck__(cls, __instance: Any) -> bool: + return isinstance(__instance, cls._backported_typevarlike) + + +if _PEP_696_IMPLEMENTED: + from typing import TypeVar +else: + # Add default and infer_variance parameters from PEP 696 and 695 + class TypeVar(metaclass=_TypeVarLikeMeta): + """Type variable.""" + + _backported_typevarlike = typing.TypeVar + + def __new__( + cls, + name, + *constraints, + bound=None, + covariant=False, + contravariant=False, + default=NoDefault, + infer_variance=False, + ): + if hasattr(typing, "TypeAliasType"): + # PEP 695 implemented (3.12+), can pass infer_variance to typing.TypeVar + typevar = typing.TypeVar( + name, + *constraints, + bound=bound, + covariant=covariant, + contravariant=contravariant, + infer_variance=infer_variance, + ) + else: + typevar = typing.TypeVar( + name, + *constraints, + bound=bound, + covariant=covariant, + contravariant=contravariant, + ) + if infer_variance and (covariant or contravariant): + raise ValueError( + "Variance cannot be specified with infer_variance." + ) + typevar.__infer_variance__ = infer_variance + + _set_default(typevar, default) + _set_module(typevar) + + def _tvar_prepare_subst(alias, args): + if typevar.has_default() and alias.__parameters__.index(typevar) == len( + args + ): + args += (typevar.__default__,) + return args + + typevar.__typing_prepare_subst__ = _tvar_prepare_subst + return typevar + + def __init_subclass__(cls) -> None: + raise TypeError(f"type '{__name__}.TypeVar' is not an acceptable base type") + + +# Python 3.10+ has PEP 612 +if hasattr(typing, "ParamSpecArgs"): + ParamSpecArgs = typing.ParamSpecArgs + ParamSpecKwargs = typing.ParamSpecKwargs +# 3.8-3.9 +else: + + class _Immutable: + """Mixin to indicate that object should not be copied.""" + + __slots__ = () + + def __copy__(self): + return self + + def __deepcopy__(self, memo): + return self + + class ParamSpecArgs(_Immutable): + """The args for a ParamSpec object. + + Given a ParamSpec object P, P.args is an instance of ParamSpecArgs. 
+
+        ParamSpecArgs objects have a reference back to their ParamSpec:
+
+        P.args.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.args"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecArgs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
+    class ParamSpecKwargs(_Immutable):
+        """The kwargs for a ParamSpec object.
+
+        Given a ParamSpec object P, P.kwargs is an instance of ParamSpecKwargs.
+
+        ParamSpecKwargs objects have a reference back to their ParamSpec:
+
+        P.kwargs.__origin__ is P
+
+        This type is meant for runtime introspection and has no special meaning to
+        static type checkers.
+        """
+
+        def __init__(self, origin):
+            self.__origin__ = origin
+
+        def __repr__(self):
+            return f"{self.__origin__.__name__}.kwargs"
+
+        def __eq__(self, other):
+            if not isinstance(other, ParamSpecKwargs):
+                return NotImplemented
+            return self.__origin__ == other.__origin__
+
+
+if _PEP_696_IMPLEMENTED:
+    from typing import ParamSpec
+
+# 3.10+
+elif hasattr(typing, "ParamSpec"):
+    # Add default parameter - PEP 696
+    class ParamSpec(metaclass=_TypeVarLikeMeta):
+        """Parameter specification."""
+
+        _backported_typevarlike = typing.ParamSpec
+
+        def __new__(
+            cls,
+            name,
+            *,
+            bound=None,
+            covariant=False,
+            contravariant=False,
+            infer_variance=False,
+            default=NoDefault,
+        ):
+            if hasattr(typing, "TypeAliasType"):
+                # PEP 695 implemented, can pass infer_variance to typing.TypeVar
+                paramspec = typing.ParamSpec(
+                    name,
+                    bound=bound,
+                    covariant=covariant,
+                    contravariant=contravariant,
+                    infer_variance=infer_variance,
+                )
+            else:
+                paramspec = typing.ParamSpec(
+                    name, bound=bound, covariant=covariant, contravariant=contravariant
+                )
+                paramspec.__infer_variance__ = infer_variance
+
+            _set_default(paramspec, default)
+            _set_module(paramspec)
+
+            def _paramspec_prepare_subst(alias, args):
+                params = alias.__parameters__
+                i = params.index(paramspec)
+                if i == len(args) and paramspec.has_default():
+                    args = [*args, paramspec.__default__]
+                if i >= len(args):
+                    raise TypeError(f"Too few arguments for {alias}")
+                # Special case where Z[[int, str, bool]] == Z[int, str, bool] in PEP 612.
+                if len(params) == 1 and not typing._is_param_expr(args[0]):
+                    assert i == 0
+                    args = (args,)
+                # Convert lists to tuples to help other libraries cache the results.
+                elif isinstance(args[i], list):
+                    args = (*args[:i], tuple(args[i]), *args[i + 1 :])
+                return args
+
+            paramspec.__typing_prepare_subst__ = _paramspec_prepare_subst
+            return paramspec
+
+        def __init_subclass__(cls) -> None:
+            raise TypeError(
+                f"type '{__name__}.ParamSpec' is not an acceptable base type"
+            )
+
+
+# 3.8-3.9
+else:
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class ParamSpec(list, _DefaultMixin):
+        """Parameter specification variable.
+
+        Usage::
+
+           P = ParamSpec('P')
+
+        Parameter specification variables exist primarily for the benefit of static
+        type checkers. They are used to forward the parameter types of one
+        callable to another callable, a pattern commonly found in higher order
+        functions and decorators. They are only valid when used in ``Concatenate``,
+        or as the first argument to ``Callable``. In Python 3.10 and higher,
+        they are also supported in user-defined Generics at runtime.
+        See class Generic for more information on generic types.
+        An example for annotating a decorator::
+
+           T = TypeVar('T')
+           P = ParamSpec('P')
+
+           def add_logging(f: Callable[P, T]) -> Callable[P, T]:
+               '''A type-safe decorator to add logging to a function.'''
+               def inner(*args: P.args, **kwargs: P.kwargs) -> T:
+                   logging.info(f'{f.__name__} was called')
+                   return f(*args, **kwargs)
+               return inner
+
+           @add_logging
+           def add_two(x: float, y: float) -> float:
+               '''Add two numbers together.'''
+               return x + y
+
+        Parameter specification variables defined with covariant=True or
+        contravariant=True can be used to declare covariant or contravariant
+        generic types. These keyword arguments are valid, but their actual semantics
+        are yet to be decided. See PEP 612 for details.
+
+        Parameter specification variables can be introspected. e.g.:
+
+           P.__name__ == 'P'
+           P.__bound__ == None
+           P.__covariant__ == False
+           P.__contravariant__ == False
+
+        Note that only parameter specification variables defined in global scope can
+        be pickled.
+        """
+
+        # Trick Generic __parameters__.
+        __class__ = typing.TypeVar
+
+        @property
+        def args(self):
+            return ParamSpecArgs(self)
+
+        @property
+        def kwargs(self):
+            return ParamSpecKwargs(self)
+
+        def __init__(
+            self,
+            name,
+            *,
+            bound=None,
+            covariant=False,
+            contravariant=False,
+            infer_variance=False,
+            default=NoDefault,
+        ):
+            list.__init__(self, [self])
+            self.__name__ = name
+            self.__covariant__ = bool(covariant)
+            self.__contravariant__ = bool(contravariant)
+            self.__infer_variance__ = bool(infer_variance)
+            if bound:
+                self.__bound__ = typing._type_check(bound, "Bound must be a type.")
+            else:
+                self.__bound__ = None
+            _DefaultMixin.__init__(self, default)
+
+            # for pickling:
+            def_mod = _caller()
+            if def_mod != "typing_extensions":
+                self.__module__ = def_mod
+
+        def __repr__(self):
+            if self.__infer_variance__:
+                prefix = ""
+            elif self.__covariant__:
+                prefix = "+"
+            elif self.__contravariant__:
+                prefix = "-"
+            else:
+                prefix = "~"
+            return prefix + self.__name__
+
+        def __hash__(self):
+            return object.__hash__(self)
+
+        def __eq__(self, other):
+            return self is other
+
+        def __reduce__(self):
+            return self.__name__
+
+        # Hack to get typing._type_check to pass.
+        def __call__(self, *args, **kwargs):
+            pass
+
+
+# 3.8-3.9
+if not hasattr(typing, "Concatenate"):
+    # Inherits from list as a workaround for Callable checks in Python < 3.9.2.
+    class _ConcatenateGenericAlias(list):
+        # Trick Generic into looking into this for __parameters__.
+        __class__ = typing._GenericAlias
+
+        # Flag in 3.8.
+        _special = False
+
+        def __init__(self, origin, args):
+            super().__init__(args)
+            self.__origin__ = origin
+            self.__args__ = args
+
+        def __repr__(self):
+            _type_repr = typing._type_repr
+            return (
+                f"{_type_repr(self.__origin__)}"
+                f"[{', '.join(_type_repr(arg) for arg in self.__args__)}]"
+            )
+
+        def __hash__(self):
+            return hash((self.__origin__, self.__args__))
+
+        # Hack to get typing._type_check to pass in Generic.
+        def __call__(self, *args, **kwargs):
+            pass
+
+        @property
+        def __parameters__(self):
+            return tuple(
+                tp
+                for tp in self.__args__
+                if isinstance(tp, (typing.TypeVar, ParamSpec))
+            )
+
+
+# 3.8-3.9
+@typing._tp_cache
+def _concatenate_getitem(self, parameters):
+    if parameters == ():
+        raise TypeError("Cannot take a Concatenate of no types.")
+    if not isinstance(parameters, tuple):
+        parameters = (parameters,)
+    if not isinstance(parameters[-1], ParamSpec):
+        raise TypeError(
+            "The last parameter to Concatenate should be a ParamSpec variable."
+ ) + msg = "Concatenate[arg, ...]: each arg must be a type." + parameters = tuple(typing._type_check(p, msg) for p in parameters) + return _ConcatenateGenericAlias(self, parameters) + + +# 3.10+ +if hasattr(typing, "Concatenate"): + Concatenate = typing.Concatenate + _ConcatenateGenericAlias = typing._ConcatenateGenericAlias +# 3.9 +elif sys.version_info[:2] >= (3, 9): + + @_ExtensionsSpecialForm + def Concatenate(self, parameters): + """Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """ + return _concatenate_getitem(self, parameters) + + +# 3.8 +else: + + class _ConcatenateForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + return _concatenate_getitem(self, parameters) + + Concatenate = _ConcatenateForm( + "Concatenate", + doc="""Used in conjunction with ``ParamSpec`` and ``Callable`` to represent a + higher order function which adds, removes or transforms parameters of a + callable. + + For example:: + + Callable[Concatenate[int, P], int] + + See PEP 612 for detailed information. + """, + ) + +# 3.10+ +if hasattr(typing, "TypeGuard"): + TypeGuard = typing.TypeGuard +# 3.9 +elif sys.version_info[:2] >= (3, 9): + + @_ExtensionsSpecialForm + def TypeGuard(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). 
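+
+        A complete sketch of a user-defined type guard, following the
+        canonical PEP 647 example (``is_str_list`` is illustrative only)::
+
+            def is_str_list(val: List[object]) -> TypeGuard[List[str]]:
+                # Determines whether all objects in the list are strings.
+                return all(isinstance(x, str) for x in val)
+
+        When ``is_str_list(val)`` returns ``True``, a checker narrows ``val``
+        to ``List[str]`` in that branch.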
+ """ + item = typing._type_check(parameters, f"{self} accepts only a single type.") + return typing._GenericAlias(self, (item,)) + + +# 3.8 +else: + + class _TypeGuardForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check( + parameters, f"{self._name} accepts only a single type" + ) + return typing._GenericAlias(self, (item,)) + + TypeGuard = _TypeGuardForm( + "TypeGuard", + doc="""Special typing form used to annotate the return type of a user-defined + type guard function. ``TypeGuard`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeGuard`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeGuard[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeGuard`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the type inside ``TypeGuard``. + + For example:: + + def is_str(val: Union[str, float]): + # "isinstance" type guard + if isinstance(val, str): + # Type of ``val`` is narrowed to ``str`` + ... + else: + # Else, type of ``val`` is narrowed to ``float``. + ... + + Strict type narrowing is not enforced -- ``TypeB`` need not be a narrower + form of ``TypeA`` (it can even be a wider form) and this may lead to + type-unsafe results. The main reason is to allow for things like + narrowing ``List[object]`` to ``List[str]`` even though the latter is not + a subtype of the former, since ``List`` is invariant. The responsibility of + writing type-safe type guards is left to the user. + + ``TypeGuard`` also works with type variables. For more information, see + PEP 647 (User-Defined Type Guards). + """, + ) + +# 3.13+ +if hasattr(typing, "TypeIs"): + TypeIs = typing.TypeIs +# 3.9 +elif sys.version_info[:2] >= (3, 9): + + @_ExtensionsSpecialForm + def TypeIs(self, parameters): + """Special typing form used to annotate the return type of a user-defined + type narrower function. ``TypeIs`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeIs[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeIs`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the intersection of the type inside ``TypeGuard`` and the argument's + previously known type. 
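+
+        Unlike ``TypeGuard``, narrowing also applies in the negative case:
+        in the ``else`` branch the argument is narrowed to the remainder of
+        its previously known type, as the example below shows.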
+ + For example:: + + def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: + return hasattr(val, '__await__') + + def f(val: Union[int, Awaitable[int]]) -> int: + if is_awaitable(val): + assert_type(val, Awaitable[int]) + else: + assert_type(val, int) + + ``TypeIs`` also works with type variables. For more information, see + PEP 742 (Narrowing types with TypeIs). + """ + item = typing._type_check(parameters, f"{self} accepts only a single type.") + return typing._GenericAlias(self, (item,)) + + +# 3.8 +else: + + class _TypeIsForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check( + parameters, f"{self._name} accepts only a single type" + ) + return typing._GenericAlias(self, (item,)) + + TypeIs = _TypeIsForm( + "TypeIs", + doc="""Special typing form used to annotate the return type of a user-defined + type narrower function. ``TypeIs`` only accepts a single type argument. + At runtime, functions marked this way should return a boolean. + + ``TypeIs`` aims to benefit *type narrowing* -- a technique used by static + type checkers to determine a more precise type of an expression within a + program's code flow. Usually type narrowing is done by analyzing + conditional code flow and applying the narrowing to a block of code. The + conditional expression here is sometimes referred to as a "type guard". + + Sometimes it would be convenient to use a user-defined boolean function + as a type guard. Such a function should use ``TypeIs[...]`` as its + return type to alert static type checkers to this intention. + + Using ``-> TypeIs`` tells the static type checker that for a given + function: + + 1. The return value is a boolean. + 2. If the return value is ``True``, the type of its argument + is the intersection of the type inside ``TypeGuard`` and the argument's + previously known type. + + For example:: + + def is_awaitable(val: object) -> TypeIs[Awaitable[Any]]: + return hasattr(val, '__await__') + + def f(val: Union[int, Awaitable[int]]) -> int: + if is_awaitable(val): + assert_type(val, Awaitable[int]) + else: + assert_type(val, int) + + ``TypeIs`` also works with type variables. For more information, see + PEP 742 (Narrowing types with TypeIs). + """, + ) + + +# Vendored from cpython typing._SpecialFrom +class _SpecialForm(typing._Final, _root=True): + __slots__ = ("_name", "__doc__", "_getitem") + + def __init__(self, getitem): + self._getitem = getitem + self._name = getitem.__name__ + self.__doc__ = getitem.__doc__ + + def __getattr__(self, item): + if item in {"__name__", "__qualname__"}: + return self._name + + raise AttributeError(item) + + def __mro_entries__(self, bases): + raise TypeError(f"Cannot subclass {self!r}") + + def __repr__(self): + return f"typing_extensions.{self._name}" + + def __reduce__(self): + return self._name + + def __call__(self, *args, **kwds): + raise TypeError(f"Cannot instantiate {self!r}") + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + def __instancecheck__(self, obj): + raise TypeError(f"{self} cannot be used with isinstance()") + + def __subclasscheck__(self, cls): + raise TypeError(f"{self} cannot be used with issubclass()") + + @typing._tp_cache + def __getitem__(self, parameters): + return self._getitem(self, parameters) + + +if hasattr(typing, "LiteralString"): # 3.11+ + LiteralString = typing.LiteralString +else: + + @_SpecialForm + def LiteralString(self, params): + """Represents an arbitrary literal string. 
+ + Example:: + + from typing_extensions import LiteralString + + def query(sql: LiteralString) -> ...: + ... + + query("SELECT * FROM table") # ok + query(f"SELECT * FROM {input()}") # not ok + + See PEP 675 for details. + + """ + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Self"): # 3.11+ + Self = typing.Self +else: + + @_SpecialForm + def Self(self, params): + """Used to spell the type of "self" in classes. + + Example:: + + from typing import Self + + class ReturnsSelf: + def parse(self, data: bytes) -> Self: + ... + return self + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Never"): # 3.11+ + Never = typing.Never +else: + + @_SpecialForm + def Never(self, params): + """The bottom type, a type that has no members. + + This can be used to define a function that should never be + called, or a function that never returns:: + + from typing_extensions import Never + + def never_call_me(arg: Never) -> None: + pass + + def int_or_str(arg: int | str) -> None: + never_call_me(arg) # type checker error + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + never_call_me(arg) # ok, arg is of type Never + + """ + + raise TypeError(f"{self} is not subscriptable") + + +if hasattr(typing, "Required"): # 3.11+ + Required = typing.Required + NotRequired = typing.NotRequired +elif sys.version_info[:2] >= (3, 9): # 3.9-3.10 + + @_ExtensionsSpecialForm + def Required(self, parameters): + """A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """ + item = typing._type_check( + parameters, f"{self._name} accepts only a single type." + ) + return typing._GenericAlias(self, (item,)) + + @_ExtensionsSpecialForm + def NotRequired(self, parameters): + """A special typing construct to mark a key of a TypedDict as + potentially missing. For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """ + item = typing._type_check( + parameters, f"{self._name} accepts only a single type." + ) + return typing._GenericAlias(self, (item,)) + +else: # 3.8 + + class _RequiredForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check( + parameters, f"{self._name} accepts only a single type." + ) + return typing._GenericAlias(self, (item,)) + + Required = _RequiredForm( + "Required", + doc="""A special typing construct to mark a key of a total=False TypedDict + as required. For example: + + class Movie(TypedDict, total=False): + title: Required[str] + year: int + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + + There is no runtime checking that a required key is actually provided + when instantiating a related TypedDict. + """, + ) + NotRequired = _RequiredForm( + "NotRequired", + doc="""A special typing construct to mark a key of a TypedDict as + potentially missing. 
For example: + + class Movie(TypedDict): + title: str + year: NotRequired[int] + + m = Movie( + title='The Matrix', # typechecker error if key is omitted + year=1999, + ) + """, + ) + + +if hasattr(typing, "ReadOnly"): + ReadOnly = typing.ReadOnly +elif sys.version_info[:2] >= (3, 9): # 3.9-3.12 + + @_ExtensionsSpecialForm + def ReadOnly(self, parameters): + """A special typing construct to mark an item of a TypedDict as read-only. + + For example: + + class Movie(TypedDict): + title: ReadOnly[str] + year: int + + def mutate_movie(m: Movie) -> None: + m["year"] = 1992 # allowed + m["title"] = "The Matrix" # typechecker error + + There is no runtime checking for this property. + """ + item = typing._type_check( + parameters, f"{self._name} accepts only a single type." + ) + return typing._GenericAlias(self, (item,)) + +else: # 3.8 + + class _ReadOnlyForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check( + parameters, f"{self._name} accepts only a single type." + ) + return typing._GenericAlias(self, (item,)) + + ReadOnly = _ReadOnlyForm( + "ReadOnly", + doc="""A special typing construct to mark a key of a TypedDict as read-only. + + For example: + + class Movie(TypedDict): + title: ReadOnly[str] + year: int + + def mutate_movie(m: Movie) -> None: + m["year"] = 1992 # allowed + m["title"] = "The Matrix" # typechecker error + + There is no runtime checking for this propery. + """, + ) + + +_UNPACK_DOC = """\ +Type unpack operator. + +The type unpack operator takes the child types from some container type, +such as `tuple[int, str]` or a `TypeVarTuple`, and 'pulls them out'. For +example: + + # For some generic class `Foo`: + Foo[Unpack[tuple[int, str]]] # Equivalent to Foo[int, str] + + Ts = TypeVarTuple('Ts') + # Specifies that `Bar` is generic in an arbitrary number of types. + # (Think of `Ts` as a tuple of an arbitrary number of individual + # `TypeVar`s, which the `Unpack` is 'pulling out' directly into the + # `Generic[]`.) + class Bar(Generic[Unpack[Ts]]): ... + Bar[int] # Valid + Bar[int, str] # Also valid + +From Python 3.11, this can also be done using the `*` operator: + + Foo[*tuple[int, str]] + class Bar(Generic[*Ts]): ... + +The operator can also be used along with a `TypedDict` to annotate +`**kwargs` in a function signature. For instance: + + class Movie(TypedDict): + name: str + year: int + + # This function expects two keyword arguments - *name* of type `str` and + # *year* of type `int`. + def foo(**kwargs: Unpack[Movie]): ... + +Note that there is only some runtime checking of this operator. Not +everything the runtime allows may be accepted by static type checkers. + +For more information, see PEP 646 and PEP 692. +""" + + +if sys.version_info >= (3, 12): # PEP 692 changed the repr of Unpack[] + Unpack = typing.Unpack + + def _is_unpack(obj): + return get_origin(obj) is Unpack + +elif sys.version_info[:2] >= (3, 9): # 3.9+ + + class _UnpackSpecialForm(_ExtensionsSpecialForm, _root=True): + def __init__(self, getitem): + super().__init__(getitem) + self.__doc__ = _UNPACK_DOC + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + @property + def __typing_unpacked_tuple_args__(self): + assert self.__origin__ is Unpack + assert len(self.__args__) == 1 + (arg,) = self.__args__ + if isinstance(arg, (typing._GenericAlias, _types.GenericAlias)): + if arg.__origin__ is not tuple: + raise TypeError("Unpack[...] 
must be used with a tuple type") + return arg.__args__ + return None + + @_UnpackSpecialForm + def Unpack(self, parameters): + item = typing._type_check( + parameters, f"{self._name} accepts only a single type." + ) + return _UnpackAlias(self, (item,)) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + +else: # 3.8 + + class _UnpackAlias(typing._GenericAlias, _root=True): + __class__ = typing.TypeVar + + class _UnpackForm(_ExtensionsSpecialForm, _root=True): + def __getitem__(self, parameters): + item = typing._type_check( + parameters, f"{self._name} accepts only a single type." + ) + return _UnpackAlias(self, (item,)) + + Unpack = _UnpackForm("Unpack", doc=_UNPACK_DOC) + + def _is_unpack(obj): + return isinstance(obj, _UnpackAlias) + + +if _PEP_696_IMPLEMENTED: + from typing import TypeVarTuple + +elif hasattr(typing, "TypeVarTuple"): # 3.11+ + + def _unpack_args(*args): + newargs = [] + for arg in args: + subargs = getattr(arg, "__typing_unpacked_tuple_args__", None) + if subargs is not None and not (subargs and subargs[-1] is ...): + newargs.extend(subargs) + else: + newargs.append(arg) + return newargs + + # Add default parameter - PEP 696 + class TypeVarTuple(metaclass=_TypeVarLikeMeta): + """Type variable tuple.""" + + _backported_typevarlike = typing.TypeVarTuple + + def __new__(cls, name, *, default=NoDefault): + tvt = typing.TypeVarTuple(name) + _set_default(tvt, default) + _set_module(tvt) + + def _typevartuple_prepare_subst(alias, args): + params = alias.__parameters__ + typevartuple_index = params.index(tvt) + for param in params[typevartuple_index + 1 :]: + if isinstance(param, TypeVarTuple): + raise TypeError( + f"More than one TypeVarTuple parameter in {alias}" + ) + + alen = len(args) + plen = len(params) + left = typevartuple_index + right = plen - typevartuple_index - 1 + var_tuple_index = None + fillarg = None + for k, arg in enumerate(args): + if not isinstance(arg, type): + subargs = getattr(arg, "__typing_unpacked_tuple_args__", None) + if subargs and len(subargs) == 2 and subargs[-1] is ...: + if var_tuple_index is not None: + raise TypeError( + "More than one unpacked " + "arbitrary-length tuple argument" + ) + var_tuple_index = k + fillarg = subargs[0] + if var_tuple_index is not None: + left = min(left, var_tuple_index) + right = min(right, alen - var_tuple_index - 1) + elif left + right > alen: + raise TypeError( + f"Too few arguments for {alias};" + f" actual {alen}, expected at least {plen - 1}" + ) + if left == alen - right and tvt.has_default(): + replacement = _unpack_args(tvt.__default__) + else: + replacement = args[left : alen - right] + + return ( + *args[:left], + *([fillarg] * (typevartuple_index - left)), + replacement, + *([fillarg] * (plen - right - left - typevartuple_index - 1)), + *args[alen - right :], + ) + + tvt.__typing_prepare_subst__ = _typevartuple_prepare_subst + return tvt + + def __init_subclass__(self, *args, **kwds): + raise TypeError("Cannot subclass special typing classes") + +else: # <=3.10 + + class TypeVarTuple(_DefaultMixin): + """Type variable tuple. + + Usage:: + + Ts = TypeVarTuple('Ts') + + In the same way that a normal type variable is a stand-in for a single + type such as ``int``, a type variable *tuple* is a stand-in for a *tuple* + type such as ``Tuple[int, str]``. + + Type variable tuples can be used in ``Generic`` declarations. + Consider the following example:: + + class Array(Generic[*Ts]): ... 
+ + The ``Ts`` type variable tuple here behaves like ``tuple[T1, T2]``, + where ``T1`` and ``T2`` are type variables. To use these type variables + as type parameters of ``Array``, we must *unpack* the type variable tuple using + the star operator: ``*Ts``. The signature of ``Array`` then behaves + as if we had simply written ``class Array(Generic[T1, T2]): ...``. + In contrast to ``Generic[T1, T2]``, however, ``Generic[*Shape]`` allows + us to parameterise the class with an *arbitrary* number of type parameters. + + Type variable tuples can be used anywhere a normal ``TypeVar`` can. + This includes class definitions, as shown above, as well as function + signatures and variable annotations:: + + class Array(Generic[*Ts]): + + def __init__(self, shape: Tuple[*Ts]): + self._shape: Tuple[*Ts] = shape + + def get_shape(self) -> Tuple[*Ts]: + return self._shape + + shape = (Height(480), Width(640)) + x: Array[Height, Width] = Array(shape) + y = abs(x) # Inferred type is Array[Height, Width] + z = x + x # ... is Array[Height, Width] + x.get_shape() # ... is tuple[Height, Width] + + """ + + # Trick Generic __parameters__. + __class__ = typing.TypeVar + + def __iter__(self): + yield self.__unpacked__ + + def __init__(self, name, *, default=NoDefault): + self.__name__ = name + _DefaultMixin.__init__(self, default) + + # for pickling: + def_mod = _caller() + if def_mod != "typing_extensions": + self.__module__ = def_mod + + self.__unpacked__ = Unpack[self] + + def __repr__(self): + return self.__name__ + + def __hash__(self): + return object.__hash__(self) + + def __eq__(self, other): + return self is other + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(self, *args, **kwds): + if "_root" not in kwds: + raise TypeError("Cannot subclass special typing classes") + + +if hasattr(typing, "reveal_type"): # 3.11+ + reveal_type = typing.reveal_type +else: # <=3.10 + + def reveal_type(obj: T, /) -> T: + """Reveal the inferred type of a variable. + + When a static type checker encounters a call to ``reveal_type()``, + it will emit the inferred type of the argument:: + + x: int = 1 + reveal_type(x) + + Running a static type checker (e.g., ``mypy``) on this example + will produce output similar to 'Revealed type is "builtins.int"'. + + At runtime, the function prints the runtime type of the + argument and returns it unchanged. + + """ + # print(f"Runtime type is {type(obj).__name__!r}", file=sys.stderr) + return obj + + +if hasattr(typing, "_ASSERT_NEVER_REPR_MAX_LENGTH"): # 3.11+ + _ASSERT_NEVER_REPR_MAX_LENGTH = typing._ASSERT_NEVER_REPR_MAX_LENGTH +else: # <=3.10 + _ASSERT_NEVER_REPR_MAX_LENGTH = 100 + + +if hasattr(typing, "assert_never"): # 3.11+ + assert_never = typing.assert_never +else: # <=3.10 + + def assert_never(arg: Never, /) -> Never: + """Assert to the type checker that a line of code is unreachable. + + Example:: + + def int_or_str(arg: int | str) -> None: + match arg: + case int(): + print("It's an int") + case str(): + print("It's a str") + case _: + assert_never(arg) + + If a type checker finds that a call to assert_never() is + reachable, it will emit an error. + + At runtime, this throws an exception when called. + + """ + value = repr(arg) + if len(value) > _ASSERT_NEVER_REPR_MAX_LENGTH: + value = value[:_ASSERT_NEVER_REPR_MAX_LENGTH] + "..." 
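+        # The repr is truncated above so that an arbitrarily large argument
+        # cannot produce an unreadable assertion message.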
+ raise AssertionError(f"Expected code to be unreachable, but got: {value}") + + +if sys.version_info >= (3, 12): # 3.12+ + # dataclass_transform exists in 3.11 but lacks the frozen_default parameter + dataclass_transform = typing.dataclass_transform +else: # <=3.11 + + def dataclass_transform( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + frozen_default: bool = False, + field_specifiers: typing.Tuple[ + typing.Union[typing.Type[typing.Any], typing.Callable[..., typing.Any]], ... + ] = (), + **kwargs: typing.Any, + ) -> typing.Callable[[T], T]: + """Decorator that marks a function, class, or metaclass as providing + dataclass-like behavior. + + Example: + + from typing_extensions import dataclass_transform + + _T = TypeVar("_T") + + # Used on a decorator function + @dataclass_transform() + def create_model(cls: type[_T]) -> type[_T]: + ... + return cls + + @create_model + class CustomerModel: + id: int + name: str + + # Used on a base class + @dataclass_transform() + class ModelBase: ... + + class CustomerModel(ModelBase): + id: int + name: str + + # Used on a metaclass + @dataclass_transform() + class ModelMeta(type): ... + + class ModelBase(metaclass=ModelMeta): ... + + class CustomerModel(ModelBase): + id: int + name: str + + Each of the ``CustomerModel`` classes defined in this example will now + behave similarly to a dataclass created with the ``@dataclasses.dataclass`` + decorator. For example, the type checker will synthesize an ``__init__`` + method. + + The arguments to this decorator can be used to customize this behavior: + - ``eq_default`` indicates whether the ``eq`` parameter is assumed to be + True or False if it is omitted by the caller. + - ``order_default`` indicates whether the ``order`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``kw_only_default`` indicates whether the ``kw_only`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``frozen_default`` indicates whether the ``frozen`` parameter is + assumed to be True or False if it is omitted by the caller. + - ``field_specifiers`` specifies a static list of supported classes + or functions that describe fields, similar to ``dataclasses.field()``. + + At runtime, this decorator records its arguments in the + ``__dataclass_transform__`` attribute on the decorated object. + + See PEP 681 for details. + + """ + + def decorator(cls_or_fn): + cls_or_fn.__dataclass_transform__ = { + "eq_default": eq_default, + "order_default": order_default, + "kw_only_default": kw_only_default, + "frozen_default": frozen_default, + "field_specifiers": field_specifiers, + "kwargs": kwargs, + } + return cls_or_fn + + return decorator + + +if hasattr(typing, "override"): # 3.12+ + override = typing.override +else: # <=3.11 + _F = typing.TypeVar("_F", bound=typing.Callable[..., typing.Any]) + + def override(arg: _F, /) -> _F: + """Indicate that a method is intended to override a method in a base class. + + Usage: + + class Base: + def method(self) -> None: + pass + + class Child(Base): + @override + def method(self) -> None: + super().method() + + When this decorator is applied to a method, the type checker will + validate that it overrides a method with the same name on a base class. + This helps prevent bugs that may occur when a base class is changed + without an equivalent change to a child class. + + There is no runtime checking of these properties. 
The decorator + sets the ``__override__`` attribute to ``True`` on the decorated object + to allow runtime introspection. + + See PEP 698 for details. + + """ + try: + arg.__override__ = True + except (AttributeError, TypeError): + # Skip the attribute silently if it is not writable. + # AttributeError happens if the object has __slots__ or a + # read-only property, TypeError if it's a builtin class. + pass + return arg + + +if hasattr(warnings, "deprecated"): + deprecated = warnings.deprecated +else: + _T = typing.TypeVar("_T") + + class deprecated: + """Indicate that a class, function or overload is deprecated. + + When this decorator is applied to an object, the type checker + will generate a diagnostic on usage of the deprecated object. + + Usage: + + @deprecated("Use B instead") + class A: + pass + + @deprecated("Use g instead") + def f(): + pass + + @overload + @deprecated("int support is deprecated") + def g(x: int) -> int: ... + @overload + def g(x: str) -> int: ... + + The warning specified by *category* will be emitted at runtime + on use of deprecated objects. For functions, that happens on calls; + for classes, on instantiation and on creation of subclasses. + If the *category* is ``None``, no warning is emitted at runtime. + The *stacklevel* determines where the + warning is emitted. If it is ``1`` (the default), the warning + is emitted at the direct caller of the deprecated object; if it + is higher, it is emitted further up the stack. + Static type checker behavior is not affected by the *category* + and *stacklevel* arguments. + + The deprecation message passed to the decorator is saved in the + ``__deprecated__`` attribute on the decorated object. + If applied to an overload, the decorator + must be after the ``@overload`` decorator for the attribute to + exist on the overload as returned by ``get_overloads()``. + + See PEP 702 for details. + + """ + + def __init__( + self, + message: str, + /, + *, + category: typing.Optional[typing.Type[Warning]] = DeprecationWarning, + stacklevel: int = 1, + ) -> None: + if not isinstance(message, str): + raise TypeError( + "Expected an object of type str for 'message', not " + f"{type(message).__name__!r}" + ) + self.message = message + self.category = category + self.stacklevel = stacklevel + + def __call__(self, arg: _T, /) -> _T: + # Make sure the inner functions created below don't + # retain a reference to self. + msg = self.message + category = self.category + stacklevel = self.stacklevel + if category is None: + arg.__deprecated__ = msg + return arg + elif isinstance(arg, type): + import functools + from types import MethodType + + original_new = arg.__new__ + + @functools.wraps(original_new) + def __new__(cls, *args, **kwargs): + if cls is arg: + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + if original_new is not object.__new__: + return original_new(cls, *args, **kwargs) + # Mirrors a similar check in object.__new__. 
+ elif cls.__init__ is object.__init__ and (args or kwargs): + raise TypeError(f"{cls.__name__}() takes no arguments") + else: + return original_new(cls) + + arg.__new__ = staticmethod(__new__) + + original_init_subclass = arg.__init_subclass__ + # We need slightly different behavior if __init_subclass__ + # is a bound method (likely if it was implemented in Python) + if isinstance(original_init_subclass, MethodType): + original_init_subclass = original_init_subclass.__func__ + + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = classmethod(__init_subclass__) + # Or otherwise, which likely means it's a builtin such as + # object's implementation of __init_subclass__. + else: + + @functools.wraps(original_init_subclass) + def __init_subclass__(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return original_init_subclass(*args, **kwargs) + + arg.__init_subclass__ = __init_subclass__ + + arg.__deprecated__ = __new__.__deprecated__ = msg + __init_subclass__.__deprecated__ = msg + return arg + elif callable(arg): + import functools + + @functools.wraps(arg) + def wrapper(*args, **kwargs): + warnings.warn(msg, category=category, stacklevel=stacklevel + 1) + return arg(*args, **kwargs) + + arg.__deprecated__ = wrapper.__deprecated__ = msg + return wrapper + else: + raise TypeError( + "@deprecated decorator with non-None category must be applied to " + f"a class or callable, not {arg!r}" + ) + + +# We have to do some monkey patching to deal with the dual nature of +# Unpack/TypeVarTuple: +# - We want Unpack to be a kind of TypeVar so it gets accepted in +# Generic[Unpack[Ts]] +# - We want it to *not* be treated as a TypeVar for the purposes of +# counting generic parameters, so that when we subscript a generic, +# the runtime doesn't try to substitute the Unpack with the subscripted type. +if not hasattr(typing, "TypeVarTuple"): + + def _check_generic(cls, parameters, elen=_marker): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + if elen is _marker: + if not hasattr(cls, "__parameters__") or not cls.__parameters__: + raise TypeError(f"{cls} is not a generic class") + elen = len(cls.__parameters__) + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + num_tv_tuples = sum(isinstance(p, TypeVarTuple) for p in parameters) + if (num_tv_tuples > 0) and (alen >= elen - num_tv_tuples): + return + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. 
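+            # For example, given `class C(Generic[T, U]):` where only U
+            # declares a __default__, `C[int]` is accepted: the missing
+            # argument falls back to U's default. (T and U are illustrative
+            # names, not defined in this module.)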
+ if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if ( + getattr(parameters[alen], "__default__", NoDefault) + is not NoDefault + ): + return + + num_default_tv = sum( + getattr(p, "__default__", NoDefault) is not NoDefault + for p in parameters + ) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + things = "arguments" if sys.version_info >= (3, 10) else "parameters" + raise TypeError( + f"Too {'many' if alen > elen else 'few'} {things}" + f" for {cls}; actual {alen}, expected {expect_val}" + ) + +else: + # Python 3.11+ + + def _check_generic(cls, parameters, elen): + """Check correct count for parameters of a generic cls (internal helper). + + This gives a nice error message in case of count mismatch. + """ + if not elen: + raise TypeError(f"{cls} is not a generic class") + alen = len(parameters) + if alen != elen: + expect_val = elen + if hasattr(cls, "__parameters__"): + parameters = [p for p in cls.__parameters__ if not _is_unpack(p)] + + # deal with TypeVarLike defaults + # required TypeVarLikes cannot appear after a defaulted one. + if alen < elen: + # since we validate TypeVarLike default in _collect_type_vars + # or _collect_parameters we can safely check parameters[alen] + if ( + getattr(parameters[alen], "__default__", NoDefault) + is not NoDefault + ): + return + + num_default_tv = sum( + getattr(p, "__default__", NoDefault) is not NoDefault + for p in parameters + ) + + elen -= num_default_tv + + expect_val = f"at least {elen}" + + raise TypeError( + f"Too {'many' if alen > elen else 'few'} arguments" + f" for {cls}; actual {alen}, expected {expect_val}" + ) + + +if not _PEP_696_IMPLEMENTED: + typing._check_generic = _check_generic + + +def _has_generic_or_protocol_as_origin() -> bool: + try: + frame = sys._getframe(2) + # - Catch AttributeError: not all Python implementations have sys._getframe() + # - Catch ValueError: maybe we're called from an unexpected module + # and the call stack isn't deep enough + except (AttributeError, ValueError): + return False # err on the side of leniency + else: + # If we somehow get invoked from outside typing.py, + # also err on the side of leniency + if frame.f_globals.get("__name__") != "typing": + return False + origin = frame.f_locals.get("origin") + # Cannot use "in" because origin may be an object with a buggy __eq__ that + # throws an error. + return ( + origin is typing.Generic or origin is Protocol or origin is typing.Protocol + ) + + +_TYPEVARTUPLE_TYPES = {TypeVarTuple, getattr(typing, "TypeVarTuple", None)} + + +def _is_unpacked_typevartuple(x) -> bool: + if get_origin(x) is not Unpack: + return False + args = get_args(x) + return bool(args) and len(args) == 1 and type(args[0]) in _TYPEVARTUPLE_TYPES + + +# Python 3.11+ _collect_type_vars was renamed to _collect_parameters +if hasattr(typing, "_collect_type_vars"): + + def _collect_type_vars(types, typevar_types=None): + """Collect all type variable contained in types in order of + first appearance (lexicographic order). 
For example:: + + _collect_type_vars((T, List[S, T])) == (T, S) + """ + if typevar_types is None: + typevar_types = typing.TypeVar + tvars = [] + + # A required TypeVarLike cannot appear after a TypeVarLike with a default + # if it was a direct call to `Generic[]` or `Protocol[]` + enforce_default_ordering = _has_generic_or_protocol_as_origin() + default_encountered = False + + # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple + type_var_tuple_encountered = False + + for t in types: + if _is_unpacked_typevartuple(t): + type_var_tuple_encountered = True + elif isinstance(t, typevar_types) and t not in tvars: + if enforce_default_ordering: + has_default = getattr(t, "__default__", NoDefault) is not NoDefault + if has_default: + if type_var_tuple_encountered: + raise TypeError( + "Type parameter with a default follows TypeVarTuple" + ) + default_encountered = True + elif default_encountered: + raise TypeError( + f"Type parameter {t!r} without a default" + " follows type parameter with a default" + ) + + tvars.append(t) + if _should_collect_from_parameters(t): + tvars.extend([t for t in t.__parameters__ if t not in tvars]) + return tuple(tvars) + + typing._collect_type_vars = _collect_type_vars +else: + + def _collect_parameters(args): + """Collect all type variables and parameter specifications in args + in order of first appearance (lexicographic order). + + For example:: + + assert _collect_parameters((T, Callable[P, T])) == (T, P) + """ + parameters = [] + + # A required TypeVarLike cannot appear after a TypeVarLike with default + # if it was a direct call to `Generic[]` or `Protocol[]` + enforce_default_ordering = _has_generic_or_protocol_as_origin() + default_encountered = False + + # Also, a TypeVarLike with a default cannot appear after a TypeVarTuple + type_var_tuple_encountered = False + + for t in args: + if isinstance(t, type): + # We don't want __parameters__ descriptor of a bare Python class. + pass + elif isinstance(t, tuple): + # `t` might be a tuple, when `ParamSpec` is substituted with + # `[T, int]`, or `[int, *Ts]`, etc. + for x in t: + for collected in _collect_parameters([x]): + if collected not in parameters: + parameters.append(collected) + elif hasattr(t, "__typing_subst__"): + if t not in parameters: + if enforce_default_ordering: + has_default = ( + getattr(t, "__default__", NoDefault) is not NoDefault + ) + + if type_var_tuple_encountered and has_default: + raise TypeError( + "Type parameter with a default follows TypeVarTuple" + ) + + if has_default: + default_encountered = True + elif default_encountered: + raise TypeError( + f"Type parameter {t!r} without a default" + " follows type parameter with a default" + ) + + parameters.append(t) + else: + if _is_unpacked_typevartuple(t): + type_var_tuple_encountered = True + for x in getattr(t, "__parameters__", ()): + if x not in parameters: + parameters.append(x) + + return tuple(parameters) + + if not _PEP_696_IMPLEMENTED: + typing._collect_parameters = _collect_parameters + +# Backport typing.NamedTuple as it exists in Python 3.13. +# In 3.11, the ability to define generic `NamedTuple`s was supported. +# This was explicitly disallowed in 3.9-3.10, and only half-worked in <=3.8. 
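+# ("Generic" meaning e.g. `class Pair(NamedTuple, Generic[T]): ...`, which on
+# 3.9-3.10 raised "Multiple inheritance with NamedTuple is not supported".)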
+# On 3.12, we added __orig_bases__ to call-based NamedTuples +# On 3.13, we deprecated kwargs-based NamedTuples +if sys.version_info >= (3, 13): + NamedTuple = typing.NamedTuple +else: + + def _make_nmtuple(name, types, module, defaults=()): + fields = [n for n, t in types] + annotations = { + n: typing._type_check(t, f"field {n} annotation must be a type") + for n, t in types + } + nm_tpl = collections.namedtuple(name, fields, defaults=defaults, module=module) + nm_tpl.__annotations__ = nm_tpl.__new__.__annotations__ = annotations + # The `_field_types` attribute was removed in 3.9; + # in earlier versions, it is the same as the `__annotations__` attribute + if sys.version_info < (3, 9): + nm_tpl._field_types = annotations + return nm_tpl + + _prohibited_namedtuple_fields = typing._prohibited + _special_namedtuple_fields = frozenset( + {"__module__", "__name__", "__annotations__"} + ) + + class _NamedTupleMeta(type): + def __new__(cls, typename, bases, ns): + assert _NamedTuple in bases + for base in bases: + if base is not _NamedTuple and base is not typing.Generic: + raise TypeError( + "can only inherit from a NamedTuple type and Generic" + ) + bases = tuple(tuple if base is _NamedTuple else base for base in bases) + if "__annotations__" in ns: + types = ns["__annotations__"] + elif "__annotate__" in ns: + # TODO: Use inspect.VALUE here, and make the annotations lazily evaluated + types = ns["__annotate__"](1) + else: + types = {} + default_names = [] + for field_name in types: + if field_name in ns: + default_names.append(field_name) + elif default_names: + raise TypeError( + f"Non-default namedtuple field {field_name} " + "cannot follow default field" + f"{'s' if len(default_names) > 1 else ''} " + f"{', '.join(default_names)}" + ) + nm_tpl = _make_nmtuple( + typename, + list(types.items()), + defaults=[ns[n] for n in default_names], + module=ns["__module__"], + ) + nm_tpl.__bases__ = bases + if typing.Generic in bases: + if hasattr(typing, "_generic_class_getitem"): # 3.12+ + nm_tpl.__class_getitem__ = classmethod( + typing._generic_class_getitem + ) + else: + class_getitem = typing.Generic.__class_getitem__.__func__ + nm_tpl.__class_getitem__ = classmethod(class_getitem) + # update from user namespace without overriding special namedtuple attributes + for key, val in list(ns.items()): + if key in _prohibited_namedtuple_fields: + raise AttributeError("Cannot overwrite NamedTuple attribute " + key) + elif key not in _special_namedtuple_fields: + if key not in nm_tpl._fields: + setattr(nm_tpl, key, ns[key]) + try: + set_name = type(val).__set_name__ + except AttributeError: + pass + else: + try: + set_name(val, nm_tpl, key) + except BaseException as e: + msg = ( + f"Error calling __set_name__ on {type(val).__name__!r} " + f"instance {key!r} in {typename!r}" + ) + # BaseException.add_note() existed on py311, + # but the __set_name__ machinery didn't start + # using add_note() until py312. + # Making sure exceptions are raised in the same way + # as in "normal" classes seems most important here. + if sys.version_info >= (3, 12): + e.add_note(msg) + raise + else: + raise RuntimeError(msg) from e + + if typing.Generic in bases: + nm_tpl.__init_subclass__() + return nm_tpl + + _NamedTuple = type.__new__(_NamedTupleMeta, "NamedTuple", (), {}) + + def _namedtuple_mro_entries(bases): + assert NamedTuple in bases + return (_NamedTuple,) + + @_ensure_subclassable(_namedtuple_mro_entries) + def NamedTuple(typename, fields=_marker, /, **kwargs): + """Typed version of namedtuple. 
+ + Usage:: + + class Employee(NamedTuple): + name: str + id: int + + This is equivalent to:: + + Employee = collections.namedtuple('Employee', ['name', 'id']) + + The resulting class has an extra __annotations__ attribute, giving a + dict that maps field names to types. (The field names are also in + the _fields attribute, which is part of the namedtuple API.) + An alternative equivalent functional syntax is also accepted:: + + Employee = NamedTuple('Employee', [('name', str), ('id', int)]) + """ + if fields is _marker: + if kwargs: + deprecated_thing = "Creating NamedTuple classes using keyword arguments" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "Use the class-based or functional syntax instead." + ) + else: + deprecated_thing = "Failing to pass a value for the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + + example + + "." + ) + elif fields is None: + if kwargs: + raise TypeError( + "Cannot pass `None` as the 'fields' parameter " + "and also specify fields using keyword arguments" + ) + else: + deprecated_thing = "Passing `None` as the 'fields' parameter" + example = f"`{typename} = NamedTuple({typename!r}, [])`" + deprecation_msg = ( + "{name} is deprecated and will be disallowed in Python {remove}. " + "To create a NamedTuple class with 0 fields " + "using the functional syntax, " + "pass an empty list, e.g. " + + example + + "." + ) + elif kwargs: + raise TypeError( + "Either list of fields or keywords" + " can be provided to NamedTuple, not both" + ) + if fields is _marker or fields is None: + warnings.warn( + deprecation_msg.format(name=deprecated_thing, remove="3.15"), + DeprecationWarning, + stacklevel=2, + ) + fields = list(kwargs.items()) + nt = _make_nmtuple(typename, fields, module=_caller()) + nt.__orig_bases__ = (NamedTuple,) + return nt + + +if hasattr(collections.abc, "Buffer"): + Buffer = collections.abc.Buffer +else: + + class Buffer(abc.ABC): # noqa: B024 + """Base class for classes that implement the buffer protocol. + + The buffer protocol allows Python objects to expose a low-level + memory buffer interface. Before Python 3.12, it is not possible + to implement the buffer protocol in pure Python code, or even + to check whether a class implements the buffer protocol. In + Python 3.12 and higher, the ``__buffer__`` method allows access + to the buffer protocol from Python code, and the + ``collections.abc.Buffer`` ABC allows checking whether a class + implements the buffer protocol. + + To indicate support for the buffer protocol in earlier versions, + inherit from this ABC, either in a stub file or at runtime, + or use ABC registration. This ABC provides no methods, because + there is no Python-accessible methods shared by pre-3.12 buffer + classes. It is useful primarily for static checks. + + """ + + # As a courtesy, register the most common stdlib buffer classes. + Buffer.register(memoryview) + Buffer.register(bytearray) + Buffer.register(bytes) + + +# Backport of types.get_original_bases, available on 3.12+ in CPython +if hasattr(_types, "get_original_bases"): + get_original_bases = _types.get_original_bases +else: + + def get_original_bases(cls, /): + """Return the class's "original" bases prior to modification by `__mro_entries__`. 
+ + Examples:: + + from typing import TypeVar, Generic + from typing_extensions import NamedTuple, TypedDict + + T = TypeVar("T") + class Foo(Generic[T]): ... + class Bar(Foo[int], float): ... + class Baz(list[str]): ... + Eggs = NamedTuple("Eggs", [("a", int), ("b", str)]) + Spam = TypedDict("Spam", {"a": int, "b": str}) + + assert get_original_bases(Bar) == (Foo[int], float) + assert get_original_bases(Baz) == (list[str],) + assert get_original_bases(Eggs) == (NamedTuple,) + assert get_original_bases(Spam) == (TypedDict,) + assert get_original_bases(int) == (object,) + """ + try: + return cls.__dict__.get("__orig_bases__", cls.__bases__) + except AttributeError: + raise TypeError( + f"Expected an instance of type, not {type(cls).__name__!r}" + ) from None + + +# NewType is a class on Python 3.10+, making it pickleable +# The error message for subclassing instances of NewType was improved on 3.11+ +if sys.version_info >= (3, 11): + NewType = typing.NewType +else: + + class NewType: + """NewType creates simple unique types with almost zero + runtime overhead. NewType(name, tp) is considered a subtype of tp + by static type checkers. At runtime, NewType(name, tp) returns + a dummy callable that simply returns its argument. Usage:: + UserId = NewType('UserId', int) + def name_by_id(user_id: UserId) -> str: + ... + UserId('user') # Fails type check + name_by_id(42) # Fails type check + name_by_id(UserId(42)) # OK + num = UserId(5) + 1 # type: int + """ + + def __call__(self, obj, /): + return obj + + def __init__(self, name, tp): + self.__qualname__ = name + if "." in name: + name = name.rpartition(".")[-1] + self.__name__ = name + self.__supertype__ = tp + def_mod = _caller() + if def_mod != "typing_extensions": + self.__module__ = def_mod + + def __mro_entries__(self, bases): + # We defined __mro_entries__ to get a better error message + # if a user attempts to subclass a NewType instance. bpo-46170 + supercls_name = self.__name__ + + class Dummy: + def __init_subclass__(cls): + subcls_name = cls.__name__ + raise TypeError( + "Cannot subclass an instance of NewType. " + "Perhaps you were looking for: " + f"`{subcls_name} = NewType({subcls_name!r}, {supercls_name})`" + ) + + return (Dummy,) + + def __repr__(self): + return f"{self.__module__}.{self.__qualname__}" + + def __reduce__(self): + return self.__qualname__ + + if sys.version_info >= (3, 10): + # PEP 604 methods + # It doesn't make sense to have these methods on Python <3.10 + + def __or__(self, other): + return typing.Union[self, other] + + def __ror__(self, other): + return typing.Union[other, self] + + +if hasattr(typing, "TypeAliasType"): + TypeAliasType = typing.TypeAliasType +else: + + def _is_unionable(obj): + """Corresponds to is_unionable() in unionobject.c in CPython.""" + return obj is None or isinstance( + obj, (type, _types.GenericAlias, _types.UnionType, TypeAliasType) + ) + + class TypeAliasType: + """Create named, parameterized type aliases. + + This provides a backport of the new `type` statement in Python 3.12: + + type ListOrSet[T] = list[T] | set[T] + + is equivalent to: + + T = TypeVar("T") + ListOrSet = TypeAliasType("ListOrSet", list[T] | set[T], type_params=(T,)) + + The name ListOrSet can then be used as an alias for the type it refers to. + + The type_params argument should contain all the type parameters used + in the value of the type alias. If the alias is not generic, this + argument is omitted. 
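+
+ A small sketch of the runtime behaviour, reusing the ListOrSet
+ example above:
+
+ ListOrSet.__value__ # list[T] | set[T]
+ ListOrSet.__parameters__ # (T,)
+ ListOrSet[int] # ListOrSet[int]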
+ + Static type checkers should only support type aliases declared using + TypeAliasType that follow these rules: + + - The first argument (the name) must be a string literal. + - The TypeAliasType instance must be immediately assigned to a variable + of the same name. (For example, 'X = TypeAliasType("Y", int)' is invalid, + as is 'X, Y = TypeAliasType("X", int), TypeAliasType("Y", int)'). + + """ + + def __init__(self, name: str, value, *, type_params=()): + if not isinstance(name, str): + raise TypeError("TypeAliasType name must be a string") + self.__value__ = value + self.__type_params__ = type_params + + parameters = [] + for type_param in type_params: + if isinstance(type_param, TypeVarTuple): + parameters.extend(type_param) + else: + parameters.append(type_param) + self.__parameters__ = tuple(parameters) + def_mod = _caller() + if def_mod != "typing_extensions": + self.__module__ = def_mod + # Setting this attribute closes the TypeAliasType from further modification + self.__name__ = name + + def __setattr__(self, name: str, value: object, /) -> None: + if hasattr(self, "__name__"): + self._raise_attribute_error(name) + super().__setattr__(name, value) + + def __delattr__(self, name: str, /) -> Never: + self._raise_attribute_error(name) + + def _raise_attribute_error(self, name: str) -> Never: + # Match the Python 3.12 error messages exactly + if name == "__name__": + raise AttributeError("readonly attribute") + elif name in { + "__value__", + "__type_params__", + "__parameters__", + "__module__", + }: + raise AttributeError( + f"attribute '{name}' of 'typing.TypeAliasType' objects " + "is not writable" + ) + else: + raise AttributeError( + f"'typing.TypeAliasType' object has no attribute '{name}'" + ) + + def __repr__(self) -> str: + return self.__name__ + + def __getitem__(self, parameters): + if not isinstance(parameters, tuple): + parameters = (parameters,) + parameters = [ + typing._type_check( + item, f"Subscripting {self.__name__} requires a type." + ) + for item in parameters + ] + return typing._GenericAlias(self, tuple(parameters)) + + def __reduce__(self): + return self.__name__ + + def __init_subclass__(cls, *args, **kwargs): + raise TypeError( + "type 'typing_extensions.TypeAliasType' is not an acceptable base type" + ) + + # The presence of this method convinces typing._type_check + # that TypeAliasTypes are types. + def __call__(self): + raise TypeError("Type alias is not callable") + + if sys.version_info >= (3, 10): + + def __or__(self, right): + # For forward compatibility with 3.12, reject Unions + # that are not accepted by the built-in Union. + if not _is_unionable(right): + return NotImplemented + return typing.Union[self, right] + + def __ror__(self, left): + if not _is_unionable(left): + return NotImplemented + return typing.Union[left, self] + + +if hasattr(typing, "is_protocol"): + is_protocol = typing.is_protocol + get_protocol_members = typing.get_protocol_members +else: + + def is_protocol(tp: type, /) -> bool: + """Return True if the given type is a Protocol. + + Example:: + + >>> from typing_extensions import Protocol, is_protocol + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> is_protocol(P) + True + >>> is_protocol(int) + False + """ + return ( + isinstance(tp, type) + and getattr(tp, "_is_protocol", False) + and tp is not Protocol + and tp is not typing.Protocol + ) + + def get_protocol_members(tp: type, /) -> typing.FrozenSet[str]: + """Return the set of members defined in a Protocol. 
+ + Example:: + + >>> from typing_extensions import Protocol, get_protocol_members + >>> class P(Protocol): + ... def a(self) -> str: ... + ... b: int + >>> get_protocol_members(P) + frozenset({'a', 'b'}) + + Raise a TypeError for arguments that are not Protocols. + """ + if not is_protocol(tp): + raise TypeError(f"{tp!r} is not a Protocol") + if hasattr(tp, "__protocol_attrs__"): + return frozenset(tp.__protocol_attrs__) + return frozenset(_get_protocol_attrs(tp)) + + +if hasattr(typing, "Doc"): + Doc = typing.Doc +else: + + class Doc: + """Define the documentation of a type annotation using ``Annotated``, to be + used in class attributes, function and method parameters, return values, + and variables. + + The value should be a positional-only string literal to allow static tools + like editors and documentation generators to use it. + + This complements docstrings. + + The string value passed is available in the attribute ``documentation``. + + Example:: + + >>> from typing_extensions import Annotated, Doc + >>> def hi(to: Annotated[str, Doc("Who to say hi to")]) -> None: ... + """ + + def __init__(self, documentation: str, /) -> None: + self.documentation = documentation + + def __repr__(self) -> str: + return f"Doc({self.documentation!r})" + + def __hash__(self) -> int: + return hash(self.documentation) + + def __eq__(self, other: object) -> bool: + if not isinstance(other, Doc): + return NotImplemented + return self.documentation == other.documentation + + +_CapsuleType = getattr(_types, "CapsuleType", None) + +if _CapsuleType is None: + try: + import _socket + except ImportError: + pass + else: + _CAPI = getattr(_socket, "CAPI", None) + if _CAPI is not None: + _CapsuleType = type(_CAPI) + +if _CapsuleType is not None: + CapsuleType = _CapsuleType + __all__.append("CapsuleType") + + +# Aliases for items that have always been in typing. +# Explicitly assign these (rather than using `from typing import *` at the top), +# so that we get a CI error if one of these is deleted from typing.py +# in a future version of Python +AbstractSet = typing.AbstractSet +AnyStr = typing.AnyStr +BinaryIO = typing.BinaryIO +Callable = typing.Callable +Collection = typing.Collection +Container = typing.Container +Dict = typing.Dict +ForwardRef = typing.ForwardRef +FrozenSet = typing.FrozenSet +Generic = typing.Generic +Hashable = typing.Hashable +IO = typing.IO +ItemsView = typing.ItemsView +Iterable = typing.Iterable +Iterator = typing.Iterator +KeysView = typing.KeysView +List = typing.List +Mapping = typing.Mapping +MappingView = typing.MappingView +Match = typing.Match +MutableMapping = typing.MutableMapping +MutableSequence = typing.MutableSequence +MutableSet = typing.MutableSet +Optional = typing.Optional +Pattern = typing.Pattern +Reversible = typing.Reversible +Sequence = typing.Sequence +Set = typing.Set +Sized = typing.Sized +TextIO = typing.TextIO +Tuple = typing.Tuple +Union = typing.Union +ValuesView = typing.ValuesView +cast = typing.cast +no_type_check = typing.no_type_check +no_type_check_decorator = typing.no_type_check_decorator diff --git a/scripts/pack.sh b/scripts/pack.sh index 81bef62b..fe72be8b 100755 --- a/scripts/pack.sh +++ b/scripts/pack.sh @@ -62,6 +62,9 @@ echo "Generate package - ${version}" && \ ucc-gen --source "${input}" --ta-version "${version}" && \ jq '.' 
globalConfig.json > globalConfig.new.json && \
mv globalConfig.new.json globalConfig.json && \
+cp -f "lib_files/typing_extensions.py" "$output/TA_dataset/lib/typing_extensions.py" && \
+cp -f "lib_files/socks.py" "$output/TA_dataset/lib/socks.py" && \
+cp -f "lib_files/sockshandler.py" "$output/TA_dataset/lib/sockshandler.py" && \
echo "Construct tarball" && \
slim package "${output}/${input}" -o "${release}" && \
echo "Validate released tarball" && \