2025-12-25 upload
This commit is contained in:
19
venv/Lib/site-packages/mitmproxy/net/dns/classes.py
Normal file
19
venv/Lib/site-packages/mitmproxy/net/dns/classes.py
Normal file
@@ -0,0 +1,19 @@
|
||||
# DNS CLASS constants (values per RFC 1035 section 3.2.4/3.2.5) and helpers
# converting between the numeric value and its mnemonic.

IN = 1
CH = 3
HS = 4
NONE = 254
ANY = 255

_STRINGS = {IN: "IN", CH: "CH", HS: "HS", NONE: "NONE", ANY: "ANY"}
# Reverse mapping: mnemonic -> numeric class value.
_INTS = dict(zip(_STRINGS.values(), _STRINGS.keys()))


def to_str(class_: int) -> str:
    """Return the mnemonic for a DNS class, or "CLASS(<n>)" if unknown."""
    try:
        return _STRINGS[class_]
    except KeyError:
        return f"CLASS({class_})"


def from_str(class_: str) -> int:
    """Parse a class mnemonic (or the "CLASS(<n>)" fallback) back to its number."""
    value = _INTS.get(class_)
    if value is not None:
        return value
    return int(class_.removeprefix("CLASS(").removesuffix(")"))
|
||||
169
venv/Lib/site-packages/mitmproxy/net/dns/domain_names.py
Normal file
169
venv/Lib/site-packages/mitmproxy/net/dns/domain_names.py
Normal file
@@ -0,0 +1,169 @@
|
||||
import struct
from typing import Optional

from . import types

# Wire-format primitives for DNS domain names (RFC 1035 section 3.1 / 4.1.4).
_LABEL_SIZE = struct.Struct("!B")  # one-octet label length prefix
_POINTER_OFFSET = struct.Struct("!H")  # two-octet compression pointer field
_POINTER_INDICATOR = 0b11000000  # top two bits set => compression pointer


# Memoization table for compressed-name parsing: maps a buffer offset to the
# (name, consumed bytes) parsed there; None marks an offset whose parse is
# still in progress, which is how pointer loops are detected.
Cache = dict[int, Optional[tuple[str, int]]]
|
||||
|
||||
|
||||
def cache() -> Cache:
    """Create a fresh, empty name-parsing cache (one per DNS message)."""
    return {}
|
||||
|
||||
|
||||
def _unpack_label_into(labels: list[str], buffer: bytes, offset: int) -> int:
    """Read one length-prefixed label at `offset`, append its decoded text to
    `labels`, and return the number of bytes consumed (a root/empty label
    consumes just its length octet and appends nothing)."""
    (size,) = _LABEL_SIZE.unpack_from(buffer, offset)
    if size >= 64:
        # Plain labels are limited to 63 octets (RFC 1035); larger values
        # are pointer/extension indicators and must not reach this helper.
        raise struct.error(f"unpack encountered a label of length {size}")
    if size == 0:
        return _LABEL_SIZE.size
    start = offset + _LABEL_SIZE.size
    end = start + size
    if len(buffer) < end:
        raise struct.error(f"unpack requires a label buffer of {size} bytes")
    try:
        decoded = buffer[start:end].decode("idna")
    except UnicodeDecodeError:
        raise struct.error(
            f"unpack encountered an illegal characters at offset {start}"
        )
    labels.append(decoded)
    return _LABEL_SIZE.size + size
|
||||
|
||||
|
||||
def unpack_from_with_compression(
    buffer: bytes, offset: int, cache: Cache
) -> tuple[str, int]:
    """Unpack a (possibly compressed) domain name from `buffer` at `offset`.

    Returns the decoded dotted name and the number of bytes consumed at
    `offset` itself (a trailing compression pointer counts as 2 bytes, no
    matter how long the pointed-to suffix is). Results are memoized in
    `cache` keyed by start offset.

    Raises:
        struct.error: on malformed labels or a compression-pointer loop.
    """
    if offset in cache:
        result = cache[offset]
        # A None entry means this offset is currently being unpacked further
        # up the call stack, so following it again would loop forever.
        if result is None:
            raise struct.error(f"unpack encountered domain name loop")
    else:
        cache[offset] = None  # this will indicate that the offset is being unpacked
        start_offset = offset
        labels: list[str] = []
        while True:
            (size,) = _LABEL_SIZE.unpack_from(buffer, offset)
            # Top two bits set => 2-byte compression pointer, which always
            # terminates the label sequence (RFC 1035 section 4.1.4).
            if size & _POINTER_INDICATOR == _POINTER_INDICATOR:
                (pointer,) = _POINTER_OFFSET.unpack_from(buffer, offset)
                offset += _POINTER_OFFSET.size
                # Clear the two indicator bits of the 16-bit field to get the
                # 14-bit target offset, then recurse to resolve the suffix.
                label, _ = unpack_from_with_compression(
                    buffer, pointer & ~(_POINTER_INDICATOR << 8), cache
                )
                labels.append(label)
                break
            else:
                offset += _unpack_label_into(labels, buffer, offset)
                if size == 0:
                    break
        result = ".".join(labels), (offset - start_offset)
        cache[start_offset] = result
    return result
|
||||
|
||||
|
||||
def unpack_from(buffer: bytes, offset: int) -> tuple[str, int]:
    """Converts RDATA into a domain name without pointer compression from a given offset and also returns the binary size."""
    parts: list[str] = []
    size = -1
    # Consume length-prefixed labels until the zero-length root label.
    while size != 0:
        (size,) = _LABEL_SIZE.unpack_from(buffer, offset)
        if size & _POINTER_INDICATOR == _POINTER_INDICATOR:
            # Compression pointers are not permitted inside RDATA here.
            raise struct.error(
                f"unpack encountered a pointer which is not supported in RDATA"
            )
        offset += _unpack_label_into(parts, buffer, offset)
    # Second element is the offset just past the name's terminating octet.
    return ".".join(parts), offset
|
||||
|
||||
|
||||
def unpack(buffer: bytes) -> str:
    """Converts RDATA into a domain name without pointer compression."""
    name, end = unpack_from(buffer, 0)
    # The name must occupy the entire buffer, nothing more and nothing less.
    if end == len(buffer):
        return name
    raise struct.error(f"unpack requires a buffer of {end} bytes")
|
||||
|
||||
|
||||
def pack(name: str) -> bytes:
    """Converts a domain name into RDATA without pointer compression."""
    out = bytearray()
    # The empty name packs to just the root (zero-length) label below.
    for part in name.split(".") if name else []:
        label = part.encode("idna")
        size = len(label)
        if size == 0:
            raise ValueError(f"domain name '{name}' contains empty labels")
        if size >= 64:  # pragma: no cover
            # encoding with 'idna' will already have raised an exception earlier
            raise ValueError(
                f"encoded label '{part}' of domain name '{name}' is too long ({size} bytes)"
            )
        out.extend(_LABEL_SIZE.pack(size))
        out.extend(label)
    out.extend(_LABEL_SIZE.pack(0))
    return bytes(out)
|
||||
|
||||
|
||||
def record_data_can_have_compression(record_type: int) -> bool:
    """Return True if RDATA of this record type may embed compressed domain names."""
    return record_type in {
        types.CNAME,
        types.HINFO,
        types.MB,
        types.MD,
        types.MF,
        types.MG,
        types.MINFO,
        types.MR,
        types.MX,
        types.NS,
        types.PTR,
        types.SOA,
        types.TXT,
        types.RP,
        types.AFSDB,
        types.RT,
        types.SIG,
        types.PX,
        types.NXT,
        types.NAPTR,
        types.SRV,
    }
|
||||
|
||||
|
||||
def decompress_from_record_data(
    buffer: bytes, offset: int, end_data: int, cached_names: Cache
) -> bytes:
    """Return buffer[offset:end_data] with domain-name compression pointers
    expanded to their full uncompressed wire form.

    Heuristic scan: any byte with the top two bits set is tried as a pointer;
    bytes that fail to parse as a name are left untouched.
    """
    # we decompress compression pointers in RDATA by iterating through each byte and checking
    # if it has a leading 0b11, if so we try to decompress it and update it in the data variable.
    data = bytearray(buffer[offset:end_data])
    data_offset = 0  # read position relative to `offset` in the ORIGINAL buffer
    decompress_size = 0  # net growth of `data` from expansions so far
    while data_offset < end_data - offset:
        if buffer[offset + data_offset] & _POINTER_INDICATOR == _POINTER_INDICATOR:
            try:
                (
                    rr_name,
                    rr_name_len,
                ) = unpack_from_with_compression(
                    buffer, offset + data_offset, cached_names
                )
                # Replace the rr_name_len compressed bytes (here always the
                # 2-byte pointer itself) with the full uncompressed encoding.
                data[
                    data_offset + decompress_size : data_offset
                    + decompress_size
                    + rr_name_len
                ] = pack(rr_name)
                # NOTE(review): growth is tracked as len(rr_name) (dotted
                # string length). For ASCII names this equals
                # len(pack(rr_name)) - 2, matching the replaced 2-byte
                # pointer, but IDNA labels can encode to a different byte
                # length than their decoded text — confirm intended.
                decompress_size += len(rr_name)
                data_offset += rr_name_len
                continue
            except struct.error:
                # the byte isn't actually a domain name compression pointer but some other data type
                pass
        data_offset += 1
    return bytes(data)
|
||||
134
venv/Lib/site-packages/mitmproxy/net/dns/https_records.py
Normal file
134
venv/Lib/site-packages/mitmproxy/net/dns/https_records.py
Normal file
@@ -0,0 +1,134 @@
|
||||
import enum
|
||||
import struct
|
||||
from dataclasses import dataclass
|
||||
from typing import Self
|
||||
|
||||
from ...utils import strutils
|
||||
from . import domain_names
|
||||
|
||||
"""
|
||||
HTTPS records are formatted as follows (as per RFC9460):
|
||||
- a 2-octet field for SvcPriority as an integer in network byte order.
|
||||
- the uncompressed, fully qualified TargetName, represented as a sequence of length-prefixed labels per Section 3.1 of [RFC1035].
|
||||
- the SvcParams, consuming the remainder of the record (so smaller than 65535 octets and constrained by the RDATA and DNS message sizes).
|
||||
|
||||
When the list of SvcParams is non-empty, it contains a series of SvcParamKey=SvcParamValue pairs, represented as:
|
||||
- a 2-octet field containing the SvcParamKey as an integer in network byte order. (See Section 14.3.2 for the defined values.)
|
||||
- a 2-octet field containing the length of the SvcParamValue as an integer between 0 and 65535 in network byte order.
|
||||
- an octet string of this length whose contents are the SvcParamValue in a format determined by the SvcParamKey.
|
||||
|
||||
https://datatracker.ietf.org/doc/rfc9460/
|
||||
https://datatracker.ietf.org/doc/rfc1035/
|
||||
"""
|
||||
|
||||
|
||||
class SVCParamKeys(enum.Enum):
    """Registered SvcParamKey values (RFC 9460 section 14.3.2)."""

    MANDATORY = 0
    ALPN = 1
    NO_DEFAULT_ALPN = 2
    PORT = 3
    IPV4HINT = 4
    ECH = 5
    IPV6HINT = 6


# JSON-friendly shape of an HTTPS record: known SvcParamKeys appear as
# lowercase string keys, unknown ones as ints; values are escaped strings.
type HTTPSRecordJSON = dict[str | int, str | int]
|
||||
|
||||
|
||||
@dataclass
class HTTPSRecord:
    """A parsed HTTPS (SVCB-compatible) resource record, per RFC 9460."""

    # SvcPriority: 2-octet integer in network byte order.
    priority: int
    # Uncompressed, fully qualified target name.
    target_name: str
    # Raw SvcParamKey -> SvcParamValue mapping; values are opaque bytes.
    params: dict[int, bytes]

    def __repr__(self):
        return str(self.to_json())

    def to_json(self) -> HTTPSRecordJSON:
        """Serialize to a JSON-friendly dict: known SvcParamKeys become
        lowercase names, unknown keys stay numeric, values are escaped
        strings."""
        ret: HTTPSRecordJSON = {
            "target_name": self.target_name,
            "priority": self.priority,
        }
        typ: str | int
        for typ, val in self.params.items():
            try:
                typ = SVCParamKeys(typ).name.lower()
            except ValueError:
                pass  # unknown SvcParamKey: keep the numeric key
            ret[typ] = strutils.bytes_to_escaped_str(val)
        return ret

    @classmethod
    def from_json(cls, data: HTTPSRecordJSON) -> Self:
        """Inverse of to_json().

        Operates on a shallow copy so the caller's dict is not mutated
        (the previous implementation pop()ed entries out of `data`).
        """
        data = dict(data)
        target_name = data.pop("target_name")
        assert isinstance(target_name, str)
        priority = data.pop("priority")
        assert isinstance(priority, int)
        params: dict[int, bytes] = {}
        for k, v in data.items():
            if isinstance(k, str):
                # String keys are lowercase mnemonics emitted by to_json().
                k = SVCParamKeys[k.upper()].value
            assert isinstance(v, str)
            params[k] = strutils.escaped_str_to_bytes(v)
        return cls(target_name=target_name, priority=priority, params=params)
|
||||
|
||||
|
||||
def _unpack_params(data: bytes, offset: int) -> dict[int, bytes]:
    """Unpacks the service parameters from the given offset."""
    result: dict[int, bytes] = {}
    total = len(data)
    # Each parameter is: 2-byte key, 2-byte value length, then the value.
    while offset < total:
        (key,) = struct.unpack("!H", data[offset : offset + 2])
        offset += 2
        (length,) = struct.unpack("!H", data[offset : offset + 2])
        offset += 2
        end = offset + length
        if end > total:
            raise struct.error(
                "unpack requires a buffer of %i bytes" % (offset + length)
            )
        result[key] = data[offset:end]
        offset = end
    return result
|
||||
|
||||
|
||||
def unpack(data: bytes) -> HTTPSRecord:
    """
    Unpacks HTTPS RDATA from byte data.

    Raises:
        struct.error if the record is malformed.
    """
    offset = 0

    # SvcPriority (2 bytes): RFC 9460 defines this as an *unsigned* 16-bit
    # integer, so read with "!H" — "!h" would turn priorities >= 32768 into
    # negative numbers.
    priority = struct.unpack("!H", data[offset : offset + 2])[0]
    offset += 2

    # TargetName (variable length, uncompressed wire format)
    target_name, offset = domain_names.unpack_from(data, offset)

    # Service Parameters (remaining bytes)
    params = _unpack_params(data, offset)

    return HTTPSRecord(priority=priority, target_name=target_name, params=params)
|
||||
|
||||
|
||||
def _pack_params(params: dict[int, bytes]) -> bytes:
    """Converts the service parameters into the raw byte format"""
    chunks: list[bytes] = []

    # Each parameter is emitted as: 2-byte key, 2-byte length, value bytes.
    for key, value in params.items():
        chunks.append(struct.pack("!HH", key, len(value)))
        chunks.append(value)

    return b"".join(chunks)
|
||||
|
||||
|
||||
def pack(record: HTTPSRecord) -> bytes:
    """Packs the HTTPS record into its bytes form."""
    buffer = bytearray()
    # SvcPriority is an unsigned 16-bit field per RFC 9460, so pack with
    # "!H" ("!h" would reject valid priorities >= 32768).
    buffer.extend(struct.pack("!H", record.priority))
    buffer.extend(domain_names.pack(record.target_name))
    buffer.extend(_pack_params(record.params))
    return bytes(buffer)
|
||||
27
venv/Lib/site-packages/mitmproxy/net/dns/op_codes.py
Normal file
27
venv/Lib/site-packages/mitmproxy/net/dns/op_codes.py
Normal file
@@ -0,0 +1,27 @@
|
||||
# DNS OPCODE constants (message-header opcode field) and helpers converting
# between the numeric value and its mnemonic.

QUERY = 0
IQUERY = 1
STATUS = 2
NOTIFY = 4
UPDATE = 5
DSO = 6

_STRINGS = {
    QUERY: "QUERY",
    IQUERY: "IQUERY",
    STATUS: "STATUS",
    NOTIFY: "NOTIFY",
    UPDATE: "UPDATE",
    DSO: "DSO",
}
# Reverse mapping: mnemonic -> numeric opcode.
_INTS = dict(zip(_STRINGS.values(), _STRINGS.keys()))


def to_str(op_code: int) -> str:
    """Return the mnemonic for an opcode, or "OPCODE(<n>)" if unknown."""
    try:
        return _STRINGS[op_code]
    except KeyError:
        return f"OPCODE({op_code})"


def from_str(op_code: str) -> int:
    """Parse an opcode mnemonic (or the "OPCODE(<n>)" fallback) back to its number."""
    value = _INTS.get(op_code)
    if value is not None:
        return value
    return int(op_code.removeprefix("OPCODE(").removesuffix(")"))
|
||||
58
venv/Lib/site-packages/mitmproxy/net/dns/response_codes.py
Normal file
58
venv/Lib/site-packages/mitmproxy/net/dns/response_codes.py
Normal file
@@ -0,0 +1,58 @@
|
||||
# DNS RCODE constants plus helpers mapping them to mnemonics and to the
# HTTP status code deemed closest in meaning.

NOERROR = 0
FORMERR = 1
SERVFAIL = 2
NXDOMAIN = 3
NOTIMP = 4
REFUSED = 5
YXDOMAIN = 6
YXRRSET = 7
NXRRSET = 8
NOTAUTH = 9
NOTZONE = 10
DSOTYPENI = 11

# Closest HTTP status for each RCODE; unknown codes fall back to 500
# in http_equiv_status_code().
_CODES = {
    NOERROR: 200,
    FORMERR: 400,
    SERVFAIL: 500,
    NXDOMAIN: 404,
    NOTIMP: 501,
    REFUSED: 403,
    YXDOMAIN: 409,
    YXRRSET: 409,
    NXRRSET: 410,
    NOTAUTH: 401,
    NOTZONE: 404,
    DSOTYPENI: 501,
}

_STRINGS = {
    NOERROR: "NOERROR",
    FORMERR: "FORMERR",
    SERVFAIL: "SERVFAIL",
    NXDOMAIN: "NXDOMAIN",
    NOTIMP: "NOTIMP",
    REFUSED: "REFUSED",
    YXDOMAIN: "YXDOMAIN",
    YXRRSET: "YXRRSET",
    NXRRSET: "NXRRSET",
    NOTAUTH: "NOTAUTH",
    NOTZONE: "NOTZONE",
    DSOTYPENI: "DSOTYPENI",
}
# Reverse mapping: mnemonic -> numeric RCODE.
_INTS = dict(zip(_STRINGS.values(), _STRINGS.keys()))


def http_equiv_status_code(response_code: int) -> int:
    """Map an RCODE to its closest HTTP status code (500 if unknown)."""
    return _CODES.get(response_code, 500)


def to_str(response_code: int) -> str:
    """Return the mnemonic for an RCODE, or "RCODE(<n>)" if unknown."""
    try:
        return _STRINGS[response_code]
    except KeyError:
        return f"RCODE({response_code})"


def from_str(response_code: str) -> int:
    """Parse an RCODE mnemonic (or the "RCODE(<n>)" fallback) back to its number."""
    value = _INTS.get(response_code)
    if value is not None:
        return value
    return int(response_code.removeprefix("RCODE(").removesuffix(")"))
|
||||
193
venv/Lib/site-packages/mitmproxy/net/dns/types.py
Normal file
193
venv/Lib/site-packages/mitmproxy/net/dns/types.py
Normal file
@@ -0,0 +1,193 @@
|
||||
# DNS record TYPE values (IANA "Resource Record (RR) TYPEs" registry) and
# helpers converting between the numeric value and its mnemonic.

A = 1
NS = 2
MD = 3
MF = 4
CNAME = 5
SOA = 6
MB = 7
MG = 8
MR = 9
NULL = 10
WKS = 11
PTR = 12
HINFO = 13
MINFO = 14
MX = 15
TXT = 16
RP = 17
AFSDB = 18
X25 = 19
ISDN = 20
RT = 21
NSAP = 22
NSAP_PTR = 23
SIG = 24
KEY = 25
PX = 26
GPOS = 27
AAAA = 28
LOC = 29
NXT = 30
EID = 31
NIMLOC = 32
SRV = 33
ATMA = 34
NAPTR = 35
KX = 36
CERT = 37
A6 = 38
DNAME = 39
SINK = 40
OPT = 41
APL = 42
DS = 43
SSHFP = 44
IPSECKEY = 45
RRSIG = 46
NSEC = 47
DNSKEY = 48
DHCID = 49
NSEC3 = 50
NSEC3PARAM = 51
TLSA = 52
SMIMEA = 53
HIP = 55
NINFO = 56
RKEY = 57
TALINK = 58
CDS = 59
CDNSKEY = 60
OPENPGPKEY = 61
CSYNC = 62
ZONEMD = 63
SVCB = 64
HTTPS = 65
SPF = 99
UINFO = 100
UID = 101
GID = 102
UNSPEC = 103
NID = 104
L32 = 105
L64 = 106
LP = 107
EUI48 = 108
EUI64 = 109
TKEY = 249
TSIG = 250
IXFR = 251
AXFR = 252
MAILB = 253
MAILA = 254
ANY = 255
URI = 256
CAA = 257
AVC = 258
DOA = 259
AMTRELAY = 260
TA = 32768
DLV = 32769

# value -> mnemonic, built from the constant names above (definition order
# preserved, each constant listed exactly once).
_STRINGS: dict[int, str] = {
    globals()[_name]: _name
    for _name in (
        "A", "NS", "MD", "MF", "CNAME", "SOA", "MB", "MG", "MR", "NULL",
        "WKS", "PTR", "HINFO", "MINFO", "MX", "TXT", "RP", "AFSDB", "X25",
        "ISDN", "RT", "NSAP", "NSAP_PTR", "SIG", "KEY", "PX", "GPOS", "AAAA",
        "LOC", "NXT", "EID", "NIMLOC", "SRV", "ATMA", "NAPTR", "KX", "CERT",
        "A6", "DNAME", "SINK", "OPT", "APL", "DS", "SSHFP", "IPSECKEY",
        "RRSIG", "NSEC", "DNSKEY", "DHCID", "NSEC3", "NSEC3PARAM", "TLSA",
        "SMIMEA", "HIP", "NINFO", "RKEY", "TALINK", "CDS", "CDNSKEY",
        "OPENPGPKEY", "CSYNC", "ZONEMD", "SVCB", "HTTPS", "SPF", "UINFO",
        "UID", "GID", "UNSPEC", "NID", "L32", "L64", "LP", "EUI48", "EUI64",
        "TKEY", "TSIG", "IXFR", "AXFR", "MAILB", "MAILA", "ANY", "URI",
        "CAA", "AVC", "DOA", "AMTRELAY", "TA", "DLV",
    )
}
# Reverse mapping: mnemonic -> numeric type value.
_INTS = {v: k for k, v in _STRINGS.items()}


def to_str(type_: int) -> str:
    """Return the mnemonic for a record type, or "TYPE(<n>)" if unknown."""
    try:
        return _STRINGS[type_]
    except KeyError:
        return f"TYPE({type_})"


def from_str(type_: str) -> int:
    """Parse a type mnemonic (or the "TYPE(<n>)" fallback) back to its number."""
    value = _INTS.get(type_)
    if value is not None:
        return value
    return int(type_.removeprefix("TYPE(").removesuffix(")"))
|
||||
Reference in New Issue
Block a user