2025-12-25 upload
This commit is contained in:
4
venv/Lib/site-packages/mitmproxy/contrib/README.md
Normal file
4
venv/Lib/site-packages/mitmproxy/contrib/README.md
Normal file
@@ -0,0 +1,4 @@
|
||||
# mitmproxy/contrib
|
||||
|
||||
This directory includes vendored code from other sources.
|
||||
See the respective README and LICENSE files for details.
|
||||
28
venv/Lib/site-packages/mitmproxy/contrib/click/LICENSE.BSD-3
Normal file
28
venv/Lib/site-packages/mitmproxy/contrib/click/LICENSE.BSD-3
Normal file
@@ -0,0 +1,28 @@
|
||||
Copyright 2014 Pallets
|
||||
|
||||
Redistribution and use in source and binary forms, with or without
|
||||
modification, are permitted provided that the following conditions are
|
||||
met:
|
||||
|
||||
1. Redistributions of source code must retain the above copyright
|
||||
notice, this list of conditions and the following disclaimer.
|
||||
|
||||
2. Redistributions in binary form must reproduce the above copyright
|
||||
notice, this list of conditions and the following disclaimer in the
|
||||
documentation and/or other materials provided with the distribution.
|
||||
|
||||
3. Neither the name of the copyright holder nor the names of its
|
||||
contributors may be used to endorse or promote products derived from
|
||||
this software without specific prior written permission.
|
||||
|
||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
|
||||
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
|
||||
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
|
||||
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
|
||||
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
|
||||
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
||||
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
159
venv/Lib/site-packages/mitmproxy/contrib/click/__init__.py
Normal file
159
venv/Lib/site-packages/mitmproxy/contrib/click/__init__.py
Normal file
@@ -0,0 +1,159 @@
|
||||
"""
|
||||
SPDX-License-Identifier: BSD-3-Clause
|
||||
|
||||
A vendored copy of click.style() @ 4f7b255
|
||||
"""
|
||||
import typing as t
|
||||
|
||||
_ansi_colors = {
|
||||
"black": 30,
|
||||
"red": 31,
|
||||
"green": 32,
|
||||
"yellow": 33,
|
||||
"blue": 34,
|
||||
"magenta": 35,
|
||||
"cyan": 36,
|
||||
"white": 37,
|
||||
"reset": 39,
|
||||
"bright_black": 90,
|
||||
"bright_red": 91,
|
||||
"bright_green": 92,
|
||||
"bright_yellow": 93,
|
||||
"bright_blue": 94,
|
||||
"bright_magenta": 95,
|
||||
"bright_cyan": 96,
|
||||
"bright_white": 97,
|
||||
}
|
||||
_ansi_reset_all = "\033[0m"
|
||||
|
||||
|
||||
def _interpret_color(
|
||||
color: t.Union[int, t.Tuple[int, int, int], str], offset: int = 0
|
||||
) -> str:
|
||||
if isinstance(color, int):
|
||||
return f"{38 + offset};5;{color:d}"
|
||||
|
||||
if isinstance(color, (tuple, list)):
|
||||
r, g, b = color
|
||||
return f"{38 + offset};2;{r:d};{g:d};{b:d}"
|
||||
|
||||
return str(_ansi_colors[color] + offset)
|
||||
|
||||
|
||||
def style(
|
||||
text: t.Any,
|
||||
fg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None,
|
||||
bg: t.Optional[t.Union[int, t.Tuple[int, int, int], str]] = None,
|
||||
bold: t.Optional[bool] = None,
|
||||
dim: t.Optional[bool] = None,
|
||||
underline: t.Optional[bool] = None,
|
||||
overline: t.Optional[bool] = None,
|
||||
italic: t.Optional[bool] = None,
|
||||
blink: t.Optional[bool] = None,
|
||||
reverse: t.Optional[bool] = None,
|
||||
strikethrough: t.Optional[bool] = None,
|
||||
reset: bool = True,
|
||||
) -> str:
|
||||
"""Styles a text with ANSI styles and returns the new string. By
|
||||
default the styling is self contained which means that at the end
|
||||
of the string a reset code is issued. This can be prevented by
|
||||
passing ``reset=False``.
|
||||
Examples::
|
||||
click.echo(click.style('Hello World!', fg='green'))
|
||||
click.echo(click.style('ATTENTION!', blink=True))
|
||||
click.echo(click.style('Some things', reverse=True, fg='cyan'))
|
||||
click.echo(click.style('More colors', fg=(255, 12, 128), bg=117))
|
||||
Supported color names:
|
||||
* ``black`` (might be a gray)
|
||||
* ``red``
|
||||
* ``green``
|
||||
* ``yellow`` (might be an orange)
|
||||
* ``blue``
|
||||
* ``magenta``
|
||||
* ``cyan``
|
||||
* ``white`` (might be light gray)
|
||||
* ``bright_black``
|
||||
* ``bright_red``
|
||||
* ``bright_green``
|
||||
* ``bright_yellow``
|
||||
* ``bright_blue``
|
||||
* ``bright_magenta``
|
||||
* ``bright_cyan``
|
||||
* ``bright_white``
|
||||
* ``reset`` (reset the color code only)
|
||||
If the terminal supports it, color may also be specified as:
|
||||
- An integer in the interval [0, 255]. The terminal must support
|
||||
8-bit/256-color mode.
|
||||
- An RGB tuple of three integers in [0, 255]. The terminal must
|
||||
support 24-bit/true-color mode.
|
||||
See https://en.wikipedia.org/wiki/ANSI_color and
|
||||
https://gist.github.com/XVilka/8346728 for more information.
|
||||
:param text: the string to style with ansi codes.
|
||||
:param fg: if provided this will become the foreground color.
|
||||
:param bg: if provided this will become the background color.
|
||||
:param bold: if provided this will enable or disable bold mode.
|
||||
:param dim: if provided this will enable or disable dim mode. This is
|
||||
badly supported.
|
||||
:param underline: if provided this will enable or disable underline.
|
||||
:param overline: if provided this will enable or disable overline.
|
||||
:param italic: if provided this will enable or disable italic.
|
||||
:param blink: if provided this will enable or disable blinking.
|
||||
:param reverse: if provided this will enable or disable inverse
|
||||
rendering (foreground becomes background and the
|
||||
other way round).
|
||||
:param strikethrough: if provided this will enable or disable
|
||||
striking through text.
|
||||
:param reset: by default a reset-all code is added at the end of the
|
||||
string which means that styles do not carry over. This
|
||||
can be disabled to compose styles.
|
||||
.. versionchanged:: 8.0
|
||||
A non-string ``message`` is converted to a string.
|
||||
.. versionchanged:: 8.0
|
||||
Added support for 256 and RGB color codes.
|
||||
.. versionchanged:: 8.0
|
||||
Added the ``strikethrough``, ``italic``, and ``overline``
|
||||
parameters.
|
||||
.. versionchanged:: 7.0
|
||||
Added support for bright colors.
|
||||
.. versionadded:: 2.0
|
||||
"""
|
||||
if not isinstance(text, str):
|
||||
text = str(text)
|
||||
|
||||
bits = []
|
||||
|
||||
if fg:
|
||||
try:
|
||||
bits.append(f"\033[{_interpret_color(fg)}m")
|
||||
except KeyError:
|
||||
raise TypeError(f"Unknown color {fg!r}") from None
|
||||
|
||||
if bg:
|
||||
try:
|
||||
bits.append(f"\033[{_interpret_color(bg, 10)}m")
|
||||
except KeyError:
|
||||
raise TypeError(f"Unknown color {bg!r}") from None
|
||||
|
||||
if bold is not None:
|
||||
bits.append(f"\033[{1 if bold else 22}m")
|
||||
if dim is not None:
|
||||
bits.append(f"\033[{2 if dim else 22}m")
|
||||
if underline is not None:
|
||||
bits.append(f"\033[{4 if underline else 24}m")
|
||||
if overline is not None:
|
||||
bits.append(f"\033[{53 if overline else 55}m")
|
||||
if italic is not None:
|
||||
bits.append(f"\033[{3 if italic else 23}m")
|
||||
if blink is not None:
|
||||
bits.append(f"\033[{5 if blink else 25}m")
|
||||
if reverse is not None:
|
||||
bits.append(f"\033[{7 if reverse else 27}m")
|
||||
if strikethrough is not None:
|
||||
bits.append(f"\033[{9 if strikethrough else 29}m")
|
||||
bits.append(text)
|
||||
if reset:
|
||||
bits.append(_ansi_reset_all)
|
||||
return "".join(bits)
|
||||
|
||||
|
||||
__all__ = ["style"]
|
||||
142
venv/Lib/site-packages/mitmproxy/contrib/imghdr.py
Normal file
142
venv/Lib/site-packages/mitmproxy/contrib/imghdr.py
Normal file
@@ -0,0 +1,142 @@
|
||||
# A vendored copy of Python's imghdr module, which is slated for removal in Python 3.13.
|
||||
#
|
||||
# Source: https://github.com/python/cpython/blob/3.12/Lib/imghdr.py
|
||||
# SPDX-License-Identifier: PSF-2.0
|
||||
|
||||
"""Recognize image file formats based on their first few bytes."""
|
||||
|
||||
from os import PathLike
|
||||
import warnings
|
||||
|
||||
__all__ = ["what"]
|
||||
|
||||
|
||||
# warnings._deprecated(__name__, remove=(3, 13))
|
||||
|
||||
|
||||
#-------------------------#
|
||||
# Recognize image headers #
|
||||
#-------------------------#
|
||||
|
||||
def what(file, h=None):
|
||||
"""Return the type of image contained in a file or byte stream."""
|
||||
f = None
|
||||
try:
|
||||
if h is None:
|
||||
if isinstance(file, (str, PathLike)):
|
||||
f = open(file, 'rb')
|
||||
h = f.read(32)
|
||||
else:
|
||||
location = file.tell()
|
||||
h = file.read(32)
|
||||
file.seek(location)
|
||||
for tf in tests:
|
||||
res = tf(h, f)
|
||||
if res:
|
||||
return res
|
||||
finally:
|
||||
if f: f.close()
|
||||
return None
|
||||
|
||||
|
||||
#---------------------------------#
|
||||
# Subroutines per image file type #
|
||||
#---------------------------------#
|
||||
|
||||
tests = []
|
||||
|
||||
def test_jpeg(h, f):
|
||||
"""Test for JPEG data with JFIF or Exif markers; and raw JPEG."""
|
||||
if h[6:10] in (b'JFIF', b'Exif'):
|
||||
return 'jpeg'
|
||||
elif h[:4] == b'\xff\xd8\xff\xdb':
|
||||
return 'jpeg'
|
||||
|
||||
tests.append(test_jpeg)
|
||||
|
||||
def test_png(h, f):
|
||||
"""Verify if the image is a PNG."""
|
||||
if h.startswith(b'\211PNG\r\n\032\n'):
|
||||
return 'png'
|
||||
|
||||
tests.append(test_png)
|
||||
|
||||
def test_gif(h, f):
|
||||
"""Verify if the image is a GIF ('87 or '89 variants)."""
|
||||
if h[:6] in (b'GIF87a', b'GIF89a'):
|
||||
return 'gif'
|
||||
|
||||
tests.append(test_gif)
|
||||
|
||||
def test_tiff(h, f):
|
||||
"""Verify if the image is a TIFF (can be in Motorola or Intel byte order)."""
|
||||
if h[:2] in (b'MM', b'II'):
|
||||
return 'tiff'
|
||||
|
||||
tests.append(test_tiff)
|
||||
|
||||
def test_rgb(h, f):
|
||||
"""test for the SGI image library."""
|
||||
if h.startswith(b'\001\332'):
|
||||
return 'rgb'
|
||||
|
||||
tests.append(test_rgb)
|
||||
|
||||
def test_pbm(h, f):
|
||||
"""Verify if the image is a PBM (portable bitmap)."""
|
||||
if len(h) >= 3 and \
|
||||
h[0] == ord(b'P') and h[1] in b'14' and h[2] in b' \t\n\r':
|
||||
return 'pbm'
|
||||
|
||||
tests.append(test_pbm)
|
||||
|
||||
def test_pgm(h, f):
|
||||
"""Verify if the image is a PGM (portable graymap)."""
|
||||
if len(h) >= 3 and \
|
||||
h[0] == ord(b'P') and h[1] in b'25' and h[2] in b' \t\n\r':
|
||||
return 'pgm'
|
||||
|
||||
tests.append(test_pgm)
|
||||
|
||||
def test_ppm(h, f):
|
||||
"""Verify if the image is a PPM (portable pixmap)."""
|
||||
if len(h) >= 3 and \
|
||||
h[0] == ord(b'P') and h[1] in b'36' and h[2] in b' \t\n\r':
|
||||
return 'ppm'
|
||||
|
||||
tests.append(test_ppm)
|
||||
|
||||
def test_rast(h, f):
|
||||
"""test for the Sun raster file."""
|
||||
if h.startswith(b'\x59\xA6\x6A\x95'):
|
||||
return 'rast'
|
||||
|
||||
tests.append(test_rast)
|
||||
|
||||
def test_xbm(h, f):
|
||||
"""Verify if the image is a X bitmap (X10 or X11)."""
|
||||
if h.startswith(b'#define '):
|
||||
return 'xbm'
|
||||
|
||||
tests.append(test_xbm)
|
||||
|
||||
def test_bmp(h, f):
|
||||
"""Verify if the image is a BMP file."""
|
||||
if h.startswith(b'BM'):
|
||||
return 'bmp'
|
||||
|
||||
tests.append(test_bmp)
|
||||
|
||||
def test_webp(h, f):
|
||||
"""Verify if the image is a WebP."""
|
||||
if h.startswith(b'RIFF') and h[8:12] == b'WEBP':
|
||||
return 'webp'
|
||||
|
||||
tests.append(test_webp)
|
||||
|
||||
def test_exr(h, f):
|
||||
"""verify is the image ia a OpenEXR fileOpenEXR."""
|
||||
if h.startswith(b'\x76\x2f\x31\x01'):
|
||||
return 'exr'
|
||||
|
||||
tests.append(test_exr)
|
||||
@@ -0,0 +1 @@
|
||||
Either MIT or CC-0 - see the individual .ksy files for the respective license.
|
||||
@@ -0,0 +1,3 @@
|
||||
# Kaitai Struct Formats
|
||||
|
||||
Most of the formats here are vendored from https://github.com/kaitai-io/kaitai_struct_formats/.
|
||||
@@ -0,0 +1,140 @@
|
||||
meta:
|
||||
id: dtls_client_hello
|
||||
endian: be
|
||||
license: MIT
|
||||
|
||||
seq:
|
||||
- id: version
|
||||
type: version
|
||||
|
||||
- id: random
|
||||
type: random
|
||||
|
||||
- id: session_id
|
||||
type: session_id
|
||||
|
||||
- id: cookie
|
||||
type: cookie
|
||||
|
||||
- id: cipher_suites
|
||||
type: cipher_suites
|
||||
|
||||
- id: compression_methods
|
||||
type: compression_methods
|
||||
|
||||
- id: extensions
|
||||
type: extensions
|
||||
if: _io.eof == false
|
||||
|
||||
types:
|
||||
version:
|
||||
seq:
|
||||
- id: major
|
||||
type: u1
|
||||
|
||||
- id: minor
|
||||
type: u1
|
||||
|
||||
random:
|
||||
seq:
|
||||
- id: gmt_unix_time
|
||||
type: u4
|
||||
|
||||
- id: random
|
||||
size: 28
|
||||
|
||||
session_id:
|
||||
seq:
|
||||
- id: len
|
||||
type: u1
|
||||
|
||||
- id: sid
|
||||
size: len
|
||||
|
||||
cookie:
|
||||
seq:
|
||||
- id: len
|
||||
type: u1
|
||||
|
||||
- id: cookie
|
||||
size: len
|
||||
|
||||
cipher_suites:
|
||||
seq:
|
||||
- id: len
|
||||
type: u2
|
||||
|
||||
- id: cipher_suites
|
||||
type: u2
|
||||
repeat: expr
|
||||
repeat-expr: len/2
|
||||
|
||||
compression_methods:
|
||||
seq:
|
||||
- id: len
|
||||
type: u1
|
||||
|
||||
- id: compression_methods
|
||||
size: len
|
||||
|
||||
extensions:
|
||||
seq:
|
||||
- id: len
|
||||
type: u2
|
||||
|
||||
- id: extensions
|
||||
type: extension
|
||||
repeat: eos
|
||||
|
||||
extension:
|
||||
seq:
|
||||
- id: type
|
||||
type: u2
|
||||
|
||||
- id: len
|
||||
type: u2
|
||||
|
||||
- id: body
|
||||
size: len
|
||||
type:
|
||||
switch-on: type
|
||||
cases:
|
||||
0: sni
|
||||
16: alpn
|
||||
|
||||
sni:
|
||||
seq:
|
||||
- id: list_length
|
||||
type: u2
|
||||
|
||||
- id: server_names
|
||||
type: server_name
|
||||
repeat: eos
|
||||
|
||||
server_name:
|
||||
seq:
|
||||
- id: name_type
|
||||
type: u1
|
||||
|
||||
- id: length
|
||||
type: u2
|
||||
|
||||
- id: host_name
|
||||
size: length
|
||||
|
||||
alpn:
|
||||
seq:
|
||||
- id: ext_len
|
||||
type: u2
|
||||
|
||||
- id: alpn_protocols
|
||||
type: protocol
|
||||
repeat: eos
|
||||
|
||||
protocol:
|
||||
seq:
|
||||
- id: strlen
|
||||
type: u1
|
||||
|
||||
- id: name
|
||||
size: strlen
|
||||
@@ -0,0 +1,202 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
class DtlsClientHello(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.version = DtlsClientHello.Version(self._io, self, self._root)
|
||||
self.random = DtlsClientHello.Random(self._io, self, self._root)
|
||||
self.session_id = DtlsClientHello.SessionId(self._io, self, self._root)
|
||||
self.cookie = DtlsClientHello.Cookie(self._io, self, self._root)
|
||||
self.cipher_suites = DtlsClientHello.CipherSuites(self._io, self, self._root)
|
||||
self.compression_methods = DtlsClientHello.CompressionMethods(self._io, self, self._root)
|
||||
if self._io.is_eof() == False:
|
||||
self.extensions = DtlsClientHello.Extensions(self._io, self, self._root)
|
||||
|
||||
|
||||
class ServerName(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.name_type = self._io.read_u1()
|
||||
self.length = self._io.read_u2be()
|
||||
self.host_name = self._io.read_bytes(self.length)
|
||||
|
||||
|
||||
class Random(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.gmt_unix_time = self._io.read_u4be()
|
||||
self.random = self._io.read_bytes(28)
|
||||
|
||||
|
||||
class SessionId(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.len = self._io.read_u1()
|
||||
self.sid = self._io.read_bytes(self.len)
|
||||
|
||||
|
||||
class Sni(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.list_length = self._io.read_u2be()
|
||||
self.server_names = []
|
||||
i = 0
|
||||
while not self._io.is_eof():
|
||||
self.server_names.append(DtlsClientHello.ServerName(self._io, self, self._root))
|
||||
i += 1
|
||||
|
||||
|
||||
|
||||
class CipherSuites(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.len = self._io.read_u2be()
|
||||
self.cipher_suites = []
|
||||
for i in range(self.len // 2):
|
||||
self.cipher_suites.append(self._io.read_u2be())
|
||||
|
||||
|
||||
|
||||
class CompressionMethods(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.len = self._io.read_u1()
|
||||
self.compression_methods = self._io.read_bytes(self.len)
|
||||
|
||||
|
||||
class Alpn(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.ext_len = self._io.read_u2be()
|
||||
self.alpn_protocols = []
|
||||
i = 0
|
||||
while not self._io.is_eof():
|
||||
self.alpn_protocols.append(DtlsClientHello.Protocol(self._io, self, self._root))
|
||||
i += 1
|
||||
|
||||
|
||||
|
||||
class Extensions(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.len = self._io.read_u2be()
|
||||
self.extensions = []
|
||||
i = 0
|
||||
while not self._io.is_eof():
|
||||
self.extensions.append(DtlsClientHello.Extension(self._io, self, self._root))
|
||||
i += 1
|
||||
|
||||
|
||||
|
||||
class Version(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.major = self._io.read_u1()
|
||||
self.minor = self._io.read_u1()
|
||||
|
||||
|
||||
class Cookie(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.len = self._io.read_u1()
|
||||
self.cookie = self._io.read_bytes(self.len)
|
||||
|
||||
|
||||
class Protocol(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.strlen = self._io.read_u1()
|
||||
self.name = self._io.read_bytes(self.strlen)
|
||||
|
||||
|
||||
class Extension(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.type = self._io.read_u2be()
|
||||
self.len = self._io.read_u2be()
|
||||
_on = self.type
|
||||
if _on == 0:
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = DtlsClientHello.Sni(_io__raw_body, self, self._root)
|
||||
elif _on == 16:
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = DtlsClientHello.Alpn(_io__raw_body, self, self._root)
|
||||
else:
|
||||
self.body = self._io.read_bytes(self.len)
|
||||
|
||||
|
||||
|
||||
654
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/exif.py
Normal file
654
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/exif.py
Normal file
@@ -0,0 +1,654 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStream, KaitaiStruct
|
||||
from enum import Enum
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
class Exif(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.endianness = self._io.read_u2le()
|
||||
self.body = Exif.ExifBody(self._io, self, self._root)
|
||||
|
||||
class ExifBody(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
_on = self._root.endianness
|
||||
if _on == 18761:
|
||||
self._is_le = True
|
||||
elif _on == 19789:
|
||||
self._is_le = False
|
||||
if not hasattr(self, '_is_le'):
|
||||
raise kaitaistruct.UndecidedEndiannessError("/types/exif_body")
|
||||
elif self._is_le == True:
|
||||
self._read_le()
|
||||
elif self._is_le == False:
|
||||
self._read_be()
|
||||
|
||||
def _read_le(self):
|
||||
self.version = self._io.read_u2le()
|
||||
self.ifd0_ofs = self._io.read_u4le()
|
||||
|
||||
def _read_be(self):
|
||||
self.version = self._io.read_u2be()
|
||||
self.ifd0_ofs = self._io.read_u4be()
|
||||
|
||||
class Ifd(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None, _is_le=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._is_le = _is_le
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
if not hasattr(self, '_is_le'):
|
||||
raise kaitaistruct.UndecidedEndiannessError("/types/exif_body/types/ifd")
|
||||
elif self._is_le == True:
|
||||
self._read_le()
|
||||
elif self._is_le == False:
|
||||
self._read_be()
|
||||
|
||||
def _read_le(self):
|
||||
self.num_fields = self._io.read_u2le()
|
||||
self.fields = []
|
||||
for i in range(self.num_fields):
|
||||
self.fields.append(Exif.ExifBody.IfdField(self._io, self, self._root, self._is_le))
|
||||
|
||||
self.next_ifd_ofs = self._io.read_u4le()
|
||||
|
||||
def _read_be(self):
|
||||
self.num_fields = self._io.read_u2be()
|
||||
self.fields = []
|
||||
for i in range(self.num_fields):
|
||||
self.fields.append(Exif.ExifBody.IfdField(self._io, self, self._root, self._is_le))
|
||||
|
||||
self.next_ifd_ofs = self._io.read_u4be()
|
||||
|
||||
@property
|
||||
def next_ifd(self):
|
||||
if hasattr(self, '_m_next_ifd'):
|
||||
return self._m_next_ifd
|
||||
|
||||
if self.next_ifd_ofs != 0:
|
||||
_pos = self._io.pos()
|
||||
self._io.seek(self.next_ifd_ofs)
|
||||
if self._is_le:
|
||||
self._m_next_ifd = Exif.ExifBody.Ifd(self._io, self, self._root, self._is_le)
|
||||
else:
|
||||
self._m_next_ifd = Exif.ExifBody.Ifd(self._io, self, self._root, self._is_le)
|
||||
self._io.seek(_pos)
|
||||
|
||||
return getattr(self, '_m_next_ifd', None)
|
||||
|
||||
|
||||
class IfdField(KaitaiStruct):
|
||||
|
||||
class FieldTypeEnum(Enum):
|
||||
byte = 1
|
||||
ascii_string = 2
|
||||
word = 3
|
||||
dword = 4
|
||||
rational = 5
|
||||
undefined = 7
|
||||
slong = 9
|
||||
srational = 10
|
||||
|
||||
class TagEnum(Enum):
|
||||
image_width = 256
|
||||
image_height = 257
|
||||
bits_per_sample = 258
|
||||
compression = 259
|
||||
photometric_interpretation = 262
|
||||
thresholding = 263
|
||||
cell_width = 264
|
||||
cell_length = 265
|
||||
fill_order = 266
|
||||
document_name = 269
|
||||
image_description = 270
|
||||
make = 271
|
||||
model = 272
|
||||
strip_offsets = 273
|
||||
orientation = 274
|
||||
samples_per_pixel = 277
|
||||
rows_per_strip = 278
|
||||
strip_byte_counts = 279
|
||||
min_sample_value = 280
|
||||
max_sample_value = 281
|
||||
x_resolution = 282
|
||||
y_resolution = 283
|
||||
planar_configuration = 284
|
||||
page_name = 285
|
||||
x_position = 286
|
||||
y_position = 287
|
||||
free_offsets = 288
|
||||
free_byte_counts = 289
|
||||
gray_response_unit = 290
|
||||
gray_response_curve = 291
|
||||
t4_options = 292
|
||||
t6_options = 293
|
||||
resolution_unit = 296
|
||||
page_number = 297
|
||||
color_response_unit = 300
|
||||
transfer_function = 301
|
||||
software = 305
|
||||
modify_date = 306
|
||||
artist = 315
|
||||
host_computer = 316
|
||||
predictor = 317
|
||||
white_point = 318
|
||||
primary_chromaticities = 319
|
||||
color_map = 320
|
||||
halftone_hints = 321
|
||||
tile_width = 322
|
||||
tile_length = 323
|
||||
tile_offsets = 324
|
||||
tile_byte_counts = 325
|
||||
bad_fax_lines = 326
|
||||
clean_fax_data = 327
|
||||
consecutive_bad_fax_lines = 328
|
||||
sub_ifd = 330
|
||||
ink_set = 332
|
||||
ink_names = 333
|
||||
numberof_inks = 334
|
||||
dot_range = 336
|
||||
target_printer = 337
|
||||
extra_samples = 338
|
||||
sample_format = 339
|
||||
s_min_sample_value = 340
|
||||
s_max_sample_value = 341
|
||||
transfer_range = 342
|
||||
clip_path = 343
|
||||
x_clip_path_units = 344
|
||||
y_clip_path_units = 345
|
||||
indexed = 346
|
||||
jpeg_tables = 347
|
||||
opi_proxy = 351
|
||||
global_parameters_ifd = 400
|
||||
profile_type = 401
|
||||
fax_profile = 402
|
||||
coding_methods = 403
|
||||
version_year = 404
|
||||
mode_number = 405
|
||||
decode = 433
|
||||
default_image_color = 434
|
||||
t82_options = 435
|
||||
jpeg_tables2 = 437
|
||||
jpeg_proc = 512
|
||||
thumbnail_offset = 513
|
||||
thumbnail_length = 514
|
||||
jpeg_restart_interval = 515
|
||||
jpeg_lossless_predictors = 517
|
||||
jpeg_point_transforms = 518
|
||||
jpegq_tables = 519
|
||||
jpegdc_tables = 520
|
||||
jpegac_tables = 521
|
||||
y_cb_cr_coefficients = 529
|
||||
y_cb_cr_sub_sampling = 530
|
||||
y_cb_cr_positioning = 531
|
||||
reference_black_white = 532
|
||||
strip_row_counts = 559
|
||||
application_notes = 700
|
||||
uspto_miscellaneous = 999
|
||||
related_image_file_format = 4096
|
||||
related_image_width = 4097
|
||||
related_image_height = 4098
|
||||
rating = 18246
|
||||
xp_dip_xml = 18247
|
||||
stitch_info = 18248
|
||||
rating_percent = 18249
|
||||
sony_raw_file_type = 28672
|
||||
light_falloff_params = 28722
|
||||
chromatic_aberration_corr_params = 28725
|
||||
distortion_corr_params = 28727
|
||||
image_id = 32781
|
||||
wang_tag1 = 32931
|
||||
wang_annotation = 32932
|
||||
wang_tag3 = 32933
|
||||
wang_tag4 = 32934
|
||||
image_reference_points = 32953
|
||||
region_xform_tack_point = 32954
|
||||
warp_quadrilateral = 32955
|
||||
affine_transform_mat = 32956
|
||||
matteing = 32995
|
||||
data_type = 32996
|
||||
image_depth = 32997
|
||||
tile_depth = 32998
|
||||
image_full_width = 33300
|
||||
image_full_height = 33301
|
||||
texture_format = 33302
|
||||
wrap_modes = 33303
|
||||
fov_cot = 33304
|
||||
matrix_world_to_screen = 33305
|
||||
matrix_world_to_camera = 33306
|
||||
model2 = 33405
|
||||
cfa_repeat_pattern_dim = 33421
|
||||
cfa_pattern2 = 33422
|
||||
battery_level = 33423
|
||||
kodak_ifd = 33424
|
||||
copyright = 33432
|
||||
exposure_time = 33434
|
||||
f_number = 33437
|
||||
md_file_tag = 33445
|
||||
md_scale_pixel = 33446
|
||||
md_color_table = 33447
|
||||
md_lab_name = 33448
|
||||
md_sample_info = 33449
|
||||
md_prep_date = 33450
|
||||
md_prep_time = 33451
|
||||
md_file_units = 33452
|
||||
pixel_scale = 33550
|
||||
advent_scale = 33589
|
||||
advent_revision = 33590
|
||||
uic1_tag = 33628
|
||||
uic2_tag = 33629
|
||||
uic3_tag = 33630
|
||||
uic4_tag = 33631
|
||||
iptc_naa = 33723
|
||||
intergraph_packet_data = 33918
|
||||
intergraph_flag_registers = 33919
|
||||
intergraph_matrix = 33920
|
||||
ingr_reserved = 33921
|
||||
model_tie_point = 33922
|
||||
site = 34016
|
||||
color_sequence = 34017
|
||||
it8_header = 34018
|
||||
raster_padding = 34019
|
||||
bits_per_run_length = 34020
|
||||
bits_per_extended_run_length = 34021
|
||||
color_table = 34022
|
||||
image_color_indicator = 34023
|
||||
background_color_indicator = 34024
|
||||
image_color_value = 34025
|
||||
background_color_value = 34026
|
||||
pixel_intensity_range = 34027
|
||||
transparency_indicator = 34028
|
||||
color_characterization = 34029
|
||||
hc_usage = 34030
|
||||
trap_indicator = 34031
|
||||
cmyk_equivalent = 34032
|
||||
sem_info = 34118
|
||||
afcp_iptc = 34152
|
||||
pixel_magic_jbig_options = 34232
|
||||
jpl_carto_ifd = 34263
|
||||
model_transform = 34264
|
||||
wb_grgb_levels = 34306
|
||||
leaf_data = 34310
|
||||
photoshop_settings = 34377
|
||||
exif_offset = 34665
|
||||
icc_profile = 34675
|
||||
tiff_fx_extensions = 34687
|
||||
multi_profiles = 34688
|
||||
shared_data = 34689
|
||||
t88_options = 34690
|
||||
image_layer = 34732
|
||||
geo_tiff_directory = 34735
|
||||
geo_tiff_double_params = 34736
|
||||
geo_tiff_ascii_params = 34737
|
||||
jbig_options = 34750
|
||||
exposure_program = 34850
|
||||
spectral_sensitivity = 34852
|
||||
gps_info = 34853
|
||||
iso = 34855
|
||||
opto_electric_conv_factor = 34856
|
||||
interlace = 34857
|
||||
time_zone_offset = 34858
|
||||
self_timer_mode = 34859
|
||||
sensitivity_type = 34864
|
||||
standard_output_sensitivity = 34865
|
||||
recommended_exposure_index = 34866
|
||||
iso_speed = 34867
|
||||
iso_speed_latitudeyyy = 34868
|
||||
iso_speed_latitudezzz = 34869
|
||||
fax_recv_params = 34908
|
||||
fax_sub_address = 34909
|
||||
fax_recv_time = 34910
|
||||
fedex_edr = 34929
|
||||
leaf_sub_ifd = 34954
|
||||
exif_version = 36864
|
||||
date_time_original = 36867
|
||||
create_date = 36868
|
||||
google_plus_upload_code = 36873
|
||||
offset_time = 36880
|
||||
offset_time_original = 36881
|
||||
offset_time_digitized = 36882
|
||||
components_configuration = 37121
|
||||
compressed_bits_per_pixel = 37122
|
||||
shutter_speed_value = 37377
|
||||
aperture_value = 37378
|
||||
brightness_value = 37379
|
||||
exposure_compensation = 37380
|
||||
max_aperture_value = 37381
|
||||
subject_distance = 37382
|
||||
metering_mode = 37383
|
||||
light_source = 37384
|
||||
flash = 37385
|
||||
focal_length = 37386
|
||||
flash_energy = 37387
|
||||
spatial_frequency_response = 37388
|
||||
noise = 37389
|
||||
focal_plane_x_resolution = 37390
|
||||
focal_plane_y_resolution = 37391
|
||||
focal_plane_resolution_unit = 37392
|
||||
image_number = 37393
|
||||
security_classification = 37394
|
||||
image_history = 37395
|
||||
subject_area = 37396
|
||||
exposure_index = 37397
|
||||
tiff_ep_standard_id = 37398
|
||||
sensing_method = 37399
|
||||
cip3_data_file = 37434
|
||||
cip3_sheet = 37435
|
||||
cip3_side = 37436
|
||||
sto_nits = 37439
|
||||
maker_note = 37500
|
||||
user_comment = 37510
|
||||
sub_sec_time = 37520
|
||||
sub_sec_time_original = 37521
|
||||
sub_sec_time_digitized = 37522
|
||||
ms_document_text = 37679
|
||||
ms_property_set_storage = 37680
|
||||
ms_document_text_position = 37681
|
||||
image_source_data = 37724
|
||||
ambient_temperature = 37888
|
||||
humidity = 37889
|
||||
pressure = 37890
|
||||
water_depth = 37891
|
||||
acceleration = 37892
|
||||
camera_elevation_angle = 37893
|
||||
xp_title = 40091
|
||||
xp_comment = 40092
|
||||
xp_author = 40093
|
||||
xp_keywords = 40094
|
||||
xp_subject = 40095
|
||||
flashpix_version = 40960
|
||||
color_space = 40961
|
||||
exif_image_width = 40962
|
||||
exif_image_height = 40963
|
||||
related_sound_file = 40964
|
||||
interop_offset = 40965
|
||||
samsung_raw_pointers_offset = 40976
|
||||
samsung_raw_pointers_length = 40977
|
||||
samsung_raw_byte_order = 41217
|
||||
samsung_raw_unknown = 41218
|
||||
flash_energy2 = 41483
|
||||
spatial_frequency_response2 = 41484
|
||||
noise2 = 41485
|
||||
focal_plane_x_resolution2 = 41486
|
||||
focal_plane_y_resolution2 = 41487
|
||||
focal_plane_resolution_unit2 = 41488
|
||||
image_number2 = 41489
|
||||
security_classification2 = 41490
|
||||
image_history2 = 41491
|
||||
subject_location = 41492
|
||||
exposure_index2 = 41493
|
||||
tiff_ep_standard_id2 = 41494
|
||||
sensing_method2 = 41495
|
||||
file_source = 41728
|
||||
scene_type = 41729
|
||||
cfa_pattern = 41730
|
||||
custom_rendered = 41985
|
||||
exposure_mode = 41986
|
||||
white_balance = 41987
|
||||
digital_zoom_ratio = 41988
|
||||
focal_length_in35mm_format = 41989
|
||||
scene_capture_type = 41990
|
||||
gain_control = 41991
|
||||
contrast = 41992
|
||||
saturation = 41993
|
||||
sharpness = 41994
|
||||
device_setting_description = 41995
|
||||
subject_distance_range = 41996
|
||||
image_unique_id = 42016
|
||||
owner_name = 42032
|
||||
serial_number = 42033
|
||||
lens_info = 42034
|
||||
lens_make = 42035
|
||||
lens_model = 42036
|
||||
lens_serial_number = 42037
|
||||
gdal_metadata = 42112
|
||||
gdal_no_data = 42113
|
||||
gamma = 42240
|
||||
expand_software = 44992
|
||||
expand_lens = 44993
|
||||
expand_film = 44994
|
||||
expand_filter_lens = 44995
|
||||
expand_scanner = 44996
|
||||
expand_flash_lamp = 44997
|
||||
pixel_format = 48129
|
||||
transformation = 48130
|
||||
uncompressed = 48131
|
||||
image_type = 48132
|
||||
image_width2 = 48256
|
||||
image_height2 = 48257
|
||||
width_resolution = 48258
|
||||
height_resolution = 48259
|
||||
image_offset = 48320
|
||||
image_byte_count = 48321
|
||||
alpha_offset = 48322
|
||||
alpha_byte_count = 48323
|
||||
image_data_discard = 48324
|
||||
alpha_data_discard = 48325
|
||||
oce_scanjob_desc = 50215
|
||||
oce_application_selector = 50216
|
||||
oce_id_number = 50217
|
||||
oce_image_logic = 50218
|
||||
annotations = 50255
|
||||
print_im = 50341
|
||||
original_file_name = 50547
|
||||
uspto_original_content_type = 50560
|
||||
dng_version = 50706
|
||||
dng_backward_version = 50707
|
||||
unique_camera_model = 50708
|
||||
localized_camera_model = 50709
|
||||
cfa_plane_color = 50710
|
||||
cfa_layout = 50711
|
||||
linearization_table = 50712
|
||||
black_level_repeat_dim = 50713
|
||||
black_level = 50714
|
||||
black_level_delta_h = 50715
|
||||
black_level_delta_v = 50716
|
||||
white_level = 50717
|
||||
default_scale = 50718
|
||||
default_crop_origin = 50719
|
||||
default_crop_size = 50720
|
||||
color_matrix1 = 50721
|
||||
color_matrix2 = 50722
|
||||
camera_calibration1 = 50723
|
||||
camera_calibration2 = 50724
|
||||
reduction_matrix1 = 50725
|
||||
reduction_matrix2 = 50726
|
||||
analog_balance = 50727
|
||||
as_shot_neutral = 50728
|
||||
as_shot_white_xy = 50729
|
||||
baseline_exposure = 50730
|
||||
baseline_noise = 50731
|
||||
baseline_sharpness = 50732
|
||||
bayer_green_split = 50733
|
||||
linear_response_limit = 50734
|
||||
camera_serial_number = 50735
|
||||
dng_lens_info = 50736
|
||||
chroma_blur_radius = 50737
|
||||
anti_alias_strength = 50738
|
||||
shadow_scale = 50739
|
||||
sr2_private = 50740
|
||||
maker_note_safety = 50741
|
||||
raw_image_segmentation = 50752
|
||||
calibration_illuminant1 = 50778
|
||||
calibration_illuminant2 = 50779
|
||||
best_quality_scale = 50780
|
||||
raw_data_unique_id = 50781
|
||||
alias_layer_metadata = 50784
|
||||
original_raw_file_name = 50827
|
||||
original_raw_file_data = 50828
|
||||
active_area = 50829
|
||||
masked_areas = 50830
|
||||
as_shot_icc_profile = 50831
|
||||
as_shot_pre_profile_matrix = 50832
|
||||
current_icc_profile = 50833
|
||||
current_pre_profile_matrix = 50834
|
||||
colorimetric_reference = 50879
|
||||
s_raw_type = 50885
|
||||
panasonic_title = 50898
|
||||
panasonic_title2 = 50899
|
||||
camera_calibration_sig = 50931
|
||||
profile_calibration_sig = 50932
|
||||
profile_ifd = 50933
|
||||
as_shot_profile_name = 50934
|
||||
noise_reduction_applied = 50935
|
||||
profile_name = 50936
|
||||
profile_hue_sat_map_dims = 50937
|
||||
profile_hue_sat_map_data1 = 50938
|
||||
profile_hue_sat_map_data2 = 50939
|
||||
profile_tone_curve = 50940
|
||||
profile_embed_policy = 50941
|
||||
profile_copyright = 50942
|
||||
forward_matrix1 = 50964
|
||||
forward_matrix2 = 50965
|
||||
preview_application_name = 50966
|
||||
preview_application_version = 50967
|
||||
preview_settings_name = 50968
|
||||
preview_settings_digest = 50969
|
||||
preview_color_space = 50970
|
||||
preview_date_time = 50971
|
||||
raw_image_digest = 50972
|
||||
original_raw_file_digest = 50973
|
||||
sub_tile_block_size = 50974
|
||||
row_interleave_factor = 50975
|
||||
profile_look_table_dims = 50981
|
||||
profile_look_table_data = 50982
|
||||
opcode_list1 = 51008
|
||||
opcode_list2 = 51009
|
||||
opcode_list3 = 51022
|
||||
noise_profile = 51041
|
||||
time_codes = 51043
|
||||
frame_rate = 51044
|
||||
t_stop = 51058
|
||||
reel_name = 51081
|
||||
original_default_final_size = 51089
|
||||
original_best_quality_size = 51090
|
||||
original_default_crop_size = 51091
|
||||
camera_label = 51105
|
||||
profile_hue_sat_map_encoding = 51107
|
||||
profile_look_table_encoding = 51108
|
||||
baseline_exposure_offset = 51109
|
||||
default_black_render = 51110
|
||||
new_raw_image_digest = 51111
|
||||
raw_to_preview_gain = 51112
|
||||
default_user_crop = 51125
|
||||
padding = 59932
|
||||
offset_schema = 59933
|
||||
owner_name2 = 65000
|
||||
serial_number2 = 65001
|
||||
lens = 65002
|
||||
kdc_ifd = 65024
|
||||
raw_file = 65100
|
||||
converter = 65101
|
||||
white_balance2 = 65102
|
||||
exposure = 65105
|
||||
shadows = 65106
|
||||
brightness = 65107
|
||||
contrast2 = 65108
|
||||
saturation2 = 65109
|
||||
sharpness2 = 65110
|
||||
smoothness = 65111
|
||||
moire_filter = 65112
|
||||
def __init__(self, _io, _parent=None, _root=None, _is_le=None):
    # Generated kaitai-struct constructor: wire up the stream and the
    # object-tree links, remember the endianness flag, parse immediately.
    self._io = _io
    self._parent = _parent
    # Root defaults to self when this object is the top of the tree.
    self._root = _root if _root else self
    # _is_le: True = little-endian, False = big-endian, None = undecided.
    self._is_le = _is_le
    self._read()
|
||||
|
||||
def _read(self):
    # Dispatch parsing to the endian-specific reader.
    if not hasattr(self, '_is_le'):
        raise kaitaistruct.UndecidedEndiannessError("/types/exif_body/types/ifd_field")
    elif self._is_le == True:
        self._read_le()
    elif self._is_le == False:
        self._read_be()
    # NOTE(review): when _is_le is None neither branch runs and nothing is
    # read; the generated code relies on callers passing explicit True/False.
|
||||
|
||||
def _read_le(self):
    # Little-endian layout of one IFD field entry (12 bytes total):
    # tag (u2), field type (u2), element count (u4), value-or-offset (u4).
    self.tag = KaitaiStream.resolve_enum(Exif.ExifBody.IfdField.TagEnum, self._io.read_u2le())
    self.field_type = KaitaiStream.resolve_enum(Exif.ExifBody.IfdField.FieldTypeEnum, self._io.read_u2le())
    self.length = self._io.read_u4le()
    self.ofs_or_data = self._io.read_u4le()
|
||||
|
||||
def _read_be(self):
    # Big-endian layout of one IFD field entry; mirrors _read_le.
    self.tag = KaitaiStream.resolve_enum(Exif.ExifBody.IfdField.TagEnum, self._io.read_u2be())
    self.field_type = KaitaiStream.resolve_enum(Exif.ExifBody.IfdField.FieldTypeEnum, self._io.read_u2be())
    self.length = self._io.read_u4be()
    self.ofs_or_data = self._io.read_u4be()
|
||||
|
||||
@property
def type_byte_length(self):
    """Byte size of one element of this field's declared type.

    word -> 2 bytes, dword -> 4 bytes, every other type -> 1 byte.
    Memoized in ``_m_type_byte_length`` on first access.
    """
    if hasattr(self, '_m_type_byte_length'):
        return self._m_type_byte_length
    ftype = self.field_type
    if ftype == Exif.ExifBody.IfdField.FieldTypeEnum.word:
        size = 2
    elif ftype == Exif.ExifBody.IfdField.FieldTypeEnum.dword:
        size = 4
    else:
        size = 1
    self._m_type_byte_length = size
    return self._m_type_byte_length
|
||||
|
||||
@property
def byte_length(self):
    """Total payload size in bytes: element count times element size.

    Memoized in ``_m_byte_length`` on first access.
    """
    if hasattr(self, '_m_byte_length'):
        return self._m_byte_length
    self._m_byte_length = self.length * self.type_byte_length
    return self._m_byte_length
|
||||
|
||||
@property
def is_immediate_data(self):
    """True when the payload fits inside the 4-byte value slot itself
    (no separate offset dereference is needed). Memoized.
    """
    if not hasattr(self, '_m_is_immediate_data'):
        self._m_is_immediate_data = self.byte_length <= 4
    return self._m_is_immediate_data
|
||||
|
||||
@property
def data(self):
    """Payload bytes of this field, read lazily from the root stream.

    Only materialized when the data does not fit in the immediate
    4-byte slot; otherwise returns None (callers use ``ofs_or_data``
    directly in that case). The root stream position is saved and
    restored around the out-of-line read.
    """
    if hasattr(self, '_m_data'):
        return self._m_data

    if not (self.is_immediate_data):
        io = self._root._io
        _pos = io.pos()
        io.seek(self.ofs_or_data)
        # Both endianness branches of the generated code performed the
        # identical byte read, so the redundant if/else is collapsed.
        self._m_data = io.read_bytes(self.byte_length)
        io.seek(_pos)

    return getattr(self, '_m_data', None)
|
||||
|
||||
|
||||
@property
def ifd0(self):
    """First image file directory, parsed lazily at ``ifd0_ofs``.

    Saves and restores the stream position around the seek. Memoized in
    ``_m_ifd0``.
    """
    if hasattr(self, '_m_ifd0'):
        return self._m_ifd0

    _pos = self._io.pos()
    self._io.seek(self.ifd0_ofs)
    # Endianness is forwarded via _is_le; both generated endian branches
    # were byte-identical, so the redundant if/else is collapsed.
    self._m_ifd0 = Exif.ExifBody.Ifd(self._io, self, self._root, self._is_le)
    self._io.seek(_pos)
    return getattr(self, '_m_ifd0', None)
|
||||
|
||||
|
||||
|
||||
357
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/gif.py
Normal file
357
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/gif.py
Normal file
@@ -0,0 +1,357 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
from enum import Enum
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
class Gif(KaitaiStruct):
|
||||
"""GIF (Graphics Interchange Format) is an image file format, developed
|
||||
in 1987. It became popular in 1990s as one of the main image formats
|
||||
used in World Wide Web.
|
||||
|
||||
GIF format allows encoding of palette-based images up to 256 colors
|
||||
(each of the colors can be chosen from a 24-bit RGB
|
||||
colorspace). Image data stream uses LZW (Lempel-Ziv-Welch) lossless
|
||||
compression.
|
||||
|
||||
Over the years, several version of the format were published and
|
||||
several extensions to it were made, namely, a popular Netscape
|
||||
extension that allows to store several images in one file, switching
|
||||
between them, which produces crude form of animation.
|
||||
|
||||
Structurally, format consists of several mandatory headers and then
|
||||
a stream of blocks follows. Blocks can carry additional
|
||||
metainformation or image data.
|
||||
"""
|
||||
|
||||
class BlockType(Enum):
    # Introducer byte of each top-level GIF block.
    extension = 33
    local_image_descriptor = 44
    end_of_file = 59
|
||||
|
||||
class ExtensionLabel(Enum):
    # Label byte that follows the extension block introducer.
    graphic_control = 249
    comment = 254
    application = 255
|
||||
def __init__(self, _io, _parent=None, _root=None):
    # Generated kaitai-struct constructor: record the stream and the
    # object-tree links, then parse the whole GIF eagerly.
    self._io = _io
    self._parent = _parent
    self._root = _root if _root else self
    self._read()
|
||||
|
||||
def _read(self):
    # Fixed header ("GIF" magic + version), then the logical screen
    # descriptor carrying canvas size and global-palette flags.
    self.hdr = Gif.Header(self._io, self, self._root)
    self.logical_screen_descriptor = Gif.LogicalScreenDescriptorStruct(self._io, self, self._root)
    if self.logical_screen_descriptor.has_color_table:
        # Global palette: 3 bytes (RGB) per entry, parsed from a substream.
        self._raw_global_color_table = self._io.read_bytes((self.logical_screen_descriptor.color_table_size * 3))
        _io__raw_global_color_table = KaitaiStream(BytesIO(self._raw_global_color_table))
        self.global_color_table = Gif.ColorTable(_io__raw_global_color_table, self, self._root)

    # Consume blocks until the trailer block or end of stream.
    self.blocks = []
    i = 0
    while True:
        _ = Gif.Block(self._io, self, self._root)
        self.blocks.append(_)
        if ((self._io.is_eof()) or (_.block_type == Gif.BlockType.end_of_file)) :
            break
        i += 1
|
||||
|
||||
class ImageData(KaitaiStruct):
    """LZW-compressed pixel data: a minimum code size byte, then subblocks.

    .. seealso::
       - section 22 - https://www.w3.org/Graphics/GIF/spec-gif89a.txt
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Initial LZW code size used by the decompressor.
        self.lzw_min_code_size = self._io.read_u1()
        self.subblocks = Gif.Subblocks(self._io, self, self._root)
|
||||
|
||||
|
||||
class ColorTableEntry(KaitaiStruct):
    """A single RGB palette entry: one unsigned byte per channel."""

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Channels are stored in R, G, B order, one u1 each.
        for channel in ("red", "green", "blue"):
            setattr(self, channel, self._io.read_u1())
|
||||
|
||||
|
||||
class LogicalScreenDescriptorStruct(KaitaiStruct):
    """Logical Screen Descriptor: canvas size and global palette metadata.

    .. seealso::
       - section 18 - https://www.w3.org/Graphics/GIF/spec-gif89a.txt
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.screen_width = self._io.read_u2le()
        self.screen_height = self._io.read_u2le()
        # Packed bit field: global-palette flag, color resolution,
        # sort flag and palette size exponent.
        self.flags = self._io.read_u1()
        self.bg_color_index = self._io.read_u1()
        self.pixel_aspect_ratio = self._io.read_u1()

    @property
    def has_color_table(self):
        # Bit 7 of flags: a global color table follows this descriptor.
        if hasattr(self, '_m_has_color_table'):
            return self._m_has_color_table

        self._m_has_color_table = (self.flags & 128) != 0
        return getattr(self, '_m_has_color_table', None)

    @property
    def color_table_size(self):
        # Number of palette entries: 2 ** ((flags & 7) + 1).
        if hasattr(self, '_m_color_table_size'):
            return self._m_color_table_size

        self._m_color_table_size = (2 << (self.flags & 7))
        return getattr(self, '_m_color_table_size', None)
|
||||
|
||||
|
||||
class LocalImageDescriptor(KaitaiStruct):
    """Per-frame descriptor: placement, size, optional local palette, pixels."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.left = self._io.read_u2le()
        self.top = self._io.read_u2le()
        self.width = self._io.read_u2le()
        self.height = self._io.read_u2le()
        # Packed bit field: local palette / interlace / sort flags + size.
        self.flags = self._io.read_u1()
        if self.has_color_table:
            # A local palette, 3 bytes per entry, parsed from a substream.
            self._raw_local_color_table = self._io.read_bytes((self.color_table_size * 3))
            _io__raw_local_color_table = KaitaiStream(BytesIO(self._raw_local_color_table))
            self.local_color_table = Gif.ColorTable(_io__raw_local_color_table, self, self._root)

        self.image_data = Gif.ImageData(self._io, self, self._root)

    @property
    def has_color_table(self):
        # Bit 7 of flags: a local color table follows the descriptor.
        if hasattr(self, '_m_has_color_table'):
            return self._m_has_color_table

        self._m_has_color_table = (self.flags & 128) != 0
        return getattr(self, '_m_has_color_table', None)

    @property
    def has_interlace(self):
        # Bit 6 of flags: rows stored in the four-pass interlace order.
        if hasattr(self, '_m_has_interlace'):
            return self._m_has_interlace

        self._m_has_interlace = (self.flags & 64) != 0
        return getattr(self, '_m_has_interlace', None)

    @property
    def has_sorted_color_table(self):
        # Bit 5 of flags: palette sorted by decreasing importance.
        if hasattr(self, '_m_has_sorted_color_table'):
            return self._m_has_sorted_color_table

        self._m_has_sorted_color_table = (self.flags & 32) != 0
        return getattr(self, '_m_has_sorted_color_table', None)

    @property
    def color_table_size(self):
        # Number of local palette entries: 2 ** ((flags & 7) + 1).
        if hasattr(self, '_m_color_table_size'):
            return self._m_color_table_size

        self._m_color_table_size = (2 << (self.flags & 7))
        return getattr(self, '_m_color_table_size', None)
|
||||
|
||||
|
||||
class Block(KaitaiStruct):
    """One top-level GIF block: a type byte plus an optional typed body."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.block_type = KaitaiStream.resolve_enum(Gif.BlockType, self._io.read_u1())
        _on = self.block_type
        if _on == Gif.BlockType.extension:
            self.body = Gif.Extension(self._io, self, self._root)
        elif _on == Gif.BlockType.local_image_descriptor:
            self.body = Gif.LocalImageDescriptor(self._io, self, self._root)
        # NOTE: the end_of_file trailer has no body; `body` stays unset then.
|
||||
|
||||
|
||||
class ColorTable(KaitaiStruct):
    """Sequence of RGB entries filling its whole (sub)stream.

    .. seealso::
       - section 19 - https://www.w3.org/Graphics/GIF/spec-gif89a.txt
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Parse 3-byte entries until the substream is exhausted.
        self.entries = []
        i = 0
        while not self._io.is_eof():
            self.entries.append(Gif.ColorTableEntry(self._io, self, self._root))
            i += 1
|
||||
|
||||
|
||||
|
||||
class Header(KaitaiStruct):
    """File header: "GIF" magic followed by a 3-character ASCII version.

    .. seealso::
       - section 17 - https://www.w3.org/Graphics/GIF/spec-gif89a.txt
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Magic bytes spell "GIF"; anything else fails validation.
        self.magic = self._io.read_bytes(3)
        if not self.magic == b"\x47\x49\x46":
            raise kaitaistruct.ValidationNotEqualError(b"\x47\x49\x46", self.magic, self._io, u"/types/header/seq/0")
        self.version = (self._io.read_bytes(3)).decode(u"ASCII")
|
||||
|
||||
|
||||
class ExtGraphicControl(KaitaiStruct):
    """Graphic Control Extension: rendering hints for the following frame.

    .. seealso::
       - section 23 - https://www.w3.org/Graphics/GIF/spec-gif89a.txt
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Payload size is fixed at 4 bytes for this extension.
        self.block_size = self._io.read_bytes(1)
        if not self.block_size == b"\x04":
            raise kaitaistruct.ValidationNotEqualError(b"\x04", self.block_size, self._io, u"/types/ext_graphic_control/seq/0")
        self.flags = self._io.read_u1()
        self.delay_time = self._io.read_u2le()
        self.transparent_idx = self._io.read_u1()
        # Zero byte terminates the extension.
        self.terminator = self._io.read_bytes(1)
        if not self.terminator == b"\x00":
            raise kaitaistruct.ValidationNotEqualError(b"\x00", self.terminator, self._io, u"/types/ext_graphic_control/seq/4")

    @property
    def transparent_color_flag(self):
        # Bit 0 of flags: transparent_idx is meaningful when set.
        if hasattr(self, '_m_transparent_color_flag'):
            return self._m_transparent_color_flag

        self._m_transparent_color_flag = (self.flags & 1) != 0
        return getattr(self, '_m_transparent_color_flag', None)

    @property
    def user_input_flag(self):
        # Bit 1 of flags: wait for user input before advancing.
        if hasattr(self, '_m_user_input_flag'):
            return self._m_user_input_flag

        self._m_user_input_flag = (self.flags & 2) != 0
        return getattr(self, '_m_user_input_flag', None)
|
||||
|
||||
|
||||
class Subblock(KaitaiStruct):
    """Length-prefixed data subblock; length 0 ends a subblock chain."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.len_bytes = self._io.read_u1()
        self.bytes = self._io.read_bytes(self.len_bytes)
|
||||
|
||||
|
||||
class ApplicationId(KaitaiStruct):
    """Application extension header: 8-char identifier + 3-byte auth code."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Length byte must be 11: 8 bytes identifier + 3 bytes auth code.
        self.len_bytes = self._io.read_u1()
        if not self.len_bytes == 11:
            raise kaitaistruct.ValidationNotEqualError(11, self.len_bytes, self._io, u"/types/application_id/seq/0")
        self.application_identifier = (self._io.read_bytes(8)).decode(u"ASCII")
        self.application_auth_code = self._io.read_bytes(3)
|
||||
|
||||
|
||||
class ExtApplication(KaitaiStruct):
    """Application extension: id header plus subblocks up to a zero-length one."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.application_id = Gif.ApplicationId(self._io, self, self._root)
        # Collect subblocks; the zero-length subblock terminates the chain
        # and is included in the list.
        self.subblocks = []
        i = 0
        while True:
            _ = Gif.Subblock(self._io, self, self._root)
            self.subblocks.append(_)
            if _.len_bytes == 0:
                break
            i += 1
|
||||
|
||||
|
||||
class Subblocks(KaitaiStruct):
    """Chain of subblocks, terminated by (and including) a zero-length one."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.entries = []
        i = 0
        while True:
            _ = Gif.Subblock(self._io, self, self._root)
            self.entries.append(_)
            if _.len_bytes == 0:
                break
            i += 1
|
||||
|
||||
|
||||
class Extension(KaitaiStruct):
    """Extension block: a label byte selects the typed body that follows."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.label = KaitaiStream.resolve_enum(Gif.ExtensionLabel, self._io.read_u1())
        _on = self.label
        if _on == Gif.ExtensionLabel.application:
            self.body = Gif.ExtApplication(self._io, self, self._root)
        elif _on == Gif.ExtensionLabel.comment:
            self.body = Gif.Subblocks(self._io, self, self._root)
        elif _on == Gif.ExtensionLabel.graphic_control:
            self.body = Gif.ExtGraphicControl(self._io, self, self._root)
        else:
            # Unknown labels are still well-formed subblock chains, so they
            # can be skipped without understanding their contents.
            self.body = Gif.Subblocks(self._io, self, self._root)
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,126 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStream, KaitaiStruct
|
||||
from enum import Enum
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
from . import vlq_base128_le
|
||||
class GoogleProtobuf(KaitaiStruct):
|
||||
"""Google Protocol Buffers (AKA protobuf) is a popular data
|
||||
serialization scheme used for communication protocols, data storage,
|
||||
etc. There are implementations are available for almost every
|
||||
popular language. The focus points of this scheme are brevity (data
|
||||
is encoded in a very size-efficient manner) and extensibility (one
|
||||
can add keys to the structure, while keeping it readable in previous
|
||||
version of software).
|
||||
|
||||
Protobuf uses semi-self-describing encoding scheme for its
|
||||
messages. It means that it is possible to parse overall structure of
|
||||
the message (skipping over fields one can't understand), but to
|
||||
fully understand the message, one needs a protocol definition file
|
||||
(`.proto`). To be specific:
|
||||
|
||||
* "Keys" in key-value pairs provided in the message are identified
|
||||
only with an integer "field tag". `.proto` file provides info on
|
||||
which symbolic field names these field tags map to.
|
||||
* "Keys" also provide something called "wire type". It's not a data
|
||||
type in its common sense (i.e. you can't, for example, distinguish
|
||||
`sint32` vs `uint32` vs some enum, or `string` from `bytes`), but
|
||||
it's enough information to determine how many bytes to
|
||||
parse. Interpretation of the value should be done according to the
|
||||
type specified in `.proto` file.
|
||||
* There's no direct information on which fields are optional /
|
||||
required, which fields may be repeated or constitute a map, what
|
||||
restrictions are placed on fields usage in a single message, what
|
||||
are the fields' default values, etc, etc.
|
||||
|
||||
.. seealso::
|
||||
Source - https://developers.google.com/protocol-buffers/docs/encoding
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
    # Generated kaitai-struct constructor; parses the stream eagerly.
    self._io = _io
    self._parent = _parent
    self._root = _root if _root else self
    self._read()
|
||||
|
||||
def _read(self):
    # A protobuf message is a flat sequence of key-value pairs, read
    # until the stream is exhausted.
    self.pairs = []
    i = 0
    while not self._io.is_eof():
        self.pairs.append(GoogleProtobuf.Pair(self._io, self, self._root))
        i += 1
|
||||
|
||||
|
||||
class Pair(KaitaiStruct):
    """Key-value pair."""

    class WireTypes(Enum):
        # Low 3 bits of the key varint; selects how the value is read.
        varint = 0
        bit_64 = 1
        len_delimited = 2
        group_start = 3
        group_end = 4
        bit_32 = 5
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # The key varint packs the field tag (upper bits) and the wire
        # type (low 3 bits); see the wire_type / field_tag properties.
        self.key = vlq_base128_le.VlqBase128Le(self._io)
        _on = self.wire_type
        if _on == GoogleProtobuf.Pair.WireTypes.varint:
            self.value = vlq_base128_le.VlqBase128Le(self._io)
        elif _on == GoogleProtobuf.Pair.WireTypes.len_delimited:
            self.value = GoogleProtobuf.DelimitedBytes(self._io, self, self._root)
        elif _on == GoogleProtobuf.Pair.WireTypes.bit_64:
            self.value = self._io.read_u8le()
        elif _on == GoogleProtobuf.Pair.WireTypes.bit_32:
            self.value = self._io.read_u4le()
        # NOTE: deprecated group_start/group_end wire types leave `value` unset.

    @property
    def wire_type(self):
        """"Wire type" is a part of the "key" that carries enough
        information to parse value from the wire, i.e. read correct
        amount of bytes, but there's not enough information to
        interpret it unambiguously. For example, one can't clearly
        distinguish 64-bit fixed-sized integers from 64-bit floats,
        signed zigzag-encoded varints from regular unsigned varints,
        arbitrary bytes from UTF-8 encoded strings, etc.
        """
        if hasattr(self, '_m_wire_type'):
            return self._m_wire_type

        self._m_wire_type = KaitaiStream.resolve_enum(GoogleProtobuf.Pair.WireTypes, (self.key.value & 7))
        return getattr(self, '_m_wire_type', None)

    @property
    def field_tag(self):
        """Identifies a field of protocol. One can look up symbolic
        field name in a `.proto` file by this field tag.
        """
        if hasattr(self, '_m_field_tag'):
            return self._m_field_tag

        self._m_field_tag = (self.key.value >> 3)
        return getattr(self, '_m_field_tag', None)
|
||||
|
||||
|
||||
class DelimitedBytes(KaitaiStruct):
    """Length-delimited value: a varint byte count, then that many bytes."""
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.len = vlq_base128_le.VlqBase128Le(self._io)
        self.body = self._io.read_bytes(self.len.value)
|
||||
|
||||
|
||||
|
||||
93
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/ico.py
Normal file
93
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/ico.py
Normal file
@@ -0,0 +1,93 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStruct
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
class Ico(KaitaiStruct):
|
||||
"""Microsoft Windows uses specific file format to store applications
|
||||
icons - ICO. This is a container that contains one or more image
|
||||
files (effectively, DIB parts of BMP files or full PNG files are
|
||||
contained inside).
|
||||
|
||||
.. seealso::
|
||||
Source - https://docs.microsoft.com/en-us/previous-versions/ms997538(v=msdn.10)
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
    # Generated kaitai-struct constructor; parses the directory eagerly.
    self._io = _io
    self._parent = _parent
    self._root = _root if _root else self
    self._read()
|
||||
|
||||
def _read(self):
    # ICONDIR header: fixed reserved/type magic, then the entry count
    # and that many directory entries.
    self.magic = self._io.read_bytes(4)
    if not self.magic == b"\x00\x00\x01\x00":
        raise kaitaistruct.ValidationNotEqualError(b"\x00\x00\x01\x00", self.magic, self._io, u"/seq/0")
    self.num_images = self._io.read_u2le()
    self.images = []
    for i in range(self.num_images):
        self.images.append(Ico.IconDirEntry(self._io, self, self._root))
|
||||
|
||||
|
||||
class IconDirEntry(KaitaiStruct):
    """One icon directory entry: image metadata plus the offset and length
    of its payload, which is read lazily via the `img` property.
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Raw width/height bytes are stored unchanged (a value of 0
        # presumably means 256 per the ICO convention — not interpreted here).
        self.width = self._io.read_u1()
        self.height = self._io.read_u1()
        self.num_colors = self._io.read_u1()
        self.reserved = self._io.read_bytes(1)
        if not self.reserved == b"\x00":
            raise kaitaistruct.ValidationNotEqualError(b"\x00", self.reserved, self._io, u"/types/icon_dir_entry/seq/3")
        self.num_planes = self._io.read_u2le()
        self.bpp = self._io.read_u2le()
        self.len_img = self._io.read_u4le()
        self.ofs_img = self._io.read_u4le()

    @property
    def img(self):
        """Raw image data. Use `is_png` to determine whether this is an
        embedded PNG file (true) or a DIB bitmap (false) and call a
        relevant parser, if needed to parse image data further.
        """
        if hasattr(self, '_m_img'):
            return self._m_img

        # Lazy read: save position, jump to the payload, restore.
        _pos = self._io.pos()
        self._io.seek(self.ofs_img)
        self._m_img = self._io.read_bytes(self.len_img)
        self._io.seek(_pos)
        return getattr(self, '_m_img', None)

    @property
    def png_header(self):
        """Pre-reads first 8 bytes of the image to determine if it's an
        embedded PNG file.
        """
        if hasattr(self, '_m_png_header'):
            return self._m_png_header

        _pos = self._io.pos()
        self._io.seek(self.ofs_img)
        self._m_png_header = self._io.read_bytes(8)
        self._io.seek(_pos)
        return getattr(self, '_m_png_header', None)

    @property
    def is_png(self):
        """True if this image is in PNG format."""
        if hasattr(self, '_m_is_png'):
            return self._m_is_png

        # Compare against the fixed 8-byte PNG file signature.
        self._m_is_png = self.png_header == b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A"
        return getattr(self, '_m_is_png', None)
|
||||
|
||||
|
||||
|
||||
258
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/jpeg.py
Normal file
258
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/jpeg.py
Normal file
@@ -0,0 +1,258 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
from enum import Enum
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
from . import exif
|
||||
class Jpeg(KaitaiStruct):
|
||||
"""JPEG File Interchange Format, or JFIF, or, more colloquially known
|
||||
as just "JPEG" or "JPG", is a popular 2D bitmap image file format,
|
||||
offering lossy compression which works reasonably well with
|
||||
photographic images.
|
||||
|
||||
Format is organized as a container format, serving multiple
|
||||
"segments", each starting with a magic and a marker. JFIF standard
|
||||
dictates order and mandatory apperance of segments:
|
||||
|
||||
* SOI
|
||||
* APP0 (with JFIF magic)
|
||||
* APP0 (with JFXX magic, optional)
|
||||
* everything else
|
||||
* SOS
|
||||
* JPEG-compressed stream
|
||||
* EOI
|
||||
"""
|
||||
|
||||
class ComponentId(Enum):
|
||||
y = 1
|
||||
cb = 2
|
||||
cr = 3
|
||||
i = 4
|
||||
q = 5
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.segments = []
|
||||
i = 0
|
||||
while not self._io.is_eof():
|
||||
self.segments.append(Jpeg.Segment(self._io, self, self._root))
|
||||
i += 1
|
||||
|
||||
|
||||
class Segment(KaitaiStruct):
|
||||
|
||||
class MarkerEnum(Enum):
|
||||
tem = 1
|
||||
sof0 = 192
|
||||
sof1 = 193
|
||||
sof2 = 194
|
||||
sof3 = 195
|
||||
dht = 196
|
||||
sof5 = 197
|
||||
sof6 = 198
|
||||
sof7 = 199
|
||||
soi = 216
|
||||
eoi = 217
|
||||
sos = 218
|
||||
dqt = 219
|
||||
dnl = 220
|
||||
dri = 221
|
||||
dhp = 222
|
||||
app0 = 224
|
||||
app1 = 225
|
||||
app2 = 226
|
||||
app3 = 227
|
||||
app4 = 228
|
||||
app5 = 229
|
||||
app6 = 230
|
||||
app7 = 231
|
||||
app8 = 232
|
||||
app9 = 233
|
||||
app10 = 234
|
||||
app11 = 235
|
||||
app12 = 236
|
||||
app13 = 237
|
||||
app14 = 238
|
||||
app15 = 239
|
||||
com = 254
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.magic = self._io.read_bytes(1)
|
||||
if not self.magic == b"\xFF":
|
||||
raise kaitaistruct.ValidationNotEqualError(b"\xFF", self.magic, self._io, u"/types/segment/seq/0")
|
||||
self.marker = KaitaiStream.resolve_enum(Jpeg.Segment.MarkerEnum, self._io.read_u1())
|
||||
if ((self.marker != Jpeg.Segment.MarkerEnum.soi) and (self.marker != Jpeg.Segment.MarkerEnum.eoi)) :
|
||||
self.length = self._io.read_u2be()
|
||||
|
||||
if ((self.marker != Jpeg.Segment.MarkerEnum.soi) and (self.marker != Jpeg.Segment.MarkerEnum.eoi)) :
|
||||
_on = self.marker
|
||||
if _on == Jpeg.Segment.MarkerEnum.app1:
|
||||
self._raw_data = self._io.read_bytes((self.length - 2))
|
||||
_io__raw_data = KaitaiStream(BytesIO(self._raw_data))
|
||||
self.data = Jpeg.SegmentApp1(_io__raw_data, self, self._root)
|
||||
elif _on == Jpeg.Segment.MarkerEnum.app0:
|
||||
self._raw_data = self._io.read_bytes((self.length - 2))
|
||||
_io__raw_data = KaitaiStream(BytesIO(self._raw_data))
|
||||
self.data = Jpeg.SegmentApp0(_io__raw_data, self, self._root)
|
||||
elif _on == Jpeg.Segment.MarkerEnum.sof0:
|
||||
self._raw_data = self._io.read_bytes((self.length - 2))
|
||||
_io__raw_data = KaitaiStream(BytesIO(self._raw_data))
|
||||
self.data = Jpeg.SegmentSof0(_io__raw_data, self, self._root)
|
||||
elif _on == Jpeg.Segment.MarkerEnum.sos:
|
||||
self._raw_data = self._io.read_bytes((self.length - 2))
|
||||
_io__raw_data = KaitaiStream(BytesIO(self._raw_data))
|
||||
self.data = Jpeg.SegmentSos(_io__raw_data, self, self._root)
|
||||
else:
|
||||
self.data = self._io.read_bytes((self.length - 2))
|
||||
|
||||
if self.marker == Jpeg.Segment.MarkerEnum.sos:
|
||||
self.image_data = self._io.read_bytes_full()
|
||||
|
||||
|
||||
|
||||
class SegmentSos(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.num_components = self._io.read_u1()
|
||||
self.components = []
|
||||
for i in range(self.num_components):
|
||||
self.components.append(Jpeg.SegmentSos.Component(self._io, self, self._root))
|
||||
|
||||
self.start_spectral_selection = self._io.read_u1()
|
||||
self.end_spectral = self._io.read_u1()
|
||||
self.appr_bit_pos = self._io.read_u1()
|
||||
|
||||
class Component(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.id = KaitaiStream.resolve_enum(Jpeg.ComponentId, self._io.read_u1())
|
||||
self.huffman_table = self._io.read_u1()
|
||||
|
||||
|
||||
|
||||
class SegmentApp1(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.magic = (self._io.read_bytes_term(0, False, True, True)).decode(u"ASCII")
|
||||
_on = self.magic
|
||||
if _on == u"Exif":
|
||||
self.body = Jpeg.ExifInJpeg(self._io, self, self._root)
|
||||
|
||||
|
||||
class SegmentSof0(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.bits_per_sample = self._io.read_u1()
|
||||
self.image_height = self._io.read_u2be()
|
||||
self.image_width = self._io.read_u2be()
|
||||
self.num_components = self._io.read_u1()
|
||||
self.components = []
|
||||
for i in range(self.num_components):
|
||||
self.components.append(Jpeg.SegmentSof0.Component(self._io, self, self._root))
|
||||
|
||||
|
||||
class Component(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.id = KaitaiStream.resolve_enum(Jpeg.ComponentId, self._io.read_u1())
|
||||
self.sampling_factors = self._io.read_u1()
|
||||
self.quantization_table_id = self._io.read_u1()
|
||||
|
||||
@property
|
||||
def sampling_x(self):
|
||||
if hasattr(self, '_m_sampling_x'):
|
||||
return self._m_sampling_x
|
||||
|
||||
self._m_sampling_x = ((self.sampling_factors & 240) >> 4)
|
||||
return getattr(self, '_m_sampling_x', None)
|
||||
|
||||
@property
|
||||
def sampling_y(self):
|
||||
if hasattr(self, '_m_sampling_y'):
|
||||
return self._m_sampling_y
|
||||
|
||||
self._m_sampling_y = (self.sampling_factors & 15)
|
||||
return getattr(self, '_m_sampling_y', None)
|
||||
|
||||
|
||||
|
||||
class ExifInJpeg(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.extra_zero = self._io.read_bytes(1)
|
||||
if not self.extra_zero == b"\x00":
|
||||
raise kaitaistruct.ValidationNotEqualError(b"\x00", self.extra_zero, self._io, u"/types/exif_in_jpeg/seq/0")
|
||||
self._raw_data = self._io.read_bytes_full()
|
||||
_io__raw_data = KaitaiStream(BytesIO(self._raw_data))
|
||||
self.data = exif.Exif(_io__raw_data)
|
||||
|
||||
|
||||
class SegmentApp0(KaitaiStruct):
|
||||
|
||||
class DensityUnit(Enum):
|
||||
no_units = 0
|
||||
pixels_per_inch = 1
|
||||
pixels_per_cm = 2
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.magic = (self._io.read_bytes(5)).decode(u"ASCII")
|
||||
self.version_major = self._io.read_u1()
|
||||
self.version_minor = self._io.read_u1()
|
||||
self.density_units = KaitaiStream.resolve_enum(Jpeg.SegmentApp0.DensityUnit, self._io.read_u1())
|
||||
self.density_x = self._io.read_u2be()
|
||||
self.density_y = self._io.read_u2be()
|
||||
self.thumbnail_x = self._io.read_u1()
|
||||
self.thumbnail_y = self._io.read_u1()
|
||||
self.thumbnail = self._io.read_bytes(((self.thumbnail_x * self.thumbnail_y) * 3))
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
|
||||
wget -N https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/image/exif.ksy
|
||||
wget -N https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/image/gif.ksy
|
||||
wget -N https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/image/jpeg.ksy
|
||||
wget -N https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/image/png.ksy
|
||||
wget -N https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/image/ico.ksy
|
||||
wget -N -P common/ https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/common/vlq_base128_le.ksy
|
||||
wget -N https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/serialization/google_protobuf.ksy
|
||||
wget -N https://raw.githubusercontent.com/kaitai-io/kaitai_struct_formats/master/network/tls_client_hello.ksy
|
||||
|
||||
kaitai-struct-compiler --target python --opaque-types=true -I . --python-package . ./*.ksy
|
||||
527
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/png.py
Normal file
527
venv/Lib/site-packages/mitmproxy/contrib/kaitaistruct/png.py
Normal file
@@ -0,0 +1,527 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
from enum import Enum
|
||||
import zlib
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
class Png(KaitaiStruct):
|
||||
"""Test files for APNG can be found at the following locations:
|
||||
|
||||
* <https://philip.html5.org/tests/apng/tests.html>
|
||||
* <http://littlesvr.ca/apng/>
|
||||
"""
|
||||
|
||||
class PhysUnit(Enum):
|
||||
unknown = 0
|
||||
meter = 1
|
||||
|
||||
class BlendOpValues(Enum):
|
||||
source = 0
|
||||
over = 1
|
||||
|
||||
class CompressionMethods(Enum):
|
||||
zlib = 0
|
||||
|
||||
class DisposeOpValues(Enum):
|
||||
none = 0
|
||||
background = 1
|
||||
previous = 2
|
||||
|
||||
class ColorType(Enum):
|
||||
greyscale = 0
|
||||
truecolor = 2
|
||||
indexed = 3
|
||||
greyscale_alpha = 4
|
||||
truecolor_alpha = 6
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.magic = self._io.read_bytes(8)
|
||||
if not self.magic == b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A":
|
||||
raise kaitaistruct.ValidationNotEqualError(b"\x89\x50\x4E\x47\x0D\x0A\x1A\x0A", self.magic, self._io, u"/seq/0")
|
||||
self.ihdr_len = self._io.read_u4be()
|
||||
if not self.ihdr_len == 13:
|
||||
raise kaitaistruct.ValidationNotEqualError(13, self.ihdr_len, self._io, u"/seq/1")
|
||||
self.ihdr_type = self._io.read_bytes(4)
|
||||
if not self.ihdr_type == b"\x49\x48\x44\x52":
|
||||
raise kaitaistruct.ValidationNotEqualError(b"\x49\x48\x44\x52", self.ihdr_type, self._io, u"/seq/2")
|
||||
self.ihdr = Png.IhdrChunk(self._io, self, self._root)
|
||||
self.ihdr_crc = self._io.read_bytes(4)
|
||||
self.chunks = []
|
||||
i = 0
|
||||
while True:
|
||||
_ = Png.Chunk(self._io, self, self._root)
|
||||
self.chunks.append(_)
|
||||
if ((_.type == u"IEND") or (self._io.is_eof())) :
|
||||
break
|
||||
i += 1
|
||||
|
||||
class Rgb(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.r = self._io.read_u1()
|
||||
self.g = self._io.read_u1()
|
||||
self.b = self._io.read_u1()
|
||||
|
||||
|
||||
class Chunk(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.len = self._io.read_u4be()
|
||||
self.type = (self._io.read_bytes(4)).decode(u"UTF-8")
|
||||
_on = self.type
|
||||
if _on == u"iTXt":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.InternationalTextChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"gAMA":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.GamaChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"tIME":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.TimeChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"PLTE":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.PlteChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"bKGD":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.BkgdChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"pHYs":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.PhysChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"fdAT":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.FrameDataChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"tEXt":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.TextChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"cHRM":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.ChrmChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"acTL":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.AnimationControlChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"sRGB":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.SrgbChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"zTXt":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.CompressedTextChunk(_io__raw_body, self, self._root)
|
||||
elif _on == u"fcTL":
|
||||
self._raw_body = self._io.read_bytes(self.len)
|
||||
_io__raw_body = KaitaiStream(BytesIO(self._raw_body))
|
||||
self.body = Png.FrameControlChunk(_io__raw_body, self, self._root)
|
||||
else:
|
||||
self.body = self._io.read_bytes(self.len)
|
||||
self.crc = self._io.read_bytes(4)
|
||||
|
||||
|
||||
class BkgdIndexed(KaitaiStruct):
|
||||
"""Background chunk for images with indexed palette."""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.palette_index = self._io.read_u1()
|
||||
|
||||
|
||||
class Point(KaitaiStruct):
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.x_int = self._io.read_u4be()
|
||||
self.y_int = self._io.read_u4be()
|
||||
|
||||
@property
|
||||
def x(self):
|
||||
if hasattr(self, '_m_x'):
|
||||
return self._m_x
|
||||
|
||||
self._m_x = (self.x_int / 100000.0)
|
||||
return getattr(self, '_m_x', None)
|
||||
|
||||
@property
|
||||
def y(self):
|
||||
if hasattr(self, '_m_y'):
|
||||
return self._m_y
|
||||
|
||||
self._m_y = (self.y_int / 100000.0)
|
||||
return getattr(self, '_m_y', None)
|
||||
|
||||
|
||||
class BkgdGreyscale(KaitaiStruct):
|
||||
"""Background chunk for greyscale images."""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.value = self._io.read_u2be()
|
||||
|
||||
|
||||
class ChrmChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11cHRM
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.white_point = Png.Point(self._io, self, self._root)
|
||||
self.red = Png.Point(self._io, self, self._root)
|
||||
self.green = Png.Point(self._io, self, self._root)
|
||||
self.blue = Png.Point(self._io, self, self._root)
|
||||
|
||||
|
||||
class IhdrChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11IHDR
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.width = self._io.read_u4be()
|
||||
self.height = self._io.read_u4be()
|
||||
self.bit_depth = self._io.read_u1()
|
||||
self.color_type = KaitaiStream.resolve_enum(Png.ColorType, self._io.read_u1())
|
||||
self.compression_method = self._io.read_u1()
|
||||
self.filter_method = self._io.read_u1()
|
||||
self.interlace_method = self._io.read_u1()
|
||||
|
||||
|
||||
class PlteChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11PLTE
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.entries = []
|
||||
i = 0
|
||||
while not self._io.is_eof():
|
||||
self.entries.append(Png.Rgb(self._io, self, self._root))
|
||||
i += 1
|
||||
|
||||
|
||||
|
||||
class SrgbChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11sRGB
|
||||
"""
|
||||
|
||||
class Intent(Enum):
|
||||
perceptual = 0
|
||||
relative_colorimetric = 1
|
||||
saturation = 2
|
||||
absolute_colorimetric = 3
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.render_intent = KaitaiStream.resolve_enum(Png.SrgbChunk.Intent, self._io.read_u1())
|
||||
|
||||
|
||||
class CompressedTextChunk(KaitaiStruct):
|
||||
"""Compressed text chunk effectively allows to store key-value
|
||||
string pairs in PNG container, compressing "value" part (which
|
||||
can be quite lengthy) with zlib compression.
|
||||
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11zTXt
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")
|
||||
self.compression_method = KaitaiStream.resolve_enum(Png.CompressionMethods, self._io.read_u1())
|
||||
self._raw_text_datastream = self._io.read_bytes_full()
|
||||
self.text_datastream = zlib.decompress(self._raw_text_datastream)
|
||||
|
||||
|
||||
class FrameDataChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://wiki.mozilla.org/APNG_Specification#.60fdAT.60:_The_Frame_Data_Chunk
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.sequence_number = self._io.read_u4be()
|
||||
self.frame_data = self._io.read_bytes_full()
|
||||
|
||||
|
||||
class BkgdTruecolor(KaitaiStruct):
|
||||
"""Background chunk for truecolor images."""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.red = self._io.read_u2be()
|
||||
self.green = self._io.read_u2be()
|
||||
self.blue = self._io.read_u2be()
|
||||
|
||||
|
||||
class GamaChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11gAMA
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.gamma_int = self._io.read_u4be()
|
||||
|
||||
@property
|
||||
def gamma_ratio(self):
|
||||
if hasattr(self, '_m_gamma_ratio'):
|
||||
return self._m_gamma_ratio
|
||||
|
||||
self._m_gamma_ratio = (100000.0 / self.gamma_int)
|
||||
return getattr(self, '_m_gamma_ratio', None)
|
||||
|
||||
|
||||
class BkgdChunk(KaitaiStruct):
|
||||
"""Background chunk stores default background color to display this
|
||||
image against. Contents depend on `color_type` of the image.
|
||||
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11bKGD
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
_on = self._root.ihdr.color_type
|
||||
if _on == Png.ColorType.indexed:
|
||||
self.bkgd = Png.BkgdIndexed(self._io, self, self._root)
|
||||
elif _on == Png.ColorType.truecolor_alpha:
|
||||
self.bkgd = Png.BkgdTruecolor(self._io, self, self._root)
|
||||
elif _on == Png.ColorType.greyscale_alpha:
|
||||
self.bkgd = Png.BkgdGreyscale(self._io, self, self._root)
|
||||
elif _on == Png.ColorType.truecolor:
|
||||
self.bkgd = Png.BkgdTruecolor(self._io, self, self._root)
|
||||
elif _on == Png.ColorType.greyscale:
|
||||
self.bkgd = Png.BkgdGreyscale(self._io, self, self._root)
|
||||
|
||||
|
||||
class PhysChunk(KaitaiStruct):
|
||||
""""Physical size" chunk stores data that allows to translate
|
||||
logical pixels into physical units (meters, etc) and vice-versa.
|
||||
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11pHYs
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.pixels_per_unit_x = self._io.read_u4be()
|
||||
self.pixels_per_unit_y = self._io.read_u4be()
|
||||
self.unit = KaitaiStream.resolve_enum(Png.PhysUnit, self._io.read_u1())
|
||||
|
||||
|
||||
class FrameControlChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://wiki.mozilla.org/APNG_Specification#.60fcTL.60:_The_Frame_Control_Chunk
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.sequence_number = self._io.read_u4be()
|
||||
self.width = self._io.read_u4be()
|
||||
if not self.width >= 1:
|
||||
raise kaitaistruct.ValidationLessThanError(1, self.width, self._io, u"/types/frame_control_chunk/seq/1")
|
||||
if not self.width <= self._root.ihdr.width:
|
||||
raise kaitaistruct.ValidationGreaterThanError(self._root.ihdr.width, self.width, self._io, u"/types/frame_control_chunk/seq/1")
|
||||
self.height = self._io.read_u4be()
|
||||
if not self.height >= 1:
|
||||
raise kaitaistruct.ValidationLessThanError(1, self.height, self._io, u"/types/frame_control_chunk/seq/2")
|
||||
if not self.height <= self._root.ihdr.height:
|
||||
raise kaitaistruct.ValidationGreaterThanError(self._root.ihdr.height, self.height, self._io, u"/types/frame_control_chunk/seq/2")
|
||||
self.x_offset = self._io.read_u4be()
|
||||
if not self.x_offset <= (self._root.ihdr.width - self.width):
|
||||
raise kaitaistruct.ValidationGreaterThanError((self._root.ihdr.width - self.width), self.x_offset, self._io, u"/types/frame_control_chunk/seq/3")
|
||||
self.y_offset = self._io.read_u4be()
|
||||
if not self.y_offset <= (self._root.ihdr.height - self.height):
|
||||
raise kaitaistruct.ValidationGreaterThanError((self._root.ihdr.height - self.height), self.y_offset, self._io, u"/types/frame_control_chunk/seq/4")
|
||||
self.delay_num = self._io.read_u2be()
|
||||
self.delay_den = self._io.read_u2be()
|
||||
self.dispose_op = KaitaiStream.resolve_enum(Png.DisposeOpValues, self._io.read_u1())
|
||||
self.blend_op = KaitaiStream.resolve_enum(Png.BlendOpValues, self._io.read_u1())
|
||||
|
||||
@property
|
||||
def delay(self):
|
||||
"""Time to display this frame, in seconds."""
|
||||
if hasattr(self, '_m_delay'):
|
||||
return self._m_delay
|
||||
|
||||
self._m_delay = (self.delay_num / (100.0 if self.delay_den == 0 else self.delay_den))
|
||||
return getattr(self, '_m_delay', None)
|
||||
|
||||
|
||||
class InternationalTextChunk(KaitaiStruct):
|
||||
"""International text chunk effectively allows to store key-value string pairs in
|
||||
PNG container. Both "key" (keyword) and "value" (text) parts are
|
||||
given in pre-defined subset of iso8859-1 without control
|
||||
characters.
|
||||
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11iTXt
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")
|
||||
self.compression_flag = self._io.read_u1()
|
||||
self.compression_method = KaitaiStream.resolve_enum(Png.CompressionMethods, self._io.read_u1())
|
||||
self.language_tag = (self._io.read_bytes_term(0, False, True, True)).decode(u"ASCII")
|
||||
self.translated_keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"UTF-8")
|
||||
self.text = (self._io.read_bytes_full()).decode(u"UTF-8")
|
||||
|
||||
|
||||
class TextChunk(KaitaiStruct):
|
||||
"""Text chunk effectively allows to store key-value string pairs in
|
||||
PNG container. Both "key" (keyword) and "value" (text) parts are
|
||||
given in pre-defined subset of iso8859-1 without control
|
||||
characters.
|
||||
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11tEXt
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.keyword = (self._io.read_bytes_term(0, False, True, True)).decode(u"iso8859-1")
|
||||
self.text = (self._io.read_bytes_full()).decode(u"iso8859-1")
|
||||
|
||||
|
||||
class AnimationControlChunk(KaitaiStruct):
|
||||
"""
|
||||
.. seealso::
|
||||
Source - https://wiki.mozilla.org/APNG_Specification#.60acTL.60:_The_Animation_Control_Chunk
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.num_frames = self._io.read_u4be()
|
||||
self.num_plays = self._io.read_u4be()
|
||||
|
||||
|
||||
class TimeChunk(KaitaiStruct):
|
||||
"""Time chunk stores time stamp of last modification of this image,
|
||||
up to 1 second precision in UTC timezone.
|
||||
|
||||
.. seealso::
|
||||
Source - https://www.w3.org/TR/PNG/#11tIME
|
||||
"""
|
||||
def __init__(self, _io, _parent=None, _root=None):
|
||||
self._io = _io
|
||||
self._parent = _parent
|
||||
self._root = _root if _root else self
|
||||
self._read()
|
||||
|
||||
def _read(self):
|
||||
self.year = self._io.read_u2be()
|
||||
self.month = self._io.read_u1()
|
||||
self.day = self._io.read_u1()
|
||||
self.hour = self._io.read_u1()
|
||||
self.minute = self._io.read_u1()
|
||||
self.second = self._io.read_u1()
|
||||
|
||||
|
||||
|
||||
@@ -0,0 +1,132 @@
|
||||
meta:
|
||||
id: tls_client_hello
|
||||
xref:
|
||||
rfc: 5246 # TLS 1.2
|
||||
wikidata: Q206494 # TLS
|
||||
license: MIT
|
||||
endian: be
|
||||
|
||||
seq:
|
||||
- id: version
|
||||
type: version
|
||||
|
||||
- id: random
|
||||
type: random
|
||||
|
||||
- id: session_id
|
||||
type: session_id
|
||||
|
||||
- id: cipher_suites
|
||||
type: cipher_suites
|
||||
|
||||
- id: compression_methods
|
||||
type: compression_methods
|
||||
|
||||
- id: extensions
|
||||
type: extensions
|
||||
if: _io.eof == false
|
||||
|
||||
types:
|
||||
version:
|
||||
seq:
|
||||
- id: major
|
||||
type: u1
|
||||
|
||||
- id: minor
|
||||
type: u1
|
||||
|
||||
random:
|
||||
seq:
|
||||
- id: gmt_unix_time
|
||||
type: u4
|
||||
|
||||
- id: random
|
||||
size: 28
|
||||
|
||||
session_id:
|
||||
seq:
|
||||
- id: len
|
||||
type: u1
|
||||
|
||||
- id: sid
|
||||
size: len
|
||||
|
||||
cipher_suites:
|
||||
seq:
|
||||
- id: len
|
||||
type: u2
|
||||
|
||||
- id: cipher_suites
|
||||
type: u2
|
||||
repeat: expr
|
||||
repeat-expr: len/2
|
||||
|
||||
compression_methods:
|
||||
seq:
|
||||
- id: len
|
||||
type: u1
|
||||
|
||||
- id: compression_methods
|
||||
size: len
|
||||
|
||||
extensions:
|
||||
seq:
|
||||
- id: len
|
||||
type: u2
|
||||
|
||||
- id: extensions
|
||||
type: extension
|
||||
repeat: eos
|
||||
|
||||
extension:
|
||||
seq:
|
||||
- id: type
|
||||
type: u2
|
||||
|
||||
- id: len
|
||||
type: u2
|
||||
|
||||
- id: body
|
||||
size: len
|
||||
type:
|
||||
switch-on: type
|
||||
cases:
|
||||
0: sni
|
||||
16: alpn
|
||||
|
||||
sni:
|
||||
seq:
|
||||
- id: list_length
|
||||
type: u2
|
||||
|
||||
- id: server_names
|
||||
type: server_name
|
||||
repeat: eos
|
||||
|
||||
server_name:
|
||||
seq:
|
||||
- id: name_type
|
||||
type: u1
|
||||
|
||||
- id: length
|
||||
type: u2
|
||||
|
||||
- id: host_name
|
||||
size: length
|
||||
|
||||
alpn:
|
||||
seq:
|
||||
- id: ext_len
|
||||
type: u2
|
||||
|
||||
- id: alpn_protocols
|
||||
type: protocol
|
||||
repeat: eos
|
||||
|
||||
protocol:
|
||||
seq:
|
||||
- id: strlen
|
||||
type: u1
|
||||
|
||||
- id: name
|
||||
size: strlen
|
||||
@@ -0,0 +1,189 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
class TlsClientHello(KaitaiStruct):
    """Parser for the TLS ClientHello handshake message (Kaitai Struct generated).

    Reads the protocol version, client random, session id, cipher-suite list
    and compression methods; if any bytes remain in the record, the optional
    extensions block is parsed as well.  SNI (type 0) and ALPN (type 16)
    extension bodies get dedicated sub-parsers; all other extension bodies are
    kept as raw bytes.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.version = TlsClientHello.Version(self._io, self, self._root)
        self.random = TlsClientHello.Random(self._io, self, self._root)
        self.session_id = TlsClientHello.SessionId(self._io, self, self._root)
        self.cipher_suites = TlsClientHello.CipherSuites(self._io, self, self._root)
        self.compression_methods = TlsClientHello.CompressionMethods(self._io, self, self._root)
        # Extensions are optional: only parse them when bytes remain after the
        # compression methods.  (Was the non-idiomatic `== False` comparison.)
        if not self._io.is_eof():
            self.extensions = TlsClientHello.Extensions(self._io, self, self._root)

    class ServerName(KaitaiStruct):
        """One entry of the SNI list: a type byte plus a length-prefixed host name."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.name_type = self._io.read_u1()
            self.length = self._io.read_u2be()
            self.host_name = self._io.read_bytes(self.length)

    class Random(KaitaiStruct):
        """Client random: 32-bit GMT timestamp followed by 28 random bytes."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.gmt_unix_time = self._io.read_u4be()
            self.random = self._io.read_bytes(28)

    class SessionId(KaitaiStruct):
        """Session id, length-prefixed by a single byte."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.len = self._io.read_u1()
            self.sid = self._io.read_bytes(self.len)

    class Sni(KaitaiStruct):
        """Body of the server_name (SNI) extension: a list of ServerName entries."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.list_length = self._io.read_u2be()
            # repeat-eos: the substream is sized to the extension body, so
            # consume ServerName entries until it is exhausted.
            self.server_names = []
            while not self._io.is_eof():
                self.server_names.append(TlsClientHello.ServerName(self._io, self, self._root))

    class CipherSuites(KaitaiStruct):
        """Cipher-suite list: u2 byte length followed by 16-bit suite ids."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.len = self._io.read_u2be()
            # `len` counts bytes; each suite id is two bytes wide.
            self.cipher_suites = [self._io.read_u2be() for _ in range(self.len // 2)]

    class CompressionMethods(KaitaiStruct):
        """Compression methods, length-prefixed by a single byte."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.len = self._io.read_u1()
            self.compression_methods = self._io.read_bytes(self.len)

    class Alpn(KaitaiStruct):
        """Body of the ALPN extension: a list of length-prefixed protocol names."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.ext_len = self._io.read_u2be()
            self.alpn_protocols = []
            while not self._io.is_eof():
                self.alpn_protocols.append(TlsClientHello.Protocol(self._io, self, self._root))

    class Extensions(KaitaiStruct):
        """Extension block: u2 byte length, then Extension records until EOF."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.len = self._io.read_u2be()
            self.extensions = []
            while not self._io.is_eof():
                self.extensions.append(TlsClientHello.Extension(self._io, self, self._root))

    class Version(KaitaiStruct):
        """Protocol version as major/minor bytes."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.major = self._io.read_u1()
            self.minor = self._io.read_u1()

    class Protocol(KaitaiStruct):
        """A single ALPN protocol name, length-prefixed by one byte."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.strlen = self._io.read_u1()
            self.name = self._io.read_bytes(self.strlen)

    class Extension(KaitaiStruct):
        """A single TLS extension: u2 type, u2 body length, then the body."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.type = self._io.read_u2be()
            self.len = self._io.read_u2be()
            if self.type == 0:
                # server_name (SNI): parse the body in a bounded substream.
                self._raw_body = self._io.read_bytes(self.len)
                _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
                self.body = TlsClientHello.Sni(_io__raw_body, self, self._root)
            elif self.type == 16:
                # application_layer_protocol_negotiation (ALPN).
                self._raw_body = self._io.read_bytes(self.len)
                _io__raw_body = KaitaiStream(BytesIO(self._raw_body))
                self.body = TlsClientHello.Alpn(_io__raw_body, self, self._root)
            else:
                # Unknown extension types are preserved as raw bytes.
                self.body = self._io.read_bytes(self.len)
||||
@@ -0,0 +1,115 @@
|
||||
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
|
||||
|
||||
import kaitaistruct
|
||||
from kaitaistruct import KaitaiStruct
|
||||
|
||||
|
||||
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
|
||||
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
|
||||
|
||||
class VlqBase128Le(KaitaiStruct):
    """Variable-length base-128 ("varint"/LEB128) integer, little-endian group order.

    Each serialized byte carries 7 value bits plus a high continuation bit;
    groups are ordered least-significant first.  This encoding is used by the
    DWARF debug format ("ULEB128"), Google Protocol Buffers ("Base 128
    Varints"), Apache Lucene ("VInt") and, with ZigZag on top, Apache Avro.
    See https://en.wikipedia.org/wiki/LEB128 for background.

    This implementation supports serialized values up to 8 bytes long.
    """

    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Consume one-byte groups until a group's continuation bit is clear.
        self.groups = []
        while True:
            group = VlqBase128Le.Group(self._io, self, self._root)
            self.groups.append(group)
            if not group.has_next:
                break

    class Group(KaitaiStruct):
        """One byte group: a 7-bit value chunk plus a 1-bit continuation flag."""

        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.b = self._io.read_u1()

        @property
        def has_next(self):
            """True when another group follows (high bit of the byte is set)."""
            if hasattr(self, '_m_has_next'):
                return self._m_has_next

            self._m_has_next = (self.b & 0x80) != 0
            return getattr(self, '_m_has_next', None)

        @property
        def value(self):
            """The low 7 data bits of this group."""
            if hasattr(self, '_m_value'):
                return self._m_value

            self._m_value = self.b & 0x7F
            return getattr(self, '_m_value', None)

    @property
    def len(self):
        """Number of byte groups the value occupied."""
        if hasattr(self, '_m_len'):
            return self._m_len

        self._m_len = len(self.groups)
        return getattr(self, '_m_len', None)

    @property
    def value(self):
        """Resulting unsigned value as a normal integer.

        Only the first 8 groups contribute, matching the 8-byte limit of the
        original generated expression.
        """
        if hasattr(self, '_m_value'):
            return self._m_value

        total = 0
        for position, group in enumerate(self.groups[:8]):
            total += group.value << (7 * position)
        self._m_value = total
        return getattr(self, '_m_value', None)

    @property
    def sign_bit(self):
        """Mask of the bit that carries the sign in the signed interpretation."""
        if hasattr(self, '_m_sign_bit'):
            return self._m_sign_bit

        self._m_sign_bit = 1 << (7 * self.len - 1)
        return getattr(self, '_m_sign_bit', None)

    @property
    def value_signed(self):
        """Value sign-extended from (7 * len) bits.

        .. seealso::
           Source - https://graphics.stanford.edu/~seander/bithacks.html#VariableSignExtend
        """
        if hasattr(self, '_m_value_signed'):
            return self._m_value_signed

        self._m_value_signed = (self.value ^ self.sign_bit) - self.sign_bit
        return getattr(self, '_m_value_signed', None)
||||
@@ -0,0 +1,73 @@
|
||||
#!/usr/bin/env python3
|
||||
'''
|
||||
@author: David Shaw, shawd@vmware.com
|
||||
|
||||
Inspired by EAS Inspector for Fiddler
|
||||
https://easinspectorforfiddler.codeplex.com
|
||||
|
||||
----- The MIT License (MIT) -----
|
||||
Filename: ASCommandResponse.py
|
||||
Copyright (c) 2014, David P. Shaw
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
'''
|
||||
from .ASWBXML import ASWBXML
|
||||
import logging
|
||||
|
||||
class ASCommandResponse:
    """Decodes a WBXML-encoded ActiveSync command response into an XML string.

    The raw WBXML payload is kept in ``wbxmlBody``; the decoded XML text is
    stored in ``xmlString``.  Raises ValueError if the body is empty or if
    decoding fails.
    """

    def __init__(self, response):
        # Raw WBXML payload, exactly as received.
        self.wbxmlBody = response
        try:
            if ( len(response) > 0):
                self.xmlString = self.decodeWBXML(self.wbxmlBody)
            else:
                raise ValueError("Empty WBXML body passed")
        except Exception as e:
            self.xmlString = None
            # Chain the original exception so the decode failure is visible.
            raise ValueError("Error: {0}".format(e)) from e

    def getWBXMLBytes(self):
        # BUG FIX: this previously returned `self.wbxmlBytes`, an attribute
        # that is never assigned anywhere, so every call raised
        # AttributeError.  The stored payload lives in `wbxmlBody`.
        return self.wbxmlBody

    def getXMLString(self):
        return self.xmlString

    def decodeWBXML(self, body):
        # Delegates the actual WBXML token parsing to ASWBXML.
        self.instance = ASWBXML()
        self.instance.loadBytes(body)
        return self.instance.getXml()
||||
if __name__ == "__main__":
    import os
    logging.basicConfig(level=logging.INFO)

    # Decode every WBXML sample under Samples/ and log the resulting XML.
    projectDir = os.path.dirname(os.path.realpath("."))
    samplesDir = os.path.join(projectDir, "Samples/")
    listOfSamples = os.listdir(samplesDir)

    for filename in listOfSamples:
        # os.path.join instead of `samplesDir + os.sep + filename`:
        # samplesDir already ends with a separator, so the old concatenation
        # produced a doubled separator in every path.
        with open(os.path.join(samplesDir, filename), "rb") as f:
            byteWBXML = f.read()

        logging.info("-"*100)
        logging.info(filename)
        logging.info("-"*100)
        instance = ASCommandResponse(byteWBXML)
        logging.info(instance.xmlString)
|
||||
903
venv/Lib/site-packages/mitmproxy/contrib/wbxml/ASWBXML.py
Normal file
903
venv/Lib/site-packages/mitmproxy/contrib/wbxml/ASWBXML.py
Normal file
@@ -0,0 +1,903 @@
|
||||
#!/usr/bin/env python3
|
||||
'''
|
||||
@author: David Shaw, shawd@vmware.com
|
||||
|
||||
Inspired by EAS Inspector for Fiddler
|
||||
https://easinspectorforfiddler.codeplex.com
|
||||
|
||||
----- The MIT License (MIT) -----
|
||||
Filename: ASWBXML.py
|
||||
Copyright (c) 2014, David P. Shaw
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
'''
|
||||
import xml.dom.minidom
|
||||
import logging
|
||||
|
||||
from .ASWBXMLCodePage import ASWBXMLCodePage
|
||||
from .ASWBXMLByteQueue import ASWBXMLByteQueue
|
||||
from .GlobalTokens import GlobalTokens
|
||||
from .InvalidDataException import InvalidDataException
|
||||
|
||||
class ASWBXML:
|
||||
versionByte = 0x03
|
||||
publicIdentifierByte = 0x01
|
||||
characterSetByte = 0x6A
|
||||
stringTableLengthByte = 0x00
|
||||
|
||||
def __init__(self):
|
||||
|
||||
# empty on init
|
||||
self.xmlDoc = xml.dom.minidom.Document()
|
||||
self.currentCodePage = 0
|
||||
self.defaultCodePage = -1
|
||||
|
||||
# Load up code pages
|
||||
# Currently there are 25 code pages as per MS-ASWBXML
|
||||
self.codePages = []
|
||||
|
||||
# region Code Page Initialization
|
||||
# Code Page 0: AirSync
|
||||
# region AirSync Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "AirSync:"
|
||||
page.xmlns = "airsync"
|
||||
|
||||
page.addToken(0x05, "Sync")
|
||||
page.addToken(0x06, "Responses")
|
||||
page.addToken(0x07, "Add")
|
||||
page.addToken(0x08, "Change")
|
||||
page.addToken(0x09, "Delete")
|
||||
page.addToken(0x0A, "Fetch")
|
||||
page.addToken(0x0B, "SyncKey")
|
||||
page.addToken(0x0C, "ClientId")
|
||||
page.addToken(0x0D, "ServerId")
|
||||
page.addToken(0x0E, "Status")
|
||||
page.addToken(0x0F, "Collection")
|
||||
page.addToken(0x10, "Class")
|
||||
page.addToken(0x12, "CollectionId")
|
||||
page.addToken(0x13, "GetChanges")
|
||||
page.addToken(0x14, "MoreAvailable")
|
||||
page.addToken(0x15, "WindowSize")
|
||||
page.addToken(0x16, "Commands")
|
||||
page.addToken(0x17, "Options")
|
||||
page.addToken(0x18, "FilterType")
|
||||
page.addToken(0x1B, "Conflict")
|
||||
page.addToken(0x1C, "Collections")
|
||||
page.addToken(0x1D, "ApplicationData")
|
||||
page.addToken(0x1E, "DeletesAsMoves")
|
||||
page.addToken(0x20, "Supported")
|
||||
page.addToken(0x21, "SoftDelete")
|
||||
page.addToken(0x22, "MIMESupport")
|
||||
page.addToken(0x23, "MIMETruncation")
|
||||
page.addToken(0x24, "Wait")
|
||||
page.addToken(0x25, "Limit")
|
||||
page.addToken(0x26, "Partial")
|
||||
page.addToken(0x27, "ConversationMode")
|
||||
page.addToken(0x28, "MaxItems")
|
||||
page.addToken(0x29, "HeartbeatInterval")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 1: Contacts
|
||||
# region Contacts Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Contacts:"
|
||||
page.xmlns = "contacts"
|
||||
|
||||
page.addToken(0x05, "Anniversary")
|
||||
page.addToken(0x06, "AssistantName")
|
||||
page.addToken(0x07, "AssistantTelephoneNumber")
|
||||
page.addToken(0x08, "Birthday")
|
||||
page.addToken(0x0C, "Business2PhoneNumber")
|
||||
page.addToken(0x0D, "BusinessCity")
|
||||
page.addToken(0x0E, "BusinessCountry")
|
||||
page.addToken(0x0F, "BusinessPostalCode")
|
||||
page.addToken(0x10, "BusinessState")
|
||||
page.addToken(0x11, "BusinessStreet")
|
||||
page.addToken(0x12, "BusinessFaxNumber")
|
||||
page.addToken(0x13, "BusinessPhoneNumber")
|
||||
page.addToken(0x14, "CarPhoneNumber")
|
||||
page.addToken(0x15, "Categories")
|
||||
page.addToken(0x16, "Category")
|
||||
page.addToken(0x17, "Children")
|
||||
page.addToken(0x18, "Child")
|
||||
page.addToken(0x19, "CompanyName")
|
||||
page.addToken(0x1A, "Department")
|
||||
page.addToken(0x1B, "Email1Address")
|
||||
page.addToken(0x1C, "Email2Address")
|
||||
page.addToken(0x1D, "Email3Address")
|
||||
page.addToken(0x1E, "FileAs")
|
||||
page.addToken(0x1F, "FirstName")
|
||||
page.addToken(0x20, "Home2PhoneNumber")
|
||||
page.addToken(0x21, "HomeCity")
|
||||
page.addToken(0x22, "HomeCountry")
|
||||
page.addToken(0x23, "HomePostalCode")
|
||||
page.addToken(0x24, "HomeState")
|
||||
page.addToken(0x25, "HomeStreet")
|
||||
page.addToken(0x26, "HomeFaxNumber")
|
||||
page.addToken(0x27, "HomePhoneNumber")
|
||||
page.addToken(0x28, "JobTitle")
|
||||
page.addToken(0x29, "LastName")
|
||||
page.addToken(0x2A, "MiddleName")
|
||||
page.addToken(0x2B, "MobilePhoneNumber")
|
||||
page.addToken(0x2C, "OfficeLocation")
|
||||
page.addToken(0x2D, "OtherCity")
|
||||
page.addToken(0x2E, "OtherCountry")
|
||||
page.addToken(0x2F, "OtherPostalCode")
|
||||
page.addToken(0x30, "OtherState")
|
||||
page.addToken(0x31, "OtherStreet")
|
||||
page.addToken(0x32, "PagerNumber")
|
||||
page.addToken(0x33, "RadioPhoneNumber")
|
||||
page.addToken(0x34, "Spouse")
|
||||
page.addToken(0x35, "Suffix")
|
||||
page.addToken(0x36, "Title")
|
||||
page.addToken(0x37, "Webpage")
|
||||
page.addToken(0x38, "YomiCompanyName")
|
||||
page.addToken(0x39, "YomiFirstName")
|
||||
page.addToken(0x3A, "YomiLastName")
|
||||
page.addToken(0x3C, "Picture")
|
||||
page.addToken(0x3D, "Alias")
|
||||
page.addToken(0x3E, "WeightedRank")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 2: Email
|
||||
# region Email Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Email:"
|
||||
page.xmlns = "email"
|
||||
|
||||
page.addToken(0x0F, "DateReceived")
|
||||
page.addToken(0x11, "DisplayTo")
|
||||
page.addToken(0x12, "Importance")
|
||||
page.addToken(0x13, "MessageClass")
|
||||
page.addToken(0x14, "Subject")
|
||||
page.addToken(0x15, "Read")
|
||||
page.addToken(0x16, "To")
|
||||
page.addToken(0x17, "CC")
|
||||
page.addToken(0x18, "From")
|
||||
page.addToken(0x19, "ReplyTo")
|
||||
page.addToken(0x1A, "AllDayEvent")
|
||||
page.addToken(0x1B, "Categories")
|
||||
page.addToken(0x1C, "Category")
|
||||
page.addToken(0x1D, "DTStamp")
|
||||
page.addToken(0x1E, "EndTime")
|
||||
page.addToken(0x1F, "InstanceType")
|
||||
page.addToken(0x20, "BusyStatus")
|
||||
page.addToken(0x21, "Location")
|
||||
page.addToken(0x22, "MeetingRequest")
|
||||
page.addToken(0x23, "Organizer")
|
||||
page.addToken(0x24, "RecurrenceId")
|
||||
page.addToken(0x25, "Reminder")
|
||||
page.addToken(0x26, "ResponseRequested")
|
||||
page.addToken(0x27, "Recurrences")
|
||||
page.addToken(0x28, "Recurrence")
|
||||
page.addToken(0x29, "Recurrence_Type")
|
||||
page.addToken(0x2A, "Recurrence_Until")
|
||||
page.addToken(0x2B, "Recurrence_Occurrences")
|
||||
page.addToken(0x2C, "Recurrence_Interval")
|
||||
page.addToken(0x2D, "Recurrence_DayOfWeek")
|
||||
page.addToken(0x2E, "Recurrence_DayOfMonth")
|
||||
page.addToken(0x2F, "Recurrence_WeekOfMonth")
|
||||
page.addToken(0x30, "Recurrence_MonthOfYear")
|
||||
page.addToken(0x31, "StartTime")
|
||||
page.addToken(0x32, "Sensitivity")
|
||||
page.addToken(0x33, "TimeZone")
|
||||
page.addToken(0x34, "GlobalObjId")
|
||||
page.addToken(0x35, "ThreadTopic")
|
||||
page.addToken(0x39, "InternetCPID")
|
||||
page.addToken(0x3A, "Flag")
|
||||
page.addToken(0x3B, "FlagStatus")
|
||||
page.addToken(0x3C, "ContentClass")
|
||||
page.addToken(0x3D, "FlagType")
|
||||
page.addToken(0x3E, "CompleteTime")
|
||||
page.addToken(0x3F, "DisallowNewTimeProposal")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 3: AirNotify - retired
|
||||
# region AirNotify Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = ""
|
||||
page.xmlns = ""
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 4: Calendar
|
||||
# region Calendar Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Calendar:"
|
||||
page.xmlns = "calendar"
|
||||
|
||||
page.addToken(0x05, "TimeZone")
|
||||
page.addToken(0x06, "AllDayEvent")
|
||||
page.addToken(0x07, "Attendees")
|
||||
page.addToken(0x08, "Attendee")
|
||||
page.addToken(0x09, "Attendee_Email")
|
||||
page.addToken(0x0A, "Attendee_Name")
|
||||
page.addToken(0x0D, "BusyStatus")
|
||||
page.addToken(0x0E, "Categories")
|
||||
page.addToken(0x0F, "Category")
|
||||
page.addToken(0x11, "DTStamp")
|
||||
page.addToken(0x12, "EndTime")
|
||||
page.addToken(0x13, "Exception")
|
||||
page.addToken(0x14, "Exceptions")
|
||||
page.addToken(0x15, "Exception_Deleted")
|
||||
page.addToken(0x16, "Exception_StartTime")
|
||||
page.addToken(0x17, "Location")
|
||||
page.addToken(0x18, "MeetingStatus")
|
||||
page.addToken(0x19, "Organizer_Email")
|
||||
page.addToken(0x1A, "Organizer_Name")
|
||||
page.addToken(0x1B, "Recurrence")
|
||||
page.addToken(0x1C, "Recurrence_Type")
|
||||
page.addToken(0x1D, "Recurrence_Until")
|
||||
page.addToken(0x1E, "Recurrence_Occurrences")
|
||||
page.addToken(0x1F, "Recurrence_Interval")
|
||||
page.addToken(0x20, "Recurrence_DayOfWeek")
|
||||
page.addToken(0x21, "Recurrence_DayOfMonth")
|
||||
page.addToken(0x22, "Recurrence_WeekOfMonth")
|
||||
page.addToken(0x23, "Recurrence_MonthOfYear")
|
||||
page.addToken(0x24, "Reminder")
|
||||
page.addToken(0x25, "Sensitivity")
|
||||
page.addToken(0x26, "Subject")
|
||||
page.addToken(0x27, "StartTime")
|
||||
page.addToken(0x28, "UID")
|
||||
page.addToken(0x29, "Attendee_Status")
|
||||
page.addToken(0x2A, "Attendee_Type")
|
||||
page.addToken(0x33, "DisallowNewTimeProposal")
|
||||
page.addToken(0x34, "ResponseRequested")
|
||||
page.addToken(0x35, "AppointmentReplyTime")
|
||||
page.addToken(0x36, "ResponseType")
|
||||
page.addToken(0x37, "CalendarType")
|
||||
page.addToken(0x38, "IsLeapMonth")
|
||||
page.addToken(0x39, "FirstDayOfWeek")
|
||||
page.addToken(0x3A, "OnlineMeetingConfLink")
|
||||
page.addToken(0x3B, "OnlineMeetingExternalLink")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 5: Move
|
||||
# region Move Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Move:"
|
||||
page.xmlns = "move"
|
||||
|
||||
page.addToken(0x05, "MoveItems")
|
||||
page.addToken(0x06, "Move")
|
||||
page.addToken(0x07, "SrcMsgId")
|
||||
page.addToken(0x08, "SrcFldId")
|
||||
page.addToken(0x09, "DstFldId")
|
||||
page.addToken(0x0A, "Response")
|
||||
page.addToken(0x0B, "Status")
|
||||
page.addToken(0x0C, "DstMsgId")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 6: ItemEstimate
|
||||
# region ItemEstimate Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "GetItemEstimate:"
|
||||
page.xmlns = "getitemestimate"
|
||||
|
||||
page.addToken(0x05, "GetItemEstimate")
|
||||
page.addToken(0x06, "Version")
|
||||
page.addToken(0x07, "Collections")
|
||||
page.addToken(0x08, "Collection")
|
||||
page.addToken(0x09, "Class")
|
||||
page.addToken(0x0A, "CollectionId")
|
||||
page.addToken(0x0B, "DateTime")
|
||||
page.addToken(0x0C, "Estimate")
|
||||
page.addToken(0x0D, "Response")
|
||||
page.addToken(0x0E, "Status")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 7: FolderHierarchy
|
||||
# region FolderHierarchy Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "FolderHierarchy:"
|
||||
page.xmlns = "folderhierarchy"
|
||||
|
||||
page.addToken(0x07, "DisplayName")
|
||||
page.addToken(0x08, "ServerId")
|
||||
page.addToken(0x09, "ParentId")
|
||||
page.addToken(0x0A, "Type")
|
||||
page.addToken(0x0C, "Status")
|
||||
page.addToken(0x0E, "Changes")
|
||||
page.addToken(0x0F, "Add")
|
||||
page.addToken(0x10, "Delete")
|
||||
page.addToken(0x11, "Update")
|
||||
page.addToken(0x12, "SyncKey")
|
||||
page.addToken(0x13, "FolderCreate")
|
||||
page.addToken(0x14, "FolderDelete")
|
||||
page.addToken(0x15, "FolderUpdate")
|
||||
page.addToken(0x16, "FolderSync")
|
||||
page.addToken(0x17, "Count")
|
||||
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 8: MeetingResponse
|
||||
# region MeetingResponse Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "MeetingResponse:"
|
||||
page.xmlns = "meetingresponse"
|
||||
|
||||
page.addToken(0x05, "CalendarId")
|
||||
page.addToken(0x06, "CollectionId")
|
||||
page.addToken(0x07, "MeetingResponse")
|
||||
page.addToken(0x08, "RequestId")
|
||||
page.addToken(0x09, "Request")
|
||||
page.addToken(0x0A, "Result")
|
||||
page.addToken(0x0B, "Status")
|
||||
page.addToken(0x0C, "UserResponse")
|
||||
page.addToken(0x0E, "InstanceId")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 9: Tasks
|
||||
# region Tasks Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Tasks:"
|
||||
page.xmlns = "tasks"
|
||||
|
||||
page.addToken(0x08, "Categories")
|
||||
page.addToken(0x09, "Category")
|
||||
page.addToken(0x0A, "Complete")
|
||||
page.addToken(0x0B, "DateCompleted")
|
||||
page.addToken(0x0C, "DueDate")
|
||||
page.addToken(0x0D, "UTCDueDate")
|
||||
page.addToken(0x0E, "Importance")
|
||||
page.addToken(0x0F, "Recurrence")
|
||||
page.addToken(0x10, "Recurrence_Type")
|
||||
page.addToken(0x11, "Recurrence_Start")
|
||||
page.addToken(0x12, "Recurrence_Until")
|
||||
page.addToken(0x13, "Recurrence_Occurrences")
|
||||
page.addToken(0x14, "Recurrence_Interval")
|
||||
page.addToken(0x15, "Recurrence_DayOfMonth")
|
||||
page.addToken(0x16, "Recurrence_DayOfWeek")
|
||||
page.addToken(0x17, "Recurrence_WeekOfMonth")
|
||||
page.addToken(0x18, "Recurrence_MonthOfYear")
|
||||
page.addToken(0x19, "Recurrence_Regenerate")
|
||||
page.addToken(0x1A, "Recurrence_DeadOccur")
|
||||
page.addToken(0x1B, "ReminderSet")
|
||||
page.addToken(0x1C, "ReminderTime")
|
||||
page.addToken(0x1D, "Sensitivity")
|
||||
page.addToken(0x1E, "StartDate")
|
||||
page.addToken(0x1F, "UTCStartDate")
|
||||
page.addToken(0x20, "Subject")
|
||||
page.addToken(0x22, "OrdinalDate")
|
||||
page.addToken(0x23, "SubOrdinalDate")
|
||||
page.addToken(0x24, "CalendarType")
|
||||
page.addToken(0x25, "IsLeapMonth")
|
||||
page.addToken(0x26, "FirstDayOfWeek")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 10: ResolveRecipients
|
||||
# region ResolveRecipients Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "ResolveRecipients:"
|
||||
page.xmlns = "resolverecipients"
|
||||
|
||||
page.addToken(0x05, "ResolveRecipients")
|
||||
page.addToken(0x06, "Response")
|
||||
page.addToken(0x07, "Status")
|
||||
page.addToken(0x08, "Type")
|
||||
page.addToken(0x09, "Recipient")
|
||||
page.addToken(0x0A, "DisplayName")
|
||||
page.addToken(0x0B, "EmailAddress")
|
||||
page.addToken(0x0C, "Certificates")
|
||||
page.addToken(0x0D, "Certificate")
|
||||
page.addToken(0x0E, "MiniCertificate")
|
||||
page.addToken(0x0F, "Options")
|
||||
page.addToken(0x10, "To")
|
||||
page.addToken(0x11, "CertificateRetrieval")
|
||||
page.addToken(0x12, "RecipientCount")
|
||||
page.addToken(0x13, "MaxCertificates")
|
||||
page.addToken(0x14, "MaxAmbiguousRecipients")
|
||||
page.addToken(0x15, "CertificateCount")
|
||||
page.addToken(0x16, "Availability")
|
||||
page.addToken(0x17, "StartTime")
|
||||
page.addToken(0x18, "EndTime")
|
||||
page.addToken(0x19, "MergedFreeBusy")
|
||||
page.addToken(0x1A, "Picture")
|
||||
page.addToken(0x1B, "MaxSize")
|
||||
page.addToken(0x1C, "Data")
|
||||
page.addToken(0x1D, "MaxPictures")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 11: ValidateCert
|
||||
# region ValidateCert Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "ValidateCert:"
|
||||
page.xmlns = "validatecert"
|
||||
|
||||
page.addToken(0x05, "ValidateCert")
|
||||
page.addToken(0x06, "Certificates")
|
||||
page.addToken(0x07, "Certificate")
|
||||
page.addToken(0x08, "CertificateChain")
|
||||
page.addToken(0x09, "CheckCRL")
|
||||
page.addToken(0x0A, "Status")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 12: Contacts2
|
||||
# region Contacts2 Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Contacts2:"
|
||||
page.xmlns = "contacts2"
|
||||
|
||||
page.addToken(0x05, "CustomerId")
|
||||
page.addToken(0x06, "GovernmentId")
|
||||
page.addToken(0x07, "IMAddress")
|
||||
page.addToken(0x08, "IMAddress2")
|
||||
page.addToken(0x09, "IMAddress3")
|
||||
page.addToken(0x0A, "ManagerName")
|
||||
page.addToken(0x0B, "CompanyMainPhone")
|
||||
page.addToken(0x0C, "AccountName")
|
||||
page.addToken(0x0D, "NickName")
|
||||
page.addToken(0x0E, "MMS")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 13: Ping
|
||||
# region Ping Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Ping:"
|
||||
page.xmlns = "ping"
|
||||
|
||||
page.addToken(0x05, "Ping")
|
||||
page.addToken(0x06, "AutdState") # Per MS-ASWBXML, this tag is not used by protocol
|
||||
page.addToken(0x07, "Status")
|
||||
page.addToken(0x08, "HeartbeatInterval")
|
||||
page.addToken(0x09, "Folders")
|
||||
page.addToken(0x0A, "Folder")
|
||||
page.addToken(0x0B, "Id")
|
||||
page.addToken(0x0C, "Class")
|
||||
page.addToken(0x0D, "MaxFolders")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 14: Provision
|
||||
# region Provision Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Provision:"
|
||||
page.xmlns = "provision"
|
||||
|
||||
page.addToken(0x05, "Provision")
|
||||
page.addToken(0x06, "Policies")
|
||||
page.addToken(0x07, "Policy")
|
||||
page.addToken(0x08, "PolicyType")
|
||||
page.addToken(0x09, "PolicyKey")
|
||||
page.addToken(0x0A, "Data")
|
||||
page.addToken(0x0B, "Status")
|
||||
page.addToken(0x0C, "RemoteWipe")
|
||||
page.addToken(0x0D, "EASProvisionDoc")
|
||||
page.addToken(0x0E, "DevicePasswordEnabled")
|
||||
page.addToken(0x0F, "AlphanumericDevicePasswordRequired")
|
||||
page.addToken(0x10, "RequireStorageCardEncryption")
|
||||
page.addToken(0x11, "PasswordRecoveryEnabled")
|
||||
page.addToken(0x13, "AttachmentsEnabled")
|
||||
page.addToken(0x14, "MinDevicePasswordLength")
|
||||
page.addToken(0x15, "MaxInactivityTimeDeviceLock")
|
||||
page.addToken(0x16, "MaxDevicePasswordFailedAttempts")
|
||||
page.addToken(0x17, "MaxAttachmentSize")
|
||||
page.addToken(0x18, "AllowSimpleDevicePassword")
|
||||
page.addToken(0x19, "DevicePasswordExpiration")
|
||||
page.addToken(0x1A, "DevicePasswordHistory")
|
||||
page.addToken(0x1B, "AllowStorageCard")
|
||||
page.addToken(0x1C, "AllowCamera")
|
||||
page.addToken(0x1D, "RequireDeviceEncryption")
|
||||
page.addToken(0x1E, "AllowUnsignedApplications")
|
||||
page.addToken(0x1F, "AllowUnsignedInstallationPackages")
|
||||
page.addToken(0x20, "MinDevicePasswordComplexCharacters")
|
||||
page.addToken(0x21, "AllowWiFi")
|
||||
page.addToken(0x22, "AllowTextMessaging")
|
||||
page.addToken(0x23, "AllowPOPIMAPEmail")
|
||||
page.addToken(0x24, "AllowBluetooth")
|
||||
page.addToken(0x25, "AllowIrDA")
|
||||
page.addToken(0x26, "RequireManualSyncWhenRoaming")
|
||||
page.addToken(0x27, "AllowDesktopSync")
|
||||
page.addToken(0x28, "MaxCalendarAgeFilter")
|
||||
page.addToken(0x29, "AllowHTMLEmail")
|
||||
page.addToken(0x2A, "MaxEmailAgeFilter")
|
||||
page.addToken(0x2B, "MaxEmailBodyTruncationSize")
|
||||
page.addToken(0x2C, "MaxEmailHTMLBodyTruncationSize")
|
||||
page.addToken(0x2D, "RequireSignedSMIMEMessages")
|
||||
page.addToken(0x2E, "RequireEncryptedSMIMEMessages")
|
||||
page.addToken(0x2F, "RequireSignedSMIMEAlgorithm")
|
||||
page.addToken(0x30, "RequireEncryptionSMIMEAlgorithm")
|
||||
page.addToken(0x31, "AllowSMIMEEncryptionAlgorithmNegotiation")
|
||||
page.addToken(0x32, "AllowSMIMESoftCerts")
|
||||
page.addToken(0x33, "AllowBrowser")
|
||||
page.addToken(0x34, "AllowConsumerEmail")
|
||||
page.addToken(0x35, "AllowRemoteDesktop")
|
||||
page.addToken(0x36, "AllowInternetSharing")
|
||||
page.addToken(0x37, "UnapprovedInROMApplicationList")
|
||||
page.addToken(0x38, "ApplicationName")
|
||||
page.addToken(0x39, "ApprovedApplicationList")
|
||||
page.addToken(0x3A, "Hash")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 15: Search
|
||||
# region Search Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Search:"
|
||||
page.xmlns = "search"
|
||||
|
||||
page.addToken(0x05, "Search")
|
||||
page.addToken(0x07, "Store")
|
||||
page.addToken(0x08, "Name")
|
||||
page.addToken(0x09, "Query")
|
||||
page.addToken(0x0A, "Options")
|
||||
page.addToken(0x0B, "Range")
|
||||
page.addToken(0x0C, "Status")
|
||||
page.addToken(0x0D, "Response")
|
||||
page.addToken(0x0E, "Result")
|
||||
page.addToken(0x0F, "Properties")
|
||||
page.addToken(0x10, "Total")
|
||||
page.addToken(0x11, "EqualTo")
|
||||
page.addToken(0x12, "Value")
|
||||
page.addToken(0x13, "And")
|
||||
page.addToken(0x14, "Or")
|
||||
page.addToken(0x15, "FreeText")
|
||||
page.addToken(0x17, "DeepTraversal")
|
||||
page.addToken(0x18, "LongId")
|
||||
page.addToken(0x19, "RebuildResults")
|
||||
page.addToken(0x1A, "LessThan")
|
||||
page.addToken(0x1B, "GreaterThan")
|
||||
page.addToken(0x1E, "UserName")
|
||||
page.addToken(0x1F, "Password")
|
||||
page.addToken(0x20, "ConversationId")
|
||||
page.addToken(0x21, "Picture")
|
||||
page.addToken(0x22, "MaxSize")
|
||||
page.addToken(0x23, "MaxPictures")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 16: GAL
|
||||
# region GAL Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "GAL:"
|
||||
page.xmlns = "gal"
|
||||
|
||||
page.addToken(0x05, "DisplayName")
|
||||
page.addToken(0x06, "Phone")
|
||||
page.addToken(0x07, "Office")
|
||||
page.addToken(0x08, "Title")
|
||||
page.addToken(0x09, "Company")
|
||||
page.addToken(0x0A, "Alias")
|
||||
page.addToken(0x0B, "FirstName")
|
||||
page.addToken(0x0C, "LastName")
|
||||
page.addToken(0x0D, "HomePhone")
|
||||
page.addToken(0x0E, "MobilePhone")
|
||||
page.addToken(0x0F, "EmailAddress")
|
||||
page.addToken(0x10, "Picture")
|
||||
page.addToken(0x11, "Status")
|
||||
page.addToken(0x12, "Data")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 17: AirSyncBase
|
||||
# region AirSyncBase Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "AirSyncBase:"
|
||||
page.xmlns = "airsyncbase"
|
||||
|
||||
page.addToken(0x05, "BodyPreference")
|
||||
page.addToken(0x06, "Type")
|
||||
page.addToken(0x07, "TruncationSize")
|
||||
page.addToken(0x08, "AllOrNone")
|
||||
page.addToken(0x0A, "Body")
|
||||
page.addToken(0x0B, "Data")
|
||||
page.addToken(0x0C, "EstimatedDataSize")
|
||||
page.addToken(0x0D, "Truncated")
|
||||
page.addToken(0x0E, "Attachments")
|
||||
page.addToken(0x0F, "Attachment")
|
||||
page.addToken(0x10, "DisplayName")
|
||||
page.addToken(0x11, "FileReference")
|
||||
page.addToken(0x12, "Method")
|
||||
page.addToken(0x13, "ContentId")
|
||||
page.addToken(0x14, "ContentLocation")
|
||||
page.addToken(0x15, "IsInline")
|
||||
page.addToken(0x16, "NativeBodyType")
|
||||
page.addToken(0x17, "ContentType")
|
||||
page.addToken(0x18, "Preview")
|
||||
page.addToken(0x19, "BodyPartPreference")
|
||||
page.addToken(0x1A, "BodyPart")
|
||||
page.addToken(0x1B, "Status")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 18: Settings
|
||||
# region Settings Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Settings:"
|
||||
page.xmlns = "settings"
|
||||
|
||||
page.addToken(0x05, "Settings")
|
||||
page.addToken(0x06, "Status")
|
||||
page.addToken(0x07, "Get")
|
||||
page.addToken(0x08, "Set")
|
||||
page.addToken(0x09, "Oof")
|
||||
page.addToken(0x0A, "OofState")
|
||||
page.addToken(0x0B, "StartTime")
|
||||
page.addToken(0x0C, "EndTime")
|
||||
page.addToken(0x0D, "OofMessage")
|
||||
page.addToken(0x0E, "AppliesToInternal")
|
||||
page.addToken(0x0F, "AppliesToExternalKnown")
|
||||
page.addToken(0x10, "AppliesToExternalUnknown")
|
||||
page.addToken(0x11, "Enabled")
|
||||
page.addToken(0x12, "ReplyMessage")
|
||||
page.addToken(0x13, "BodyType")
|
||||
page.addToken(0x14, "DevicePassword")
|
||||
page.addToken(0x15, "Password")
|
||||
page.addToken(0x16, "DeviceInformation")
|
||||
page.addToken(0x17, "Model")
|
||||
page.addToken(0x18, "IMEI")
|
||||
page.addToken(0x19, "FriendlyName")
|
||||
page.addToken(0x1A, "OS")
|
||||
page.addToken(0x1B, "OSLanguage")
|
||||
page.addToken(0x1C, "PhoneNumber")
|
||||
page.addToken(0x1D, "UserInformation")
|
||||
page.addToken(0x1E, "EmailAddresses")
|
||||
page.addToken(0x1F, "SmtpAddress")
|
||||
page.addToken(0x20, "UserAgent")
|
||||
page.addToken(0x21, "EnableOutboundSMS")
|
||||
page.addToken(0x22, "MobileOperator")
|
||||
page.addToken(0x23, "PrimarySmtpAddress")
|
||||
page.addToken(0x24, "Accounts")
|
||||
page.addToken(0x25, "Account")
|
||||
page.addToken(0x26, "AccountId")
|
||||
page.addToken(0x27, "AccountName")
|
||||
page.addToken(0x28, "UserDisplayName")
|
||||
page.addToken(0x29, "SendDisabled")
|
||||
page.addToken(0x2B, "RightsManagementInformation")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 19: DocumentLibrary
|
||||
# region DocumentLibrary Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "DocumentLibrary:"
|
||||
page.xmlns = "documentlibrary"
|
||||
|
||||
page.addToken(0x05, "LinkId")
|
||||
page.addToken(0x06, "DisplayName")
|
||||
page.addToken(0x07, "IsFolder")
|
||||
page.addToken(0x08, "CreationDate")
|
||||
page.addToken(0x09, "LastModifiedDate")
|
||||
page.addToken(0x0A, "IsHidden")
|
||||
page.addToken(0x0B, "ContentLength")
|
||||
page.addToken(0x0C, "ContentType")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 20: ItemOperations
|
||||
# region ItemOperations Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "ItemOperations:"
|
||||
page.xmlns = "itemoperations"
|
||||
|
||||
page.addToken(0x05, "ItemOperations")
|
||||
page.addToken(0x06, "Fetch")
|
||||
page.addToken(0x07, "Store")
|
||||
page.addToken(0x08, "Options")
|
||||
page.addToken(0x09, "Range")
|
||||
page.addToken(0x0A, "Total")
|
||||
page.addToken(0x0B, "Properties")
|
||||
page.addToken(0x0C, "Data")
|
||||
page.addToken(0x0D, "Status")
|
||||
page.addToken(0x0E, "Response")
|
||||
page.addToken(0x0F, "Version")
|
||||
page.addToken(0x10, "Schema")
|
||||
page.addToken(0x11, "Part")
|
||||
page.addToken(0x12, "EmptyFolderContents")
|
||||
page.addToken(0x13, "DeleteSubFolders")
|
||||
page.addToken(0x14, "UserName")
|
||||
page.addToken(0x15, "Password")
|
||||
page.addToken(0x16, "Move")
|
||||
page.addToken(0x17, "DstFldId")
|
||||
page.addToken(0x18, "ConversationId")
|
||||
page.addToken(0x19, "MoveAlways")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 21: ComposeMail
|
||||
# region ComposeMail Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "ComposeMail:"
|
||||
page.xmlns = "composemail"
|
||||
|
||||
page.addToken(0x05, "SendMail")
|
||||
page.addToken(0x06, "SmartForward")
|
||||
page.addToken(0x07, "SmartReply")
|
||||
page.addToken(0x08, "SaveInSentItems")
|
||||
page.addToken(0x09, "ReplaceMime")
|
||||
page.addToken(0x0B, "Source")
|
||||
page.addToken(0x0C, "FolderId")
|
||||
page.addToken(0x0D, "ItemId")
|
||||
page.addToken(0x0E, "LongId")
|
||||
page.addToken(0x0F, "InstanceId")
|
||||
page.addToken(0x10, "MIME")
|
||||
page.addToken(0x11, "ClientId")
|
||||
page.addToken(0x12, "Status")
|
||||
page.addToken(0x13, "AccountId")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 22: Email2
|
||||
# region Email2 Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Email2:"
|
||||
page.xmlns = "email2"
|
||||
|
||||
page.addToken(0x05, "UmCallerID")
|
||||
page.addToken(0x06, "UmUserNotes")
|
||||
page.addToken(0x07, "UmAttDuration")
|
||||
page.addToken(0x08, "UmAttOrder")
|
||||
page.addToken(0x09, "ConversationId")
|
||||
page.addToken(0x0A, "ConversationIndex")
|
||||
page.addToken(0x0B, "LastVerbExecuted")
|
||||
page.addToken(0x0C, "LastVerbExecutionTime")
|
||||
page.addToken(0x0D, "ReceivedAsBcc")
|
||||
page.addToken(0x0E, "Sender")
|
||||
page.addToken(0x0F, "CalendarType")
|
||||
page.addToken(0x10, "IsLeapMonth")
|
||||
page.addToken(0x11, "AccountId")
|
||||
page.addToken(0x12, "FirstDayOfWeek")
|
||||
page.addToken(0x13, "MeetingMessageType")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 23: Notes
|
||||
# region Notes Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "Notes:"
|
||||
page.xmlns = "notes"
|
||||
|
||||
page.addToken(0x05, "Subject")
|
||||
page.addToken(0x06, "MessageClass")
|
||||
page.addToken(0x07, "LastModifiedDate")
|
||||
page.addToken(0x08, "Categories")
|
||||
page.addToken(0x09, "Category")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
|
||||
# Code Page 24: RightsManagement
|
||||
# region RightsManagement Code Page
|
||||
page = ASWBXMLCodePage()
|
||||
page.namespace = "RightsManagement:"
|
||||
page.xmlns = "rightsmanagement"
|
||||
|
||||
page.addToken(0x05, "RightsManagementSupport")
|
||||
page.addToken(0x06, "RightsManagementTemplates")
|
||||
page.addToken(0x07, "RightsManagementTemplate")
|
||||
page.addToken(0x08, "RightsManagementLicense")
|
||||
page.addToken(0x09, "EditAllowed")
|
||||
page.addToken(0x0A, "ReplyAllowed")
|
||||
page.addToken(0x0B, "ReplyAllAllowed")
|
||||
page.addToken(0x0C, "ForwardAllowed")
|
||||
page.addToken(0x0D, "ModifyRecipientsAllowed")
|
||||
page.addToken(0x0E, "ExtractAllowed")
|
||||
page.addToken(0x0F, "PrintAllowed")
|
||||
page.addToken(0x10, "ExportAllowed")
|
||||
page.addToken(0x11, "ProgrammaticAccessAllowed")
|
||||
page.addToken(0x12, "RMOwner")
|
||||
page.addToken(0x13, "ContentExpiryDate")
|
||||
page.addToken(0x14, "TemplateID")
|
||||
page.addToken(0x15, "TemplateName")
|
||||
page.addToken(0x16, "TemplateDescription")
|
||||
page.addToken(0x17, "ContentOwner")
|
||||
page.addToken(0x18, "RemoveRightsManagementDistribution")
|
||||
self.codePages.append(page)
|
||||
# endregion
|
||||
# endregion
|
||||
|
||||
def loadXml(self, strXML):
    """Parse the XML string *strXML* and store the DOM on ``self.xmlDoc``.

    The stored document is a ``xml.dom.minidom`` Document, so callers can
    walk it via ``.childNodes`` / ``.parentNode``.
    """
    parsed = xml.dom.minidom.parseString(strXML)
    self.xmlDoc = parsed
def getXml(self):
    """Return ``self.xmlDoc`` serialized as pretty-printed XML.

    Falls back to the compact ``toxml()`` form if pretty-printing fails.
    Returns None when no document has been loaded yet (same implicit
    contract as before, now explicit).
    """
    if self.xmlDoc is None:
        # Nothing loaded via loadXml()/loadBytes() yet.
        return None
    try:
        return self.xmlDoc.toprettyxml(indent=" ", newl="\n")
    except Exception:
        # Narrowed from a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; serialization errors still fall
        # back to the unformatted document.
        return self.xmlDoc.toxml()
||||
def loadBytes(self, byteWBXML):
    """Decode a WBXML byte stream into the DOM held in ``self.xmlDoc``.

    Walks the stream token by token per MS-ASWBXML: header (version,
    public id, charset, string-table length), then code-page switches,
    element open/close tokens, inline strings and opaque CDATA.

    Raises:
        InvalidDataException: on any construct MS-ASWBXML forbids
            (non-UTF-8 charset, string tables, unknown code pages,
            attributes on tags, unused global tokens, unbalanced END).
    """
    # New elements are appended under whatever document is already loaded.
    currentNode = self.xmlDoc

    wbXMLBytes = ASWBXMLByteQueue(byteWBXML)

    # Version is ignored
    version = wbXMLBytes.dequeueAndLog()

    # Public Identifier is ignored
    publicId = wbXMLBytes.dequeueMultibyteInt()

    logging.debug("Version: %d, Public Identifier: %d" % (version, publicId))

    # Character set: only UTF-8 (IANA MIBenum 0x6A) is supported.
    charset = wbXMLBytes.dequeueMultibyteInt()
    if charset != 0x6A:
        raise InvalidDataException("ASWBXML only supports UTF-8 encoded XML.")

    # String table length must be 0: MS-ASWBXML does not use string tables.
    stringTableLength = wbXMLBytes.dequeueMultibyteInt()
    if stringTableLength != 0:
        raise InvalidDataException("WBXML data contains a string table.")

    # Global tokens MS-ASWBXML never emits; hitting one means bad data.
    unusedArray = [GlobalTokens.ENTITY, GlobalTokens.EXT_0, GlobalTokens.EXT_1,
                   GlobalTokens.EXT_2, GlobalTokens.EXT_I_0, GlobalTokens.EXT_I_1,
                   GlobalTokens.EXT_I_2, GlobalTokens.EXT_T_0, GlobalTokens.EXT_T_1,
                   GlobalTokens.EXT_T_2, GlobalTokens.LITERAL, GlobalTokens.LITERAL_A,
                   GlobalTokens.LITERAL_AC, GlobalTokens.LITERAL_C, GlobalTokens.PI,
                   GlobalTokens.STR_T]

    while wbXMLBytes.qsize() > 0:
        currentByte = wbXMLBytes.dequeueAndLog()
        if currentByte == GlobalTokens.SWITCH_PAGE:
            newCodePage = wbXMLBytes.dequeueAndLog()
            if 0 <= newCodePage < 25:
                self.currentCodePage = newCodePage
            else:
                # BUG FIX: report the offending page id (newCodePage); the
                # original formatted currentByte, which is always 0x00 here
                # (the SWITCH_PAGE token itself).
                raise InvalidDataException("Unknown code page ID 0x{0:X} encountered in WBXML".format(newCodePage))
        elif currentByte == GlobalTokens.END:
            # END closes the current element; move back up the tree.
            # (Unreachable `break` after the raise removed.)
            if currentNode is not None and currentNode.parentNode is not None:
                currentNode = currentNode.parentNode
            else:
                raise InvalidDataException("END global token encountered out of sequence")
        elif currentByte == GlobalTokens.OPAQUE:
            # Opaque data: multi-byte length, then raw payload -> CDATA node.
            CDATALength = wbXMLBytes.dequeueMultibyteInt()
            newOpaqueNode = self.xmlDoc.createCDATASection(wbXMLBytes.dequeueString(CDATALength))
            currentNode.appendChild(newOpaqueNode)
        elif currentByte == GlobalTokens.STR_I:
            # Inline null-terminated string -> text node.
            newTextNode = self.xmlDoc.createTextNode(wbXMLBytes.dequeueString())
            currentNode.appendChild(newTextNode)
        elif currentByte in unusedArray:
            raise InvalidDataException("Encountered unknown global token 0x{0:X}.".format(currentByte))
        else:
            # Tag token: bit 7 = has attributes, bit 6 = has content.
            hasAttributes = (currentByte & 0x80) > 0
            hasContent = (currentByte & 0x40) > 0

            token = currentByte & 0x3F
            if hasAttributes:
                # MS-ASWBXML never puts attributes on tags.
                raise InvalidDataException("Token 0x{0:X} has attributes.".format(token))

            strTag = self.codePages[self.currentCodePage].getTag(token)
            if strTag is None:
                # BUG FIX: "{0,2:X}" is an invalid format spec and raised at
                # runtime; "{0:02X}" zero-pads to two hex digits as intended.
                strTag = "UNKNOWN_TAG_{0:02X}".format(token)

            newNode = self.xmlDoc.createElement(strTag)
            # not sure if this should be set on every node or not
            #newNode.setAttribute("xmlns", self.codePages[self.currentCodePage].xmlns)
            currentNode.appendChild(newNode)

            # Only descend when the element was declared with content.
            if hasContent:
                currentNode = newNode

    logging.debug("Total bytes dequeued: %d" % wbXMLBytes.bytesDequeued)
|
||||
@@ -0,0 +1,102 @@
|
||||
#!/usr/bin/env python3
|
||||
'''
|
||||
@author: David Shaw, shawd@vmware.com
|
||||
|
||||
Inspired by EAS Inspector for Fiddler
|
||||
https://easinspectorforfiddler.codeplex.com
|
||||
|
||||
----- The MIT License (MIT) -----
|
||||
Filename: ASWBXMLByteQueue.py
|
||||
Copyright (c) 2014, David P. Shaw
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
'''
|
||||
from queue import Queue
|
||||
import logging
|
||||
|
||||
class ASWBXMLByteQueue(Queue):
    """FIFO of raw WBXML bytes with logging plus WBXML decoding helpers."""

    def __init__(self, wbxmlBytes):
        """Enqueue every byte of *wbxmlBytes* and count what went in."""
        self.bytesDequeued = 0
        self.bytesEnqueued = 0

        Queue.__init__(self)

        for octet in wbxmlBytes:
            self.put(octet)
            self.bytesEnqueued += 1

        logging.debug("Array byte count: %d, enqueued: %d" % (self.qsize(), self.bytesEnqueued))

    def dequeueAndLog(self):
        """Pop a single byte, bump the running counter, and log it."""
        octet = self.get()
        self.bytesDequeued += 1
        logging.debug("Dequeued byte 0x{0:X} ({1} total)".format(octet, self.bytesDequeued))
        return octet

    def checkContinuationBit(self, byteval):
        """Return True when the high (continuation) bit of *byteval* is set."""
        return bool(byteval & 0x80)

    def dequeueMultibyteInt(self):
        """Decode a WBXML multi-byte integer (7 data bits per byte).

        Returns None (implicitly) if the queue runs dry mid-integer,
        matching the prior behavior.
        """
        value = 0
        while self.qsize() > 0:
            octet = self.dequeueAndLog()
            value = (value << 7) | (octet & 0x7F)
            if not self.checkContinuationBit(octet):
                return value

    def dequeueString(self, length=None):
        """Pop *length* bytes, or a null-terminated run, as a str.

        NOTE(review): each byte is mapped 1:1 via chr(), so multi-byte
        UTF-8 sequences are not decoded correctly — preserved as-is from
        the original (its own TODO acknowledged this).
        """
        chars = []
        if length is not None:
            for _ in range(length):
                if self.qsize() == 0:
                    break
                chars.append(chr(self.dequeueAndLog()))
        else:
            while True:
                octet = self.dequeueAndLog()
                if octet == 0x00:
                    break
                chars.append(chr(octet))
        return "".join(chars)
||||
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env python3
|
||||
'''
|
||||
@author: David Shaw, shawd@vmware.com
|
||||
|
||||
Inspired by EAS Inspector for Fiddler
|
||||
https://easinspectorforfiddler.codeplex.com
|
||||
|
||||
----- The MIT License (MIT) -----
|
||||
Filename: ASWBXMLCodePage.py
|
||||
Copyright (c) 2014, David P. Shaw
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
'''
|
||||
class ASWBXMLCodePage:
    """Bidirectional token <-> tag mapping for one WBXML code page."""

    def __init__(self):
        self.namespace = ""     # human-readable namespace label, e.g. "Ping:"
        self.xmlns = ""         # xmlns prefix for this page
        self.tokenLookup = {}   # token byte -> tag name
        self.tagLookup = {}     # tag name -> token byte

    def addToken(self, token, tag):
        """Register *token* <-> *tag* in both lookup directions."""
        self.tokenLookup[token] = tag
        self.tagLookup[tag] = token

    def getToken(self, tag):
        """Return the token byte for *tag*, or 0xFF when unknown."""
        return self.tagLookup.get(tag, 0xFF)

    def getTag(self, token):
        """Return the tag name for *token*, or None when unknown."""
        return self.tokenLookup.get(token)

    def __repr__(self):
        return str(self.tokenLookup)
||||
@@ -0,0 +1,50 @@
|
||||
#!/usr/bin/env python3
|
||||
'''
|
||||
@author: David Shaw, shawd@vmware.com
|
||||
|
||||
Inspired by EAS Inspector for Fiddler
|
||||
https://easinspectorforfiddler.codeplex.com
|
||||
|
||||
----- The MIT License (MIT) -----
|
||||
Filename: GlobalTokens.py
|
||||
Copyright (c) 2014, David P. Shaw
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
'''
|
||||
class GlobalTokens:
    # WBXML global token codes as used by MS-ASWBXML (values match the
    # WBXML 1.3 specification's global token table).
    SWITCH_PAGE = 0x00  # switch to a new tag code page (page id byte follows)
    END = 0x01          # close the current element
    ENTITY = 0x02       # character entity follows
    STR_I = 0x03        # inline null-terminated string follows
    LITERAL = 0x04      # unknown tag, string-table reference follows
    EXT_I_0 = 0x40      # inline-string extension token 0
    EXT_I_1 = 0x41      # inline-string extension token 1
    EXT_I_2 = 0x42      # inline-string extension token 2
    PI = 0x43           # processing instruction
    LITERAL_C = 0x44    # unknown tag, with content
    EXT_T_0 = 0x80      # integer-argument extension token 0
    EXT_T_1 = 0x81      # integer-argument extension token 1
    EXT_T_2 = 0x82      # integer-argument extension token 2
    STR_T = 0x83        # string-table reference
    LITERAL_A = 0x84    # unknown tag, with attributes
    EXT_0 = 0xC0        # single-byte extension token 0
    EXT_1 = 0xC1        # single-byte extension token 1
    EXT_2 = 0xC2        # single-byte extension token 2
    OPAQUE = 0xC3       # opaque data (multi-byte length + payload) follows
    LITERAL_AC = 0xC4   # unknown tag, with attributes and content
||||
@@ -0,0 +1,31 @@
|
||||
#!/usr/bin/env python3
|
||||
'''
|
||||
@author: David Shaw, shawd@vmware.com
|
||||
|
||||
Inspired by EAS Inspector for Fiddler
|
||||
https://easinspectorforfiddler.codeplex.com
|
||||
|
||||
----- The MIT License (MIT) -----
|
||||
Filename: InvalidDataException.py
|
||||
Copyright (c) 2014, David P. Shaw
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in
|
||||
all copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
THE SOFTWARE.
|
||||
'''
|
||||
class InvalidDataException(Exception):
    """Raised when a WBXML stream violates the MS-ASWBXML encoding rules."""
|
||||
Reference in New Issue
Block a user