2025-12-25 upload

This commit is contained in:
“shengyudong”
2025-12-25 11:16:59 +08:00
commit 322ac74336
2241 changed files with 639966 additions and 0 deletions

View File

@@ -0,0 +1,3 @@
# Package init: re-export the web master module so it is reachable as an
# attribute of this package.
from mitmproxy.tools.web import master

__all__ = ["master"]

View File

@@ -0,0 +1,930 @@
from __future__ import annotations
import asyncio
import functools
import hashlib
import json
import logging
import os.path
import re
import secrets
import sys
from collections.abc import Callable
from collections.abc import Sequence
from io import BytesIO
from typing import Any
from typing import Awaitable
from typing import ClassVar
from typing import Concatenate
from typing import Literal
from typing import Optional
import tornado.escape
import tornado.web
import tornado.websocket
import mitmproxy.flow
import mitmproxy.tools.web.master
import mitmproxy_rs
from mitmproxy import certs
from mitmproxy import command
from mitmproxy import contentviews
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import io
from mitmproxy import log
from mitmproxy import optmanager
from mitmproxy import version
from mitmproxy.dns import DNSFlow
from mitmproxy.http import HTTPFlow
from mitmproxy.tcp import TCPFlow
from mitmproxy.tcp import TCPMessage
from mitmproxy.tools.web.webaddons import WebAuth
from mitmproxy.udp import UDPFlow
from mitmproxy.udp import UDPMessage
from mitmproxy.utils import asyncio_utils
from mitmproxy.utils.emoji import emoji
from mitmproxy.utils.strutils import always_str
from mitmproxy.utils.strutils import cut_after_n_lines
from mitmproxy.websocket import WebSocketMessage
# A minimal 1x1 fully transparent PNG (the IHDR chunk encodes width=1,
# height=1). Served as a placeholder when a real image is unavailable,
# e.g. when a process icon cannot be extracted.
TRANSPARENT_PNG = (
    b"\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x01\x00\x00\x00\x01\x08"
    b"\x04\x00\x00\x00\xb5\x1c\x0c\x02\x00\x00\x00\x0bIDATx\xdac\xfc\xff\x07"
    b"\x00\x02\x00\x01\xfc\xa8Q\rh\x00\x00\x00\x00IEND\xaeB`\x82"
)

logger = logging.getLogger(__name__)
def cert_to_json(certs: Sequence[certs.Cert]) -> dict | None:
    """Serialize the first (leaf) certificate of a chain for the web UI.

    Returns None for an empty chain. Only the leaf is serialized; the rest
    of the chain is dropped to save transmission space.
    """
    if not certs:
        return None
    leaf = certs[0]
    return {
        "keyinfo": leaf.keyinfo,
        "sha256": leaf.fingerprint().hex(),
        # Validity bounds as unix timestamps (seconds).
        "notbefore": int(leaf.notbefore.timestamp()),
        "notafter": int(leaf.notafter.timestamp()),
        # Serial numbers may exceed JSON-safe integer range, so stringify.
        "serial": str(leaf.serial),
        "subject": leaf.subject,
        "issuer": leaf.issuer,
        "altnames": [str(entry.value) for entry in leaf.altnames],
    }
def flow_to_json(flow: mitmproxy.flow.Flow) -> dict:
    """
    Remove flow message content and cert to save transmission space.

    Args:
        flow: The original flow.

    Sync with web/src/flow.ts.
    """
    f = {
        "id": flow.id,
        "intercepted": flow.intercepted,
        "is_replay": flow.is_replay,
        "type": flow.type,
        "modified": flow.modified(),
        # Marker shorthand is resolved to an emoji glyph; unknown names fall
        # back to 🔴. Unmarked flows send the empty string.
        "marked": emoji.get(flow.marked, "🔴") if flow.marked else "",
        "comment": flow.comment,
        "timestamp_created": flow.timestamp_created,
    }

    if flow.client_conn:
        f["client_conn"] = {
            "id": flow.client_conn.id,
            "peername": flow.client_conn.peername,
            "sockname": flow.client_conn.sockname,
            "tls_established": flow.client_conn.tls_established,
            # Only the leaf certificate is serialized (see cert_to_json).
            "cert": cert_to_json(flow.client_conn.certificate_list),
            "sni": flow.client_conn.sni,
            "cipher": flow.client_conn.cipher,
            # ALPN is raw bytes; render undecodable bytes as backslash escapes.
            "alpn": always_str(flow.client_conn.alpn, "ascii", "backslashreplace"),
            "tls_version": flow.client_conn.tls_version,
            "timestamp_start": flow.client_conn.timestamp_start,
            "timestamp_tls_setup": flow.client_conn.timestamp_tls_setup,
            "timestamp_end": flow.client_conn.timestamp_end,
        }

    if flow.server_conn:
        f["server_conn"] = {
            "id": flow.server_conn.id,
            "peername": flow.server_conn.peername,
            "sockname": flow.server_conn.sockname,
            "address": flow.server_conn.address,
            "tls_established": flow.server_conn.tls_established,
            "cert": cert_to_json(flow.server_conn.certificate_list),
            "sni": flow.server_conn.sni,
            "cipher": flow.server_conn.cipher,
            "alpn": always_str(flow.server_conn.alpn, "ascii", "backslashreplace"),
            "tls_version": flow.server_conn.tls_version,
            "timestamp_start": flow.server_conn.timestamp_start,
            "timestamp_tcp_setup": flow.server_conn.timestamp_tcp_setup,
            "timestamp_tls_setup": flow.server_conn.timestamp_tls_setup,
            "timestamp_end": flow.server_conn.timestamp_end,
        }
    if flow.error:
        f["error"] = flow.error.get_state()

    if isinstance(flow, HTTPFlow):
        content_length: int | None
        content_hash: str | None
        # Bodies are not transmitted; only length and SHA-256 digest are sent.
        # The raw content itself is served separately (see FlowContent).
        if flow.request.raw_content is not None:
            content_length = len(flow.request.raw_content)
            content_hash = hashlib.sha256(flow.request.raw_content).hexdigest()
        else:
            content_length = None
            content_hash = None
        f["request"] = {
            "method": flow.request.method,
            "scheme": flow.request.scheme,
            "host": flow.request.host,
            "port": flow.request.port,
            "path": flow.request.path,
            "http_version": flow.request.http_version,
            # items(True) yields multi-value headers as individual pairs.
            "headers": tuple(flow.request.headers.items(True)),
            "contentLength": content_length,
            "contentHash": content_hash,
            "timestamp_start": flow.request.timestamp_start,
            "timestamp_end": flow.request.timestamp_end,
            "pretty_host": flow.request.pretty_host,
        }
        if flow.response:
            if flow.response.raw_content is not None:
                content_length = len(flow.response.raw_content)
                content_hash = hashlib.sha256(flow.response.raw_content).hexdigest()
            else:
                content_length = None
                content_hash = None
            f["response"] = {
                "http_version": flow.response.http_version,
                "status_code": flow.response.status_code,
                "reason": flow.response.reason,
                "headers": tuple(flow.response.headers.items(True)),
                "contentLength": content_length,
                "contentHash": content_hash,
                "timestamp_start": flow.response.timestamp_start,
                "timestamp_end": flow.response.timestamp_end,
            }
            if flow.response.data.trailers:
                f["response"]["trailers"] = tuple(
                    flow.response.data.trailers.items(True)
                )
        if flow.websocket:
            # Like bodies, websocket message content is summarized, not sent.
            f["websocket"] = {
                "messages_meta": {
                    "contentLength": sum(
                        len(x.content) for x in flow.websocket.messages
                    ),
                    "count": len(flow.websocket.messages),
                    "timestamp_last": flow.websocket.messages[-1].timestamp
                    if flow.websocket.messages
                    else None,
                },
                "closed_by_client": flow.websocket.closed_by_client,
                "close_code": flow.websocket.close_code,
                "close_reason": flow.websocket.close_reason,
                "timestamp_end": flow.websocket.timestamp_end,
            }
    elif isinstance(flow, (TCPFlow, UDPFlow)):
        f["messages_meta"] = {
            "contentLength": sum(len(x.content) for x in flow.messages),
            "count": len(flow.messages),
            "timestamp_last": flow.messages[-1].timestamp if flow.messages else None,
        }
    elif isinstance(flow, DNSFlow):
        # DNS messages carry their own JSON serialization.
        f["request"] = flow.request.to_json()
        if flow.response:
            f["response"] = flow.response.to_json()

    return f
def logentry_to_json(e: log.LogEntry) -> dict:
    """Serialize a log entry into the JSON shape consumed by the web UI."""
    # The UI only needs *some* stable identifier, so the object's id() is fine.
    return {"id": id(e), "message": e.msg, "level": e.level}
class APIError(tornado.web.HTTPError):
    """HTTP error whose log_message is written verbatim as the response body
    (see RequestHandler.write_error)."""

    pass
class AuthRequestHandler(tornado.web.RequestHandler):
    """Request handler base that gates every HTTP method behind mitmweb auth."""

    # Value stored in the signed cookie once a client has authenticated.
    AUTH_COOKIE_VALUE = b"y"

    def __init_subclass__(cls, **kwargs):
        """Automatically wrap all request handlers with `_require_auth`."""
        for method in cls.SUPPORTED_METHODS:
            method = method.lower()
            fn = getattr(cls, method)
            # Only wrap methods the subclass actually implements.
            if fn is not tornado.web.RequestHandler._unimplemented_method:
                setattr(cls, method, AuthRequestHandler._require_auth(fn))

    def auth_fail(self, invalid_password: bool) -> None:
        """
        Will be called when returning a 403.
        May write a login form as the response.
        """

    @staticmethod
    def _require_auth[**P, R](
        fn: Callable[Concatenate[AuthRequestHandler, P], R],
    ) -> Callable[Concatenate[AuthRequestHandler, P], R | None]:
        @functools.wraps(fn)
        def wrapper(
            self: AuthRequestHandler, *args: P.args, **kwargs: P.kwargs
        ) -> R | None:
            if not self.current_user:
                # Accept the password from an "Authorization: Bearer ..."
                # header, falling back to a "token" request argument.
                password = ""
                if auth_header := self.request.headers.get("Authorization"):
                    auth_scheme, _, auth_params = auth_header.partition(" ")
                    if auth_scheme == "Bearer":
                        password = auth_params
                if not password:
                    password = self.get_argument("token", default="")
                if not self.settings["is_valid_password"](password):
                    self.set_status(403)
                    # invalid_password is True only if a password was supplied.
                    self.auth_fail(bool(password))
                    return None
                # Valid credentials: remember the client via a signed cookie
                # so subsequent requests skip the password check.
                self.set_signed_cookie(
                    self.settings["auth_cookie_name"](),
                    self.AUTH_COOKIE_VALUE,
                    expires_days=400,
                    httponly=True,
                    samesite="Strict",
                )
            return fn(self, *args, **kwargs)

        return wrapper

    def get_current_user(self) -> bool:
        # Truthy iff the signed auth cookie is present and verifies.
        return (
            self.get_signed_cookie(self.settings["auth_cookie_name"](), min_version=2)
            == self.AUTH_COOKIE_VALUE
        )
class RequestHandler(AuthRequestHandler):
    """Base class for mitmweb's JSON API handlers.

    Adds cross-site request protection, security headers, JSON body helpers,
    and convenience accessors for the master, view and the addressed flow.
    """

    application: Application

    def prepare(self):
        # CSRF protection: reject state-changing requests whose browser-set
        # Sec-Fetch-Site header indicates a cross-site initiator.
        if (
            self.request.method not in ("GET", "HEAD", "OPTIONS")
            and "Sec-Fetch-Site" in self.request.headers
            and self.request.headers["Sec-Fetch-Site"] not in ("same-origin", "none")
        ):
            # Bugfix: raise tornado.web.HTTPError so tornado renders a real
            # 403 response. tornado.httpclient.HTTPError is the HTTP *client*
            # exception type and would surface as a 500 here.
            raise tornado.web.HTTPError(403)

    def write(self, chunk: str | bytes | dict | list):
        # Writing arrays on the top level is ok nowadays.
        # http://flask.pocoo.org/docs/0.11/security/#json-security
        if isinstance(chunk, list):
            chunk = tornado.escape.json_encode(chunk)
            self.set_header("Content-Type", "application/json; charset=UTF-8")
        super().write(chunk)

    def set_default_headers(self):
        super().set_default_headers()
        self.set_header("Server", version.MITMPROXY)
        self.set_header("X-Frame-Options", "DENY")
        self.add_header("X-XSS-Protection", "1; mode=block")
        self.add_header("X-Content-Type-Options", "nosniff")
        self.add_header(
            "Content-Security-Policy",
            "default-src 'self'; "
            "connect-src 'self' ws:; "
            "img-src 'self' data:; "
            "style-src 'self' 'unsafe-inline'",
        )

    @property
    def json(self):
        """Parsed JSON request body. Raises APIError(400) on bad input."""
        if not self.request.headers.get("Content-Type", "").startswith(
            "application/json"
        ):
            raise APIError(400, "Invalid Content-Type, expected application/json.")
        try:
            return json.loads(self.request.body.decode())
        except Exception as e:
            raise APIError(400, f"Malformed JSON: {e}")

    @property
    def filecontents(self):
        """
        Accept either a multipart/form file upload or just take the plain request body.
        """
        if self.request.files:
            return next(iter(self.request.files.values()))[0].body
        else:
            return self.request.body

    @property
    def view(self) -> mitmproxy.addons.view.View:
        return self.application.master.view

    @property
    def master(self) -> mitmproxy.tools.web.master.WebMaster:
        return self.application.master

    @property
    def flow(self) -> mitmproxy.flow.Flow:
        """The flow addressed by the URL's flow_id. Raises APIError(404)."""
        flow_id = str(self.path_kwargs["flow_id"])
        # FIXME: Add a facility to addon.view to safely access the store
        flow = self.view.get_by_id(flow_id)
        if flow:
            return flow
        else:
            raise APIError(404, "Flow not found.")

    def write_error(self, status_code: int, **kwargs):
        # APIError messages are returned verbatim to the API client.
        if "exc_info" in kwargs and isinstance(kwargs["exc_info"][1], APIError):
            self.finish(kwargs["exc_info"][1].log_message)
        else:
            super().write_error(status_code, **kwargs)
class IndexHandler(RequestHandler):
    """Serve the single-page app; show the login form on failed auth."""

    def auth_fail(self, invalid_password: bool) -> None:
        # Instead of a bare 403, render a login form.
        self.render("login.html", invalid_password=invalid_password)

    def get(self):
        self.render("../index.html")

    # The login form POSTs back to "/"; on success, serve the app as usual.
    post = get  # login form
class FilterHelp(RequestHandler):
    """Expose the flow-filter language documentation as JSON."""

    def get(self):
        self.write({"commands": flowfilter.help})
class WebSocketEventBroadcaster(tornado.websocket.WebSocketHandler, AuthRequestHandler):
    """WebSocket handler base that fans messages out to all open connections."""

    # raise an error if inherited class doesn't specify its own instance.
    connections: ClassVar[set[WebSocketEventBroadcaster]]

    # Per-connection outgoing queue, drained by send_task in the background.
    _send_queue: asyncio.Queue[bytes]
    _send_task: asyncio.Task[None]

    def prepare(self) -> Optional[Awaitable[None]]:
        # Touch the XSRF token so the cookie is set for websocket requests.
        token = self.xsrf_token  # https://github.com/tornadoweb/tornado/issues/645
        assert token
        return None

    def open(self, *args, **kwargs):
        self.connections.add(self)
        self._send_queue = asyncio.Queue()
        # Python 3.13+: use _send_queue.shutdown() and we can use keep_ref=True here.
        self._send_task = asyncio_utils.create_task(
            self.send_task(),
            name="WebSocket send task",
            keep_ref=False,
        )

    def on_close(self):
        self.connections.discard(self)
        self._send_task.cancel()

    @classmethod
    def broadcast(cls, **kwargs):
        # Serialize once, then enqueue the same payload for every connection.
        message = cls._json_dumps(kwargs)
        for conn in cls.connections:
            conn.send(message)

    def send(self, message: bytes):
        # Non-blocking: the queue is unbounded; actual writes happen in send_task.
        self._send_queue.put_nowait(message)

    async def send_task(self):
        """Forever drain the send queue; tear down on a closed socket."""
        while True:
            message = await self._send_queue.get()
            try:
                await self.write_message(message)
            except tornado.websocket.WebSocketClosedError:
                self.on_close()

    @staticmethod
    def _json_dumps(d):
        # ensure_ascii=False keeps payloads compact; surrogateescape lets
        # lone surrogates in message data pass through without raising.
        return json.dumps(d, ensure_ascii=False).encode("utf8", "surrogateescape")
class ClientConnection(WebSocketEventBroadcaster):
    """The /updates websocket: streams flow, event and option updates to the UI."""

    connections: ClassVar[set[ClientConnection]] = set()  # type: ignore
    application: Application

    def __init__(self, application: Application, request, **kwargs):
        super().__init__(application, request, **kwargs)
        self.filters: dict[str, flowfilter.TFilter] = {}  # filters per connection

    @classmethod
    def broadcast_flow_reset(cls) -> None:
        # Tell every client to drop its flow list, then re-run each client's
        # active filters so the matching flow ids get recomputed and resent.
        for conn in cls.connections:
            conn.send(cls._json_dumps({"type": "flows/reset"}))
            for name, expr in conn.filters.copy().items():
                conn.update_filter(name, expr.pattern)

    @classmethod
    def broadcast_flow(
        cls,
        type: Literal["flows/add", "flows/update"],
        f: mitmproxy.flow.Flow,
    ) -> None:
        # Serialize the flow once for all connections.
        flow_json = flow_to_json(f)
        for conn in cls.connections:
            conn._broadcast_flow(type, f, flow_json)

    def _broadcast_flow(
        self,
        type: Literal["flows/add", "flows/update"],
        f: mitmproxy.flow.Flow,
        flow_json: dict,  # Passing the flow_json dictionary to avoid recalculating it for each client
    ) -> None:
        # Evaluate this connection's filters against the flow so the client
        # can update its filtered views without another round-trip.
        filters = {name: bool(expr(f)) for name, expr in self.filters.items()}
        message = self._json_dumps(
            {
                "type": type,
                "payload": {
                    "flow": flow_json,
                    "matching_filters": filters,
                },
            },
        )
        self.send(message)

    def update_filter(self, name: str, expr: str) -> None:
        # An empty expression clears the named filter.
        if expr:
            filt = flowfilter.parse(expr)
            self.filters[name] = filt
            matching_flow_ids = [f.id for f in self.application.master.view if filt(f)]
        else:
            self.filters.pop(name, None)
            matching_flow_ids = None
        message = self._json_dumps(
            {
                "type": "flows/filterUpdate",
                "payload": {
                    "name": name,
                    "matching_flow_ids": matching_flow_ids,
                },
            },
        )
        self.send(message=message)

    async def on_message(self, message: str | bytes):
        try:
            data = json.loads(message)
            match data["type"]:
                case "flows/updateFilter":
                    self.update_filter(data["payload"]["name"], data["payload"]["expr"])
                case other:
                    raise ValueError(f"Unsupported command: {other}")
        except Exception as e:
            # Any malformed client message terminates the connection.
            logger.error(f"Error processing message from {self}: {e}")
            self.close(code=1011, reason="Internal server error.")
class Flows(RequestHandler):
    """List every flow currently held by the view."""

    def get(self):
        serialized = [flow_to_json(f) for f in self.view]
        self.write(serialized)
class DumpFlows(RequestHandler):
    """Download the current flows as a mitmproxy dump file (GET), or replace
    them with an uploaded dump (POST)."""

    def get(self) -> None:
        self.set_header("Content-Disposition", "attachment; filename=flows")
        self.set_header("Content-Type", "application/octet-stream")
        match: Callable[[mitmproxy.flow.Flow], bool]
        try:
            match = flowfilter.parse(self.request.arguments["filter"][0].decode())
        except ValueError as e:  # thrown by flowfilter.parse if filter is invalid
            # Include the parse error so the client knows what was wrong.
            raise APIError(400, f"Invalid filter argument / regex: {e}") from e
        except (
            KeyError,
            IndexError,
        ):  # Key+Index: ["filter"][0] can fail, if it's not set

            # No filter supplied: include every flow.
            def match(_) -> bool:
                return True

        with BytesIO() as bio:
            fw = io.FlowWriter(bio)
            for f in self.view:
                if match(f):
                    fw.add(f)
            self.write(bio.getvalue())

    async def post(self):
        # Uploading a dump replaces the current capture entirely.
        self.view.clear()
        with BytesIO(self.filecontents) as bio:
            for f in io.FlowReader(bio).stream():
                await self.master.load_flow(f)
class ClearAll(RequestHandler):
    def post(self):
        # Drop all captured flows and the buffered event log.
        self.view.clear()
        self.master.events.clear()
class ResumeFlows(RequestHandler):
    """Resume every currently intercepted flow."""

    def post(self):
        for flow in self.view:
            if flow.intercepted:
                flow.resume()
                self.view.update([flow])
class KillFlows(RequestHandler):
    """Abort every flow that can still be killed."""

    def post(self):
        for flow in self.view:
            if not flow.killable:
                continue
            flow.kill()
            self.view.update([flow])
class ResumeFlow(RequestHandler):
    """Resume a single intercepted flow."""

    def post(self, flow_id):
        target = self.flow
        target.resume()
        self.view.update([target])
class KillFlow(RequestHandler):
    """Abort a single flow if it can still be killed."""

    def post(self, flow_id):
        target = self.flow
        if target.killable:
            target.kill()
            self.view.update([target])
class FlowHandler(RequestHandler):
    def delete(self, flow_id):
        # Abort the flow first if it is still live, then drop it from the view.
        if self.flow.killable:
            self.flow.kill()
        self.view.remove([self.flow])

    def put(self, flow_id) -> None:
        """Apply a JSON patch to a flow; roll back everything on a bad patch."""
        flow: mitmproxy.flow.Flow = self.flow
        # Snapshot so that an invalid update can be reverted atomically.
        flow.backup()
        try:
            for a, b in self.json.items():
                # NOTE(review): hasattr(flow, "request"/"response") is True for
                # HTTPFlows even when the attribute is None; a patch against a
                # missing response would then fail with a 500 — confirm intended.
                if a == "request" and hasattr(flow, "request"):
                    request: mitmproxy.http.Request = flow.request
                    for k, v in b.items():
                        if k in ["method", "scheme", "host", "path", "http_version"]:
                            setattr(request, k, str(v))
                        elif k == "port":
                            request.port = int(v)
                        elif k == "headers":
                            # Replace headers wholesale with the given pairs.
                            request.headers.clear()
                            for header in v:
                                request.headers.add(*header)
                        elif k == "trailers":
                            if request.trailers is not None:
                                request.trailers.clear()
                            else:
                                request.trailers = mitmproxy.http.Headers()
                            for trailer in v:
                                request.trailers.add(*trailer)
                        elif k == "content":
                            request.text = v
                        else:
                            raise APIError(400, f"Unknown update request.{k}: {v}")
                elif a == "response" and hasattr(flow, "response"):
                    response: mitmproxy.http.Response = flow.response
                    for k, v in b.items():
                        if k in ["msg", "http_version"]:
                            setattr(response, k, str(v))
                        elif k == "code":
                            response.status_code = int(v)
                        elif k == "headers":
                            response.headers.clear()
                            for header in v:
                                response.headers.add(*header)
                        elif k == "trailers":
                            if response.trailers is not None:
                                response.trailers.clear()
                            else:
                                response.trailers = mitmproxy.http.Headers()
                            for trailer in v:
                                response.trailers.add(*trailer)
                        elif k == "content":
                            response.text = v
                        else:
                            raise APIError(400, f"Unknown update response.{k}: {v}")
                elif a == "marked":
                    flow.marked = b
                elif a == "comment":
                    flow.comment = b
                else:
                    raise APIError(400, f"Unknown update {a}: {b}")
        except APIError:
            # Restore the snapshot before propagating the 400.
            flow.revert()
            raise
        self.view.update([flow])
class DuplicateFlow(RequestHandler):
    """Copy a flow into the view and return the new flow's id."""

    def post(self, flow_id):
        duplicate = self.flow.copy()
        self.view.add([duplicate])
        self.write(duplicate.id)
class RevertFlow(RequestHandler):
    """Undo any modifications made to a flow since its backup."""

    def post(self, flow_id):
        target = self.flow
        if target.modified():
            target.revert()
            self.view.update([target])
class ReplayFlow(RequestHandler):
    def post(self, flow_id):
        # Re-send the flow's request via the client replay command.
        self.master.commands.call("replay.client", [self.flow])
class FlowContent(RequestHandler):
    def post(self, flow_id, message):
        # Replace the message body with the uploaded content.
        self.flow.backup()
        message = getattr(self.flow, message)
        message.content = self.filecontents
        self.view.update([self.flow])

    def get(self, flow_id, message):
        message = getattr(self.flow, message)

        assert isinstance(self.flow, HTTPFlow)

        # Try to reuse the filename from the original Content-Disposition
        # header; otherwise derive one from the last URL path segment.
        original_cd = message.headers.get("Content-Disposition", None)
        filename = None
        if original_cd:
            if m := re.search(r'filename=([-\w" .()]+)', original_cd):
                filename = m.group(1)
        if not filename:
            filename = self.flow.request.path.split("?")[0].split("/")[-1]

        # Strip anything outside the safe character set so the header
        # cannot be broken/injected by a hostile filename.
        filename = re.sub(r'[^-\w" .()]', "", filename)
        # f-string "=" debug spec renders as: attachment; filename=<value>
        cd = f"attachment; {filename=!s}"
        self.set_header("Content-Disposition", cd)
        self.set_header("Content-Type", "application/text")
        # Never let the browser sniff or frame the (untrusted) body.
        self.set_header("X-Content-Type-Options", "nosniff")
        self.set_header("X-Frame-Options", "DENY")
        self.write(message.get_content(strict=False))
class FlowContentView(RequestHandler):
    def message_to_json(
        self,
        view_name: str,
        message: http.Message | TCPMessage | UDPMessage | WebSocketMessage,
        flow: HTTPFlow | TCPFlow | UDPFlow,
        max_lines: int | None = None,
        from_client: bool | None = None,
        timestamp: float | None = None,
    ):
        """Render a single message through a contentview as a JSON dict."""
        # Normalize any casing of "auto" to the canonical view name.
        if view_name and view_name.lower() == "auto":
            view_name = "auto"
        pretty = contentviews.prettify_message(message, flow, view_name=view_name)
        if max_lines:
            # Truncate long bodies so responses stay small.
            pretty.text = cut_after_n_lines(pretty.text, max_lines)
        ret: dict[str, Any] = dict(
            text=pretty.text,
            view_name=pretty.view_name,
            syntax_highlight=pretty.syntax_highlight,
            description=pretty.description,
        )
        # Only present for websocket/tcp/udp messages.
        if from_client is not None:
            ret["from_client"] = from_client
        if timestamp is not None:
            ret["timestamp"] = timestamp
        return ret

    def get(self, flow_id, message, content_view) -> None:
        flow = self.flow
        assert isinstance(flow, (HTTPFlow, TCPFlow, UDPFlow))
        # Optional ?lines=N caps the total rendered output.
        if self.request.arguments.get("lines"):
            max_lines = int(self.request.arguments["lines"][0])
        else:
            max_lines = None

        if message == "messages":
            messages: list[TCPMessage] | list[UDPMessage] | list[WebSocketMessage]
            if isinstance(flow, HTTPFlow) and flow.websocket:
                messages = flow.websocket.messages
            elif isinstance(flow, (TCPFlow, UDPFlow)):
                messages = flow.messages
            else:
                raise APIError(400, f"This flow has no messages.")
            msgs = []
            for m in messages:
                d = self.message_to_json(
                    view_name=content_view,
                    message=m,
                    flow=flow,
                    max_lines=max_lines,
                    from_client=m.from_client,
                    timestamp=m.timestamp,
                )
                msgs.append(d)
                if max_lines:
                    # The line budget is shared across consecutive messages.
                    max_lines -= d["text"].count("\n") + 1
                    assert max_lines is not None
                    if max_lines <= 0:
                        break
            self.write(msgs)
        else:
            message = getattr(self.flow, message)
            self.write(self.message_to_json(content_view, message, flow, max_lines))
class Commands(RequestHandler):
    """Describe every registered command: parameters, return type, signature."""

    def get(self) -> None:
        def describe(cmd) -> dict:
            # Shape mirrored by the web UI's command palette.
            return {
                "help": cmd.help,
                "parameters": [
                    {
                        "name": param.name,
                        "type": command.typename(param.type),
                        "kind": str(param.kind),
                    }
                    for param in cmd.parameters
                ],
                "return_type": (
                    command.typename(cmd.return_type) if cmd.return_type else None
                ),
                "signature_help": cmd.signature_help(),
            }

        self.write(
            {name: describe(cmd) for name, cmd in self.master.commands.commands.items()}
        )
class ExecuteCommand(RequestHandler):
    def post(self, cmd: str):
        # TODO: We should parse query strings here, this API is painful.
        try:
            args = self.json["arguments"]
        except APIError:
            # No (valid) JSON body: invoke the command without arguments.
            args = []
        try:
            result = self.master.commands.call_strings(cmd, args)
        except Exception as e:
            # Command failures are reported in-band, not as HTTP errors.
            self.write({"error": str(e)})
        else:
            self.write(
                {
                    "value": result,
                    # "type": command.typename(type(result)) if result is not None else "none"
                }
            )
class Events(RequestHandler):
    """Return the buffered proxy event log as JSON."""

    def get(self):
        entries = [logentry_to_json(entry) for entry in self.master.events.data]
        self.write(entries)
class Options(RequestHandler):
    """Read (GET) and update (PUT) mitmproxy's options."""

    def get(self):
        self.write(optmanager.dump_dicts(self.master.options))

    def put(self):
        update = self.json
        try:
            self.master.options.update(**update)
        except Exception as err:
            # Unknown options or bad values become a client error.
            raise APIError(400, str(err))
class SaveOptions(RequestHandler):
    """Persisting options to disk is not implemented; this endpoint is a no-op.

    Kept so that existing clients POSTing to /options/save get a 200 rather
    than a 404. (Dead commented-out implementation removed.)
    """

    def post(self):
        pass
class State(RequestHandler):
    # Separate method for testability.
    @staticmethod
    def get_json(master: mitmproxy.tools.web.master.WebMaster):
        """Snapshot of server state sent to the UI on load."""
        return {
            "version": version.VERSION,
            # The "query" contentview is excluded from the selectable list.
            "contentViews": [
                v for v in contentviews.registry.available_views() if v != "query"
            ],
            "servers": {
                s.mode.full_spec: s.to_json() for s in master.proxyserver.servers
            },
            "platform": sys.platform,
            # None if local redirect mode is usable, else a reason string.
            "localModeUnavailable": mitmproxy_rs.local.LocalRedirector.unavailable_reason(),
        }

    def get(self):
        self.write(State.get_json(self.master))
class ProcessList(RequestHandler):
    """List executables that currently have running processes."""

    @staticmethod
    def get_json():
        # Static so the JSON shape can be produced without a request context.
        return [
            {
                "is_visible": proc.is_visible,
                "executable": str(proc.executable),
                "is_system": proc.is_system,
                "display_name": proc.display_name,
            }
            for proc in mitmproxy_rs.process_info.active_executables()
        ]

    def get(self):
        self.write(ProcessList.get_json())
class ProcessImage(RequestHandler):
    def get(self):
        """Serve the icon of the executable given via ?path=..."""
        path = self.get_query_argument("path", None)
        if not path:
            raise APIError(400, "Missing 'path' parameter.")

        try:
            icon_bytes = mitmproxy_rs.process_info.executable_icon(path)
        except Exception:
            # Best-effort: fall back to a transparent placeholder when the
            # icon cannot be extracted, rather than failing the request.
            icon_bytes = TRANSPARENT_PNG

        self.set_header("Content-Type", "image/png")
        self.set_header("X-Content-Type-Options", "nosniff")
        # Icons rarely change; allow caching for a week (604800s).
        self.set_header("Cache-Control", "max-age=604800")
        self.write(icon_bytes)
class GZipContentAndFlowFiles(tornado.web.GZipContentEncoding):
    """GZip output transform that additionally compresses flow dump downloads
    (served as application/octet-stream)."""

    CONTENT_TYPES = {
        "application/octet-stream",
        *tornado.web.GZipContentEncoding.CONTENT_TYPES,
    }
# URL routing table. Named regex groups (flow_id, message, content_view, cmd)
# are passed to the handler methods as keyword arguments.
handlers = [
    (r"/", IndexHandler),
    (r"/filter-help(?:\.json)?", FilterHelp),
    (r"/updates", ClientConnection),
    (r"/commands(?:\.json)?", Commands),
    (r"/commands/(?P<cmd>[a-z.]+)", ExecuteCommand),
    (r"/events(?:\.json)?", Events),
    (r"/flows(?:\.json)?", Flows),
    (r"/flows/dump", DumpFlows),
    (r"/flows/resume", ResumeFlows),
    (r"/flows/kill", KillFlows),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)", FlowHandler),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)/resume", ResumeFlow),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)/kill", KillFlow),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)/duplicate", DuplicateFlow),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)/replay", ReplayFlow),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)/revert", RevertFlow),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)/(?P<message>request|response|messages)/content.data", FlowContent),
    (r"/flows/(?P<flow_id>[0-9a-f\-]+)/(?P<message>request|response|messages)/content/(?P<content_view>[0-9a-zA-Z\-\_%]+)(?:\.json)?", FlowContentView),
    (r"/clear", ClearAll),
    (r"/options(?:\.json)?", Options),
    (r"/options/save", SaveOptions),
    (r"/state(?:\.json)?", State),
    (r"/processes", ProcessList),
    (r"/executable-icon", ProcessImage),
]  # fmt: skip
class Application(tornado.web.Application):
    """The tornado application backing mitmweb."""

    master: mitmproxy.tools.web.master.WebMaster

    def __init__(
        self, master: mitmproxy.tools.web.master.WebMaster, debug: bool
    ) -> None:
        self.master = master
        # The auth addon provides password validation and the cookie name.
        auth_addon: WebAuth = master.addons.get("webauth")
        super().__init__(
            handlers=handlers,  # type: ignore # https://github.com/tornadoweb/tornado/pull/3455
            template_path=os.path.join(os.path.dirname(__file__), "templates"),
            static_path=os.path.join(os.path.dirname(__file__), "static"),
            xsrf_cookies=True,
            xsrf_cookie_kwargs=dict(samesite="Strict"),
            # Fresh secret per start: signed cookies do not survive restarts.
            cookie_secret=secrets.token_bytes(32),
            debug=debug,
            autoreload=False,
            transforms=[GZipContentAndFlowFiles],
            is_valid_password=auth_addon.is_valid_password,
            auth_cookie_name=auth_addon.auth_cookie_name,
            compiled_template_cache=False,  # Vite
        )

View File

@@ -0,0 +1,16 @@
<!doctype html>
<!-- Build artifact: asset filenames carry content hashes and are rewritten
     on every frontend build; do not edit by hand. -->
<html lang="en">
<head>
<meta charset="utf-8" />
<title>mitmproxy</title>
<link rel="icon" href="static/favicon.ico" type="image/x-icon" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<script type="module" crossorigin src="./static/index-Bt--WL1e.js"></script>
<link rel="modulepreload" crossorigin href="./static/vendor-BS4xPthR.js">
<link rel="stylesheet" crossorigin href="./static/vendor-Cg3S-P9H.css">
<link rel="stylesheet" crossorigin href="./static/index-DhPPoJ7G.css">
</head>
<body>
<div id="mitmproxy"></div>
</body>
</html>

View File

@@ -0,0 +1,122 @@
import errno
import logging
from typing import cast
import tornado.httpserver
import tornado.ioloop
from mitmproxy import addons
from mitmproxy import flow
from mitmproxy import log
from mitmproxy import master
from mitmproxy import options
from mitmproxy import optmanager
from mitmproxy.addons import errorcheck
from mitmproxy.addons import eventstore
from mitmproxy.addons import intercept
from mitmproxy.addons import readfile
from mitmproxy.addons import view
from mitmproxy.addons.proxyserver import Proxyserver
from mitmproxy.tools.web import app
from mitmproxy.tools.web import static_viewer
from mitmproxy.tools.web import webaddons
logger = logging.getLogger(__name__)
class WebMaster(master.Master):
    """Master for mitmweb: wires the proxy core to the tornado web app and
    rebroadcasts state changes to connected web clients."""

    def __init__(self, opts: options.Options, with_termlog: bool = True):
        super().__init__(opts, with_termlog=with_termlog)
        self.view = view.View()
        # Forward every view mutation to the websocket clients.
        self.view.sig_view_add.connect(self._sig_view_add)
        self.view.sig_view_remove.connect(self._sig_view_remove)
        self.view.sig_view_update.connect(self._sig_view_update)
        self.view.sig_view_refresh.connect(self._sig_view_refresh)

        # Same for the event log.
        self.events = eventstore.EventStore()
        self.events.sig_add.connect(self._sig_events_add)
        self.events.sig_refresh.connect(self._sig_events_refresh)

        self.options.changed.connect(self._sig_options_update)

        self.addons.add(*addons.default_addons())
        self.addons.add(
            webaddons.WebAddon(),
            webaddons.WebAuth(),
            intercept.Intercept(),
            readfile.ReadFileStdin(),
            static_viewer.StaticViewer(),
            self.view,
            self.events,
            errorcheck.ErrorCheck(),
        )
        self.app = app.Application(self, self.options.web_debug)
        self.proxyserver: Proxyserver = self.addons.get("proxyserver")
        self.proxyserver.servers.changed.connect(self._sig_servers_changed)

    def _sig_view_add(self, flow: flow.Flow) -> None:
        app.ClientConnection.broadcast_flow("flows/add", flow)

    def _sig_view_update(self, flow: flow.Flow) -> None:
        app.ClientConnection.broadcast_flow("flows/update", flow)

    def _sig_view_remove(self, flow: flow.Flow, index: int) -> None:
        # Clients only need the id to remove a flow.
        app.ClientConnection.broadcast(
            type="flows/remove",
            payload=flow.id,
        )

    def _sig_view_refresh(self) -> None:
        app.ClientConnection.broadcast_flow_reset()

    def _sig_events_add(self, entry: log.LogEntry) -> None:
        app.ClientConnection.broadcast(
            type="events/add",
            payload=app.logentry_to_json(entry),
        )

    def _sig_events_refresh(self) -> None:
        app.ClientConnection.broadcast(
            type="events/reset",
        )

    def _sig_options_update(self, updated: set[str]) -> None:
        # Only push the options that actually changed.
        options_dict = optmanager.dump_dicts(self.options, updated)
        app.ClientConnection.broadcast(
            type="options/update",
            payload=options_dict,
        )

    def _sig_servers_changed(self) -> None:
        app.ClientConnection.broadcast(
            type="state/update",
            payload={
                "servers": {
                    s.mode.full_spec: s.to_json() for s in self.proxyserver.servers
                }
            },
        )

    @property
    def web_url(self) -> str:
        # Includes the auth token; delegated to the webauth addon.
        return cast(webaddons.WebAuth, self.addons.get("webauth")).web_url

    async def running(self):
        # Register tornado with the current event loop
        tornado.ioloop.IOLoop.current()

        # Add our web app.
        http_server = tornado.httpserver.HTTPServer(
            self.app, max_buffer_size=2**32
        )  # 4GB
        try:
            http_server.listen(self.options.web_port, self.options.web_host)
        except OSError as e:
            message = f"Web server failed to listen on {self.options.web_host or '*'}:{self.options.web_port} with {e}"
            if e.errno == errno.EADDRINUSE:
                message += f"\nTry specifying a different port by using `--set web_port={self.options.web_port + 2}`."
            # Re-raise with the friendlier message attached.
            raise OSError(e.errno, message, e.filename) from e
        logger.info(f"Web server listening at {self.web_url}")

        return await super().running()

Binary file not shown.

After

Width:  |  Height:  |  Size: 357 KiB

File diff suppressed because it is too large Load Diff

After

Width:  |  Height:  |  Size: 434 KiB

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,118 @@
import json
import logging
import os.path
import pathlib
import shutil
import time
from collections.abc import Iterable
from typing import Optional
from mitmproxy import contentviews
from mitmproxy import ctx
from mitmproxy import flow
from mitmproxy import flowfilter
from mitmproxy import http
from mitmproxy import io
from mitmproxy import version
from mitmproxy.tools.web.app import flow_to_json
web_dir = pathlib.Path(__file__).absolute().parent
def save_static(path: pathlib.Path) -> None:
    """
    Save the files for the static web view.
    """
    # We want to overwrite the static files to keep track of the update.
    static_dst = path / "static"
    if static_dst.exists():
        shutil.rmtree(static_dst)
    # shutil accepts path-like objects; no str() wrapping needed.
    shutil.copytree(web_dir / "static", static_dst)
    shutil.copyfile(web_dir / "index.html", path / "index.html")
    # Flag the bundle as a static (read-only) viewer for the frontend code.
    (static_dst / "static.js").write_text("MITMWEB_STATIC = true;")
def save_filter_help(path: pathlib.Path) -> None:
    """Write the filter-language help to filter-help.json."""
    with open(str(path / "filter-help.json"), "w") as fh:
        json.dump({"commands": flowfilter.help}, fh)
def save_settings(path: pathlib.Path) -> None:
    """Write the minimal settings blob consumed by the static viewer."""
    with open(str(path / "settings.json"), "w") as fh:
        json.dump({"version": version.VERSION}, fh)
def save_flows(path: pathlib.Path, flows: Iterable[flow.Flow]) -> None:
    """Serialize all flows (without bodies) into a single flows.json."""
    with open(str(path / "flows.json"), "w") as fh:
        json.dump([flow_to_json(item) for item in flows], fh)
def save_flows_content(path: pathlib.Path, flows: Iterable[flow.Flow]) -> None:
    """Write raw body content and a pre-rendered content view for each
    request/response of every HTTP flow, mirroring the live API's URLs."""
    for f in flows:
        # NOTE(review): only HTTP flows are supported here; TCP/UDP/DNS flows
        # would fail this assert — confirm callers filter accordingly.
        assert isinstance(f, http.HTTPFlow)
        for m in ("request", "response"):
            message = getattr(f, m)
            message_path = path / "flows" / f.id / m
            os.makedirs(str(message_path / "content"), exist_ok=True)

            with open(str(message_path / "content.data"), "wb") as content_file:
                # don't use raw_content here as this is served with a default content type
                if message:
                    content_file.write(message.content)
                else:
                    content_file.write(b"No content.")

            # content_view
            t = time.time()
            if message:
                pretty = contentviews.prettify_message(
                    message=message,
                    flow=f,
                )
            else:
                # Placeholder result for absent messages (e.g. no response).
                pretty = contentviews.ContentviewResult(
                    text="No content.",
                    syntax_highlight="none",
                    view_name="/",
                    description="",
                )
            # Surface slow contentviews so they can be investigated.
            if time.time() - t > 0.1:
                logging.info(
                    f"Slow content view: {pretty.view_name} took {round(time.time() - t, 1)}s",
                )
            with (message_path / "content" / "Auto.json").open(
                "w"
            ) as content_view_file:
                json.dump(
                    dict(
                        text=pretty.text,
                        syntax_highlight=pretty.syntax_highlight,
                        view_name=pretty.view_name,
                        description=pretty.description,
                    ),
                    content_view_file,
                )
class StaticViewer:
    """Addon that exports a self-contained, read-only copy of the web UI."""

    # TODO: make this a command at some point.
    def load(self, loader):
        loader.add_option(
            "web_static_viewer",
            Optional[str],
            "",
            "The path to output a static viewer.",
        )

    def configure(self, updated):
        # Export once whenever the option is set, reading flows from rfile.
        if "web_static_viewer" in updated and ctx.options.web_static_viewer:
            flows = io.read_flows_from_paths([ctx.options.rfile])
            p = pathlib.Path(ctx.options.web_static_viewer).expanduser()
            self.export(p, flows)

    def export(self, path: pathlib.Path, flows: Iterable[flow.Flow]) -> None:
        """Write static assets, filter help, flow index and flow contents."""
        save_static(path)
        save_filter_help(path)
        save_flows(path, flows)
        save_flows_content(path, flows)

View File

@@ -0,0 +1,35 @@
<!doctype html>
<!-- mitmweb authentication page, rendered by Tornado's template engine
     ({% ... %} blocks). Served as the 403 response when no valid password,
     token, or auth cookie is present; `invalid_password` switches the
     heading after a failed attempt. The form POSTs the entered token back
     to the same URL together with Tornado's XSRF token. -->
<html lang="en">
<head>
<meta charset="utf-8">
<title>mitmproxy</title>
<link rel="icon" href=".{{ static_url('images/favicon.ico') }}" type="image/x-icon"/>
<meta name="viewport" content="width=device-width, initial-scale=1" />
<style>
body {
font-family: sans-serif;
display: flex;
flex-direction: column;
align-items: center;
}
input {
font-family: monospace;
}
</style>
</head>
<body>
{% if invalid_password %}
<h1 style="color: darkred">403 Invalid Password</h1>
{% else %}
<h1>403 Authentication Required</h1>
{% end %}
<p>To access mitmproxy, please enter the password or authentication token printed in the console.</p>
<form method="POST">
<label>
<input type="password" name="token" size="32" placeholder="" />
</label>
{% module xsrf_form_html() %}
<input type="submit" />
</form>
</body>
</html>

View File

@@ -0,0 +1,15 @@
# Auto-generated by web/gen/web_columns.py
# Identifiers of the columns the mitmweb flow table can display; referenced
# by the `web_columns` option help text in webaddons.py.
# NOTE(review): auto-generated — regenerate via the script above, don't edit by hand.
AVAILABLE_WEB_COLUMNS = [
    "icon",
    "index",
    "method",
    "version",
    "path",
    "quickactions",
    "size",
    "status",
    "time",
    "timestamp",
    "tls",
    "comment",
]

View File

@@ -0,0 +1,151 @@
from __future__ import annotations
import hmac
import logging
import secrets
import webbrowser
from collections.abc import Sequence
from typing import TYPE_CHECKING
import argon2
from mitmproxy import ctx
from mitmproxy import exceptions
from mitmproxy.tools.web.web_columns import AVAILABLE_WEB_COLUMNS
if TYPE_CHECKING:
from mitmproxy.tools.web.master import WebMaster
logger = logging.getLogger(__name__)
class WebAuth:
    """Addon that guards the mitmweb UI with a password or a random token.

    The effective secret is either the user-supplied `web_password` option
    (plaintext, or an argon2 hash starting with `$`) or, if unset, a random
    token generated on startup.
    """

    # The configured secret: an argon2 hash (starts with "$") or a plaintext
    # password/token.
    _password: str
    _hasher: argon2.PasswordHasher

    def __init__(self):
        self._password = secrets.token_hex(16)
        self._hasher = argon2.PasswordHasher()

    def load(self, loader):
        loader.add_option(
            "web_password",
            str,
            "",
            # Bug fix: the adjacent string literals were missing trailing
            # spaces, rendering as "on startup.For automated calls ...
            # query parameteror as" in the option help.
            "Password to protect the mitmweb user interface. "
            "Values starting with `$` are interpreted as an argon2 hash, "
            "everything else is considered a plaintext password. "
            "If no password is provided, a random token is generated on startup. "
            "For automated calls, you can pass the password as token query parameter "
            "or as `Authorization: Bearer ...` header.",
        )

    def configure(self, updated) -> None:
        """Validate `web_password` and update the effective secret.

        Raises:
            exceptions.OptionsError: if the value starts with `$` but is not
                a parseable argon2 hash (logins could never succeed).
        """
        if "web_password" in updated:
            if ctx.options.web_password.startswith("$"):
                try:
                    argon2.extract_parameters(ctx.options.web_password)
                except argon2.exceptions.InvalidHashError:
                    raise exceptions.OptionsError(
                        "`web_password` starts with `$`, but it's not a valid argon2 hash."
                    )
            elif ctx.options.web_password:
                logger.warning(
                    "Using a plaintext password to protect the mitmweb user interface. "
                    "Consider using an argon2 hash for `web_password` instead."
                )
            # Fall back to a fresh random token when the option is cleared.
            self._password = ctx.options.web_password or secrets.token_hex(16)

    @property
    def web_url(self) -> str:
        """URL to open the UI, embedding the token only when it is a random one."""
        if ctx.options.web_password:
            auth = ""  # We don't want to print plaintext passwords (and it doesn't work for argon2 anyhow).
        else:
            auth = f"?token={self._password}"
        web_host = ctx.options.web_host
        if ":" in web_host:  # ipv6
            web_host = f"[{web_host}]"
        # noinspection HttpUrlsUsage
        return f"http://{web_host}:{ctx.options.web_port}/{auth}"

    @staticmethod
    def auth_cookie_name() -> str:
        """Session cookie name, scoped by port so parallel instances don't clash."""
        return f"mitmproxy-auth-{ctx.options.web_port}"

    def is_valid_password(self, password: str) -> bool:
        """Check *password* against the secret.

        Uses argon2 verification for hashed secrets and a constant-time
        comparison for plaintext ones.
        """
        if self._password.startswith("$"):
            try:
                return self._hasher.verify(self._password, password)
            except argon2.exceptions.VerificationError:
                return False
        else:
            return hmac.compare_digest(
                self._password,
                password,
            )
class WebAddon:
    """Registers the core mitmweb options and opens a browser on startup."""

    def load(self, loader):
        loader.add_option("web_open_browser", bool, True, "Start a browser.")
        loader.add_option("web_debug", bool, False, "Enable mitmweb debugging.")
        loader.add_option("web_port", int, 8081, "Web UI port.")
        loader.add_option("web_host", str, "127.0.0.1", "Web UI host.")
        loader.add_option(
            "web_columns",
            Sequence[str],
            ["tls", "icon", "path", "method", "status", "size", "time"],
            f"Columns to show in the flow list. Can be one of the following: {', '.join(AVAILABLE_WEB_COLUMNS)}",
        )

    def running(self):
        """Try to open the UI in a browser; log fallback instructions otherwise."""
        if not hasattr(ctx.options, "web_open_browser"):
            return
        if not ctx.options.web_open_browser:
            return
        master: WebMaster = ctx.master  # type: ignore
        opened = open_browser(master.web_url)
        if opened:
            return
        logger.info(
            f"No web browser found. Please open a browser and point it to {master.web_url}",
        )
        if not ctx.options.web_password:
            logger.info(
                f"You can configure a fixed authentication token by setting the `web_password` option "
                f"(https://docs.mitmproxy.org/stable/concepts-options/#web_password).",
            )
def open_browser(url: str) -> bool:
    """
    Open a URL in a browser window.
    In contrast to webbrowser.open, we limit the list of suitable browsers.
    This gracefully degrades to a no-op on headless servers, where webbrowser.open
    would otherwise open lynx.
    Returns:
        True, if a browser has been opened
        False, if no suitable browser has been found.
    """
    candidates = (
        "windows-default",
        "macosx",
        "wslview %s",
        "gio",
        "x-www-browser",
        "gnome-open %s",
        "xdg-open",
        "google-chrome",
        "chrome",
        "chromium",
        "chromium-browser",
        "firefox",
        "opera",
        "safari",
    )
    for name in candidates:
        try:
            controller = webbrowser.get(name)
        except webbrowser.Error:
            # Browser not available on this system; try the next candidate.
            continue
        if controller.open(url):
            return True
    return False