下面是一个完整的系统,包含两个脚本:
- `mitmproxy_recorder.py` —— 录制流量到 CSV 文件
- `mitmproxy_replayer.py` —— 从 CSV 文件回放流量并记录结果
第一部分:流量录制脚本 (mitmproxy_recorder.py)
import csv
import time
import os
import json
from datetime import datetime
from mitmproxy import http, ctx
from urllib.parse import urlparse, parse_qsclass TrafficRecorder:def __init__(self):self.output_dir = "mitmproxy_records"os.makedirs(self.output_dir, exist_ok=True)timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")self.record_file = os.path.join(self.output_dir, f"record_{timestamp}.csv")self.summary_file = os.path.join(self.output_dir, f"summary_{timestamp}.csv")# CSV 文件头self.record_headers = ["timestamp", "request_method", "request_url", "request_headers", "request_params", "request_body","response_status", "response_headers", "response_body","response_time_ms", "content_type"]self.summary_headers = ["timestamp", "request_url", "request_method","response_status", "response_time_ms", "success"]# 初始化文件with open(self.record_file, 'w', newline='', encoding='utf-8') as f:writer = csv.writer(f)writer.writerow(self.record_headers)with open(self.summary_file, 'w', newline='', encoding='utf-8') as f:writer = csv.writer(f)writer.writerow(self.summary_headers)ctx.log.info(f"Recording traffic to {self.record_file}")ctx.log.info(f"Summary will be saved to {self.summary_file}")def _get_params(self, url):parsed = urlparse(url)return parse_qs(parsed.query)def _format_headers(self, headers):return json.dumps(dict(headers))def request(self, flow: http.HTTPFlow):flow.start_time = time.time()def response(self, flow: http.HTTPFlow):try:response_time = (time.time() - flow.start_time) * 1000# 准备详细记录record_row = [datetime.now().isoformat(),flow