changes to mitmaddon

This commit is contained in: parent 462f2811b9, commit 3dbd506427
2 changed files with 137 additions and 24 deletions

mitmaddon/bigsnitch.py
@@ -8,6 +8,7 @@ import zmq
import json
from enum import Enum

# this method is used to convert flow states (generated with get_state()) to json
def convert_to_strings(obj):
    if isinstance(obj, dict):
        return {convert_to_strings(key): convert_to_strings(value)
@@ -19,34 +20,135 @@ def convert_to_strings(obj):
    try:
        data = obj.decode('unicode-escape').encode('latin1').decode('utf-8')
    except UnicodeError:
        print(obj)
        data = str(obj)[2:-1]
    return data

    return obj
# bigsnitch Request type
class bRequest:
    server_ip_address = ""

    tls = ""
    content = ""
    scheme = ""
    method = ""
    host = ""
    port = 0
    http_version = ""
    path = ""
    timestamp_start = 0.0
    timestamp_end = 0.0
    # [("Header", "Data")]
    headers = []

    error = ""

class bResponse:
    status_code = 0
    http_version = ""
    reason = ""
    content = ""
    timestamp_start = 0.0
    timestamp_end = 0.0
    # [("Header", "Data")]
    headers = []

class bFlow:
    uid = ""
    request = None
    response = None

class FlowState(Enum):
    UNSENT_REQ = 0
    SENT_REQ = 1
    UNSENT_RES = 2
    SENT_RES = 3
"""
|
||||
|
||||
The network thread communicates with the bigsnitch plugin using zeromq.
|
||||
|
||||
"""
|
||||
|
||||
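# A minimal sketch of that exchange, assuming the JSON {"msg": ...} shape used
# below; the endpoint address here is hypothetical, not taken from the commit:
#
#   socket = zmq.Context().socket(zmq.REQ)
#   socket.connect("tcp://127.0.0.1:12000")  # hypothetical address
#   socket.send(json.dumps({"msg": "ping"}).encode())
#   reply = json.loads(socket.recv())        # plugin is expected to answer {"msg": "ack"}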
class NetworkThread(threading.Thread):
    def __init__(self, name, queue):
        threading.Thread.__init__(self)
        self.name = name
        self.q = queue
        # id : (state, flow, timer, retries left)
        self.flows = {}
        self.context = zmq.Context()
        # timer for sending pings
        self.timer = time.monotonic()
        # retries left for reconnecting
        self.retries = 5
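    # One illustrative entry in self.flows (my example, not in the commit):
    #   self.flows[flow.id] = (FlowState.UNSENT_REQ, flow, time.monotonic(), 5)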
    # pick up any new flows that have arrived
    def update_flows(self):
        while True:
            try:
                i, flow, typ = self.q.get(block=False)
                if self.flows.get(i, None):
                    print(f"flow {i} doubled? ignoring...")
                    continue

                if typ == "request":
                    self.flows[i] = (FlowState.UNSENT_REQ, flow, self.timer, 5)
                elif typ == "response":
                    self.flows[i] = (FlowState.UNSENT_RES, flow, self.timer, 5)
            except Empty:
                break
    # state machine for flows
    def handle_flow(self, state, flow):
        for i, v in self.flows.items():
            state, flow, timer, retries = v

            if state == FlowState.UNSENT_REQ:
                # send it
                a = convert_to_strings(flow.get_state())
                self.socket.send(str.encode(json.dumps(a)))
            elif state == FlowState.SENT_REQ:
                # check timer, try resend
                pass
            elif state == FlowState.UNSENT_RES:
                pass
            elif state == FlowState.SENT_RES:
                pass

        self.send_msg_and_ack(a)
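    # The transitions are still stubs; my reading of the intent (not confirmed
    # by the commit): UNSENT_REQ -> SENT_REQ once the request is sent,
    # SENT_REQ -> UNSENT_RES once the plugin acks, UNSENT_RES -> SENT_RES once
    # the response is sent, and the entry is dropped after the final ack.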
    def handle_packets(self):
        while (self.socket.poll(50) & zmq.POLLIN) != 0:
            msg = self.socket.recv()
            try:
                if msg:
                    result = json.loads(msg)
                    # packet state machine
                    if result["msg"] == "ack":
                        print("m ack received")
                        return result
                    else:
                        print(f"got unexpected message {result}")
            except json.JSONDecodeError:
                print(f"malformed message received {msg}")
    def run(self):
        print("thread started")
        self.connect()
        while True:
            a = None
            try:
                a = self.q.get(block=False)
            except Empty:
                pass
            if a:
                self.send_msg_and_ack(a)
                self.timer = time.monotonic()
            # ping if nothing has been sent for 5 seconds
            if time.monotonic() - self.timer > 5:
                self.send_msg_and_ack({"msg": "ping"})
                self.timer = time.monotonic()
            self.update_flows()
    def send(self, msg):
        a = convert_to_strings(msg)
        self.socket.send(str.encode(json.dumps(a)))

    def disconnect(self):
        self.socket.setsockopt(zmq.LINGER, 0)
@@ -65,6 +167,7 @@ class NetworkThread(threading.Thread):
        print("successfully connected")

    def send_msg_and_ack(self, msg):
        self.timer = time.monotonic()
        while True:
            print("m sending")
            a = convert_to_strings(msg)
@@ -93,22 +196,26 @@ class BigSnitchBridge:
        self.thread.start()

    def request(self, flow):
-        self.q.put({'msg': 'request', 'flow': flow.get_state()})
+        self.q.put_nowait((flow.id, flow, "request"))
        # intercept until ACK received
        flow.intercept()

    def response(self, flow):
        self.q.put_nowait((flow.id, flow, "response"))
        # intercept until ACK received
        flow.intercept()

    def error(self, flow):
        self.q.put_nowait((flow.id, flow, "error"))
"""
|
||||
def requestheaders(self, flow):
|
||||
self.q.put({'msg': 'requestheaders', 'flow': flow.get_state()})
|
||||
|
||||
def response(self, flow):
|
||||
self.q.put({'msg': 'response', 'flow': flow.get_state()})
|
||||
|
||||
def responseheaders(self, flow):
|
||||
self.q.put({'msg': 'responseheaders', 'flow': flow.get_state()})
|
||||
|
||||
def error(self, flow):
|
||||
self.q.put({'msg': 'error', 'flow': flow.get_state()})
|
||||
|
||||
def websocket_handshake(self):
|
||||
self.q.put({'msg': 'websocket_handshake', 'flow': flow.get_state()})
|
||||
|
||||
|
@@ -123,6 +230,7 @@ class BigSnitchBridge:

def websocket_end(self, flow):
    self.q.put({'msg': 'websocket_end', 'flow': flow.get_state()})
"""

addons = [
    BigSnitchBridge()
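The handlers above intercept each flow until the plugin acknowledges it, so the
network thread presumably resumes the flow once the ack arrives. A minimal
sketch of that step, assuming mitmproxy's flow API (flow.resume() is the
counterpart to the flow.intercept() calls above; on_ack is a hypothetical
helper, not part of the commit):

    def on_ack(self, flow_id):
        # look up the intercepted flow and let mitmproxy continue it
        state, flow, timer, retries = self.flows.pop(flow_id)
        flow.resume()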
mitmaddon/test_bigsnitch.py (new file, 5 lines)

@@ -0,0 +1,5 @@
#!/usr/bin/env python3

import pytest
from bigsnitch import NetworkThread
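The new test module only imports so far. A first smoke test might look like
this (a sketch; the (name, queue) constructor signature is taken from the diff
above, everything else is my assumption):

    import queue

    def test_networkthread_constructs():
        q = queue.Queue()
        t = NetworkThread("test", q)
        assert t.q is q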