3 # Read a pcap dump containing a single TCP connection and analyze it to
4 # determine as much as possible about the performance of that connection.
5 # (Specifically designed for measuring performance of fetches to Amazon S3.)
7 import impacket, json, pcapy, re, sys
8 import impacket.ImpactDecoder, impacket.ImpactPacket
10 # Estimate of the network RTT
# NOTE(review): the def line of this helper (original line ~13, presumably
# `def dump_data(result_list):`) is missing from this excerpt; the __main__
# driver below calls dump_data(result_list) — TODO confirm against full file.
# Serializes the list of per-request analysis dicts as pretty-printed JSON.
14 return json.dumps(result_list, sort_keys=True, indent=2)
# NOTE(review): this excerpt is a numbered listing with gaps — original
# lines 19-20, 23, 27 and 31-34 are absent, so the statements below are
# NOT contiguous (e.g. the `self.pkt = pkt` and `self.ts = ts` assignments
# that lines 21/44 rely on are presumably on the missing lines 19-20).
17 def __init__(self, connection, ts, pkt):
# Parse one decoded Ethernet frame into its IP and TCP layers and record
# the fields the analysis uses: payload, sequence range, ack, IP ID.
18 self.connection = connection
21 self.ip = self.pkt.child()
22 self.tcp = self.ip.child()
# TCP payload length = total IP datagram length minus IP and TCP headers.
24 self.datalen = self.ip.get_ip_len() - self.ip.get_header_size() \
25 - self.tcp.get_header_size()
26 self.data = self.tcp.get_data_as_string()[0:self.datalen]
# seq is a (first, one-past-last) byte-offset pair for this segment.
28 self.seq = (self.tcp.get_th_seq(), self.tcp.get_th_seq() + self.datalen)
29 self.ack = self.tcp.get_th_ack()
30 self.id = self.ip.get_ip_id()
# Direction is inferred from which endpoint uses TCP port 80 (HTTP).
# The branch bodies (original lines 33-34) are missing from this excerpt;
# presumably they set self.direction to -1 / 1 to match the '<'/'>' map
# used by the __str__ fragment below — TODO confirm.
32 if self.tcp.get_th_sport() == 80:
35 elif self.tcp.get_th_dport() == 80:
# NOTE(review): the def line of this method (original line 41, likely
# `def __str__(self):` or `__repr__`) is missing from this excerpt; only
# the return expression survives.  Direction renders as '<' (-1, presumably
# server->client), '>' (1, client->server) or '?' (0) — TODO confirm the
# -1/1 assignment against the missing lines of __init__ above.
42 return "<Packet[%s]: id=%d seq=%d..%d ack=%d %s>" % \
43 ({-1: '<', 1: '>', 0: '?'}[self.direction], self.id,
44 self.seq[0], self.seq[1], self.ack, self.ts)
# NOTE(review): the class header and `def __init__` line (original lines
# ~46-47) are missing from this excerpt; these are initializer statements
# of the connection/trace object.  packet_handler below also appends to
# self.packets, so a `self.packets = []` presumably sits on a missing
# line (e.g. 50) — TODO confirm.
# start_time marks the timestamp of the first captured packet (set lazily
# in packet_handler).
48 self.start_time = None
# Ethernet-frame decoder; process_file asserts DLT_EN10MB to match it.
49 self.decoder = impacket.ImpactDecoder.EthDecoder()
52 def process_file(self, filename):
53 """Load a pcap file and process the packets contained in it."""
# (original line 54 is absent from this excerpt — presumably blank)
55 p = pcapy.open_offline(filename)
# Restrict to TCP-over-IP; the `tcp` keyword must be escaped in BPF filters.
56 p.setfilter(r"ip proto \tcp")
# The decoder built in __init__ handles Ethernet frames only, so require
# an EN10MB capture.  NOTE(review): assert is stripped under -O; raising
# an explicit exception would be more robust.
57 assert p.datalink() == pcapy.DLT_EN10MB
# cnt=0: process every packet to EOF, dispatching each to packet_handler.
58 p.loop(0, self.packet_handler)
60 def packet_handler(self, header, data):
61 """Callback function run by the pcap parser for each packet."""
# Capture timestamp folded into integer microseconds.
63 (sec, us) = header.getts()
64 ts = sec * 1000000 + us
# The first packet seen establishes the trace start time; the body of
# this `if` (original lines 66-67) is missing from this excerpt —
# presumably `self.start_time = ts` or similar, TODO confirm.
65 if self.start_time is None:
# Decode the raw Ethernet frame; ts is converted back to float seconds
# for the Packet record.
68 pkt = Packet(self, ts * 1e-6, self.decoder.decode(data))
69 self.packets.append(pkt)
# NOTE(review): only the signature and the start of the docstring of
# split_trace survive in this excerpt — the docstring's closing quotes
# and the entire function body (original lines ~76-93) are missing, so
# nothing can be said about the implementation.  The __main__ driver
# below iterates over its result, so it presumably yields/returns a
# sequence of packet sub-lists — TODO confirm against the full file.
71 def split_trace(packets, predicate, before=True):
72 """Split a sequence of packets apart where packets satisfy the predicate.
74 If before is True (default), the split happens just before the matching
75 packet; otherwise it happens just after.
# NOTE(review): this function is heavily truncated in this excerpt —
# original lines 96-100, 102-104, 107, 111-112, 115-119, 121-122, 124,
# 127-129, 135, 138, 141, 143, 145, 148, 152-154 and 156 are missing.
# Variables such as p, start_ts, start_seq, resp_ts, last_ts, id_in,
# spacings and the enclosing `for` loop are evidently bound/advanced on
# those missing lines; do not assume the visible statements are
# contiguous.  RTT_EST is a module-level estimate defined on a line
# missing near the top of the file (see the lone comment at original
# line 10).
94 def analyze_get(packets):
# Analyzes one request/response exchange: returns a stats dict for a GET
# transfer, a {'syn_rtt': ...} dict for a connection-establishment
# segment, or (presumably, on a missing line) None for anything else.
95 packets = iter(packets)
101 # Check for connection establishment (SYN/SYN-ACK) and use that to estimate
105 #print "Connection establishment: RTT is", p.ts - start_ts
106 return {'syn_rtt': p.ts - start_ts}
108 # Otherwise, we expect the first packet to be the GET request itself
# (the branch body, original lines 111-112, is missing — presumably an
# early `return None`, TODO confirm)
109 if not(p.direction > 0 and p.data.startswith('GET')):
110 #print "Doesn't seem to be a GET request..."
113 # Find the first response packet containing data
# (loop body on missing lines 115-119 presumably advances p and records
# resp_ts/start_seq — TODO confirm)
114 while not(p.direction < 0 and p.datalen > 0):
# Sequence arithmetic is masked to 32 bits to survive TCP seq wraparound.
120 tot_bytes = (p.seq[1] - start_seq) & 0xffffffff
123 #print "Response time:", resp_ts - start_ts
125 # Scan through the incoming packets, looking for gaps in either the IP ID
126 # field or in the timing
130 bytenr = (p.seq[1] - start_seq) & 0xffffffff
# Only inbound data segments are analyzed; FINs are skipped.
131 if not p.direction < 0: continue
132 if p.tcp.get_FIN(): continue
# Record (inter-packet delay, cumulative byte offset) for the output.
133 spacings.append((p.ts - last_ts, bytenr))
# A jump in the IP ID (mod 2^16) suggests the sender emitted packets we
# did not receive here.
134 if p.id != (id_in + 1) & 0xffff:
136 #print "Sequence number gap at", id_in
# Timing gaps are classified against the RTT estimate: > 2*RTT is a
# "long" gap, > RTT/2 a "short" one (counters presumably updated on the
# missing lines 138/141 — TODO confirm).
137 if p.ts - last_ts > 2 * RTT_EST:
139 #print "Long gap of", p.ts - last_ts
140 elif p.ts - last_ts > RTT_EST / 2:
142 #print "Short gap of", p.ts - last_ts
144 #print " [occurred after", p.seq[0] - start_seq, "bytes, time", p.ts, "sec]"
# 1448/1460 are the common full-MSS payload sizes (with/without TCP
# timestamps); anything else is a short (likely final) segment.
146 if p.datalen not in (1448, 1460):
147 #print "Short packet of", p.datalen, "bytes, brings total to", p.seq[1] - start_seq
# Out-of-order delivery: this segment does not start where the previous
# high-water mark left off.
149 if (p.seq[0] - start_seq) & 0xffffffff != tot_bytes:
150 #print "Packet out of order; got sequence number %d, expected %d" \
151 # % ((p.seq[0] - start_seq) & 0xffffffff, tot_bytes)
# Track the high-water mark of bytes received.
155 tot_bytes = max(tot_bytes, bytenr)
157 #print "Transferred %d bytes in %s seconds, initial response after %s" % (tot_bytes, last_ts - start_ts, resp_ts - start_ts)
158 return {'bytes': tot_bytes,
159 'start_latency': resp_ts - start_ts,
160 'finish_latency': last_ts - start_ts,
161 'interpacket_times': spacings}
# NOTE(review): Python 2 script entry point (`print` statement on the
# final line).  Original lines 165-167, 170, 172-174, 176, 179-180 and
# 186-187 are missing from this excerpt — `conn` (the connection/trace
# object, built and fed each file f) and `result_list` are evidently
# initialized on those missing lines; TODO confirm against the full file.
163 if __name__ == '__main__':
# Each command-line argument is a pcap file to analyze.
164 for f in sys.argv[1:]:
# Predicate for split_trace: an outbound packet carrying data marks the
# start of a new request.
168 def request_start(p):
169 return p.direction > 0 and p.datalen > 0
# Analyze each request/response slice; keep only the non-None results.
171 for s in split_trace(conn.packets, request_start):
175 #if p.ts - ts > 0.01:
177 #if p.ts - ts > 2 * RTT_EST:
178 #print "LONG DELAY\n----"
181 #if p.direction > 0 and p.datalen > 0:
182 #print "Request:", repr(p.data)
183 results = analyze_get(s)
184 if results is not None:
185 result_list.append(results)
# Emit all collected stats as JSON on stdout.
188 print dump_data(result_list)