#!/usr/bin/env python
#
# Pretty-printer for simple trace backend binary trace files
#
# Copyright IBM, Corp. 2010
#
# This work is licensed under the terms of the GNU GPL, version 2. See
# the COPYING file in the top-level directory.
#
# For help see docs/tracing.txt

import struct
import re
import inspect

header_event_id = 0xffffffffffffffff
header_magic = 0xf2b177cb0aa429b4
header_version = 0
dropped_event_id = 0xfffffffffffffffe
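# Each record is eight native-byte-order 64-bit words: the event ID, a
# timestamp in nanoseconds, and six argument slots (unused slots are still
# present, so every record has the same fixed size).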
trace_fmt = '=QQQQQQQQ'
trace_len = struct.calcsize(trace_fmt)
event_re = re.compile(r'(disable\s+)?([a-zA-Z0-9_]+)\(([^)]*)\).*')

def parse_events(fobj):
    """Parse a trace-events file into {event_num: (name, arg1, ...)}."""

    def get_argnames(args):
        """Extract argument names from a parameter list."""
        return tuple(arg.split()[-1].lstrip('*') for arg in args.split(','))

    events = {dropped_event_id: ('dropped', 'count')}
    event_num = 0
    for line in fobj:
        m = event_re.match(line.strip())
        if m is None:
            continue

        disable, name, args = m.groups()
        events[event_num] = (name,) + get_argnames(args)
        event_num += 1
    return events
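# As an illustration (the event name is made up), a trace-events line such as
#
#   qemu_malloc(size_t size) "size %zu"
#
# appearing first in the file would yield events[0] = ('qemu_malloc', 'size').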
def read_record(fobj):
    """Deserialize a trace record from a file into a tuple (event_num, timestamp, arg1, ..., arg6)."""
    s = fobj.read(trace_len)
    if len(s) != trace_len:
        return None
    return struct.unpack(trace_fmt, s)

def read_trace_file(fobj):
    """Deserialize trace records from a file, yielding record tuples (event_num, timestamp, arg1, ..., arg6)."""
    header = read_record(fobj)
    if header is None or \
       header[0] != header_event_id or \
       header[1] != header_magic or \
       header[2] != header_version:
        raise ValueError('not a trace file or incompatible version')

    while True:
        rec = read_record(fobj)
        if rec is None:
            break

        yield rec
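# Sketch of using the low-level reader directly (the trace file name is just
# an example); most scripts should use the Analyzer/process()/run() helpers
# below instead:
#
#   for rec in read_trace_file(open('trace-12345', 'rb')):
#       event_num, timestamp = rec[0], rec[1]
#       ...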
class Analyzer(object):
    """A trace file analyzer which processes trace records.

    An analyzer can be passed to run() or process(). The begin() method is
    invoked, then each trace record is processed, and finally the end() method
    is invoked.

    If a method matching a trace event name exists, it is invoked to process
    that trace record. Otherwise the catchall() method is invoked."""

    def begin(self):
        """Called at the start of the trace."""
        pass

    def catchall(self, event, rec):
        """Called if no specific method for processing a trace event has been found."""
        pass

    def end(self):
        """Called at the end of the trace."""
        pass
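# A minimal analyzer sketch. The qemu_malloc event and its argument are
# hypothetical; method names must match events defined in the trace-events
# file:
#
#   class MallocAnalyzer(Analyzer):
#       def begin(self):
#           self.total = 0
#
#       def qemu_malloc(self, size):
#           # invoked once per qemu_malloc trace record
#           self.total += size
#
#       def end(self):
#           print 'total bytes requested: %d' % self.total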
def process(events, log, analyzer):
    """Invoke an analyzer on each event in a log."""
    if isinstance(events, str):
        events = parse_events(open(events, 'r'))
    if isinstance(log, str):
        log = open(log, 'rb')

    def build_fn(analyzer, event):
        fn = getattr(analyzer, event[0], None)
        if fn is None:
            return analyzer.catchall

        event_argcount = len(event) - 1
        fn_argcount = len(inspect.getargspec(fn)[0]) - 1
        if fn_argcount == event_argcount + 1:
            # Include timestamp as first argument
            return lambda _, rec: fn(*rec[1:2 + event_argcount])
        else:
            # Just arguments, no timestamp
            return lambda _, rec: fn(*rec[2:2 + event_argcount])

    analyzer.begin()
    fn_cache = {}
    for rec in read_trace_file(log):
        event_num = rec[0]
        event = events[event_num]
        if event_num not in fn_cache:
            fn_cache[event_num] = build_fn(analyzer, event)
        fn_cache[event_num](event, rec)
    analyzer.end()
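# Calling process() directly, e.g. with the MallocAnalyzer sketch above (both
# file names are examples); events may also be a dict from parse_events() and
# log an already-open binary file object:
#
#   process('trace-events', 'trace-12345', MallocAnalyzer())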
def run(analyzer):
    """Execute an analyzer on a trace file given on the command-line.

    This function is useful as a driver for simple analysis scripts. More
    advanced scripts will want to call process() instead."""
    import sys

    if len(sys.argv) != 3:
        sys.stderr.write('usage: %s <trace-events> <trace-file>\n' % sys.argv[0])
        sys.exit(1)

    events = parse_events(open(sys.argv[1], 'r'))
    process(events, sys.argv[2], analyzer)
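# A typical stand-alone driver built on run(), assuming simpletrace.py is on
# the Python path (script and class names are examples):
#
#   #!/usr/bin/env python
#   from simpletrace import Analyzer, run
#
#   class MyAnalyzer(Analyzer):
#       ...
#
#   run(MyAnalyzer())
#
# invoked as:  ./my-analyzer.py trace-events trace-12345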
if __name__ == '__main__':
    class Formatter(Analyzer):
        def __init__(self):
            self.last_timestamp = None

        def catchall(self, event, rec):
            timestamp = rec[1]
            if self.last_timestamp is None:
                self.last_timestamp = timestamp
            delta_ns = timestamp - self.last_timestamp
            self.last_timestamp = timestamp

            fields = [event[0], '%0.3f' % (delta_ns / 1000.0)]
            for i in xrange(1, len(event)):
                fields.append('%s=0x%x' % (event[i], rec[i + 1]))
            print ' '.join(fields)

    run(Formatter())
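# The Formatter prints one line per record: the event name, the delta since
# the previous record in microseconds (delta_ns / 1000.0), and each argument
# as name=hex value, e.g. (values illustrative):
#
#   qemu_malloc 12.345 size=0x200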