#!/usr/bin/env python3
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""
Generates JSON trace files viewable in chrome://tracing from binary
trace files.

Example usage:
python pw_trace_tokenized/py/trace_tokenized.py -i trace.bin -o trace.json
out/pw_strict_host_clang_debug/obj/pw_trace_tokenized/bin/trace_tokenized_example_basic
"""  # pylint: disable=line-too-long
# pylint: enable=line-too-long

from enum import IntEnum
import argparse
import logging
import struct
import sys
from pw_tokenizer import database, tokens
from pw_trace import trace

_LOG = logging.getLogger('pw_trace_tokenized')


def varint_decode(encoded):
    """Decodes one little-endian base-128 varint from encoded bytes.

    Adapted from pw_tokenizer.decode._decode_signed_integer. Returns a
    (value, bytes_consumed) tuple; value is None if the varint is truncated
    or too long to be valid.
    """
    count = 0
    result = 0
    shift = 0
    for byte in encoded:
        count += 1
        result |= (byte & 0x7f) << shift
        if not byte & 0x80:
            return result, count

        shift += 7
        if shift >= 64:
            break  # Error: varint too long to be valid.
    return None, count
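
# Example (illustrative): varint_decode(b'\xd2\x02') returns (338, 2), since
# 0xd2 contributes 0x52 and 0x02 contributes 2 << 7 == 256.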


# Token string: "event_type|flag|module|group|label|<optional DATA_FMT>"
class TokenIdx(IntEnum):
    EVENT_TYPE = 0
    FLAG = 1
    MODULE = 2
    GROUP = 3
    LABEL = 4
    DATA_FMT = 5  # optional
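
# Example (illustrative, with hypothetical names): a token string such as
# "PW_TRACE_EVENT_TYPE_INSTANT|0|my_module|my_group|my_label" splits on "|"
# so that token_values[TokenIdx.LABEL] == "my_label"; a sixth field, when
# present, is the data format string.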


def get_trace_type(type_str):
    """Maps a PW_TRACE event type name to a trace.TraceType value."""
    if type_str == "PW_TRACE_EVENT_TYPE_INSTANT":
        return trace.TraceType.INSTANTANEOUS
    if type_str == "PW_TRACE_EVENT_TYPE_INSTANT_GROUP":
        return trace.TraceType.INSTANTANEOUS_GROUP
    if type_str == "PW_TRACE_EVENT_TYPE_ASYNC_START":
        return trace.TraceType.ASYNC_START
    if type_str == "PW_TRACE_EVENT_TYPE_ASYNC_STEP":
        return trace.TraceType.ASYNC_STEP
    if type_str == "PW_TRACE_EVENT_TYPE_ASYNC_END":
        return trace.TraceType.ASYNC_END
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_START":
        return trace.TraceType.DURATION_START
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_END":
        return trace.TraceType.DURATION_END
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_GROUP_START":
        return trace.TraceType.DURATION_GROUP_START
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_GROUP_END":
        return trace.TraceType.DURATION_GROUP_END
    return trace.TraceType.INVALID


def has_trace_id(token_string):
    token_values = token_string.split("|")
    return trace.event_has_trace_id(token_values[TokenIdx.EVENT_TYPE])


def has_data(token_string):
    token_values = token_string.split("|")
    return len(token_values) > TokenIdx.DATA_FMT


def create_trace_event(token_string, timestamp_us, trace_id, data):
    token_values = token_string.split("|")
    return trace.TraceEvent(
        event_type=get_trace_type(token_values[TokenIdx.EVENT_TYPE]),
        module=token_values[TokenIdx.MODULE],
        label=token_values[TokenIdx.LABEL],
        timestamp_us=timestamp_us,
        group=token_values[TokenIdx.GROUP],
        trace_id=trace_id,
        flags=token_values[TokenIdx.FLAG],
        has_data=has_data(token_string),
        data_fmt=(token_values[TokenIdx.DATA_FMT]
                  if has_data(token_string) else ""),
        data=data if has_data(token_string) else b'')
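

# Wire format sketch, as parsed below: each event record carries a 4-byte
# token, a varint time delta in ticks, a varint trace ID (only for event
# types that use one), and any remaining bytes as the data payload.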
def parse_trace_event(buffer, db, last_time, ticks_per_second):
    """Parses a single trace event from bytes."""
    us_per_tick = 1000000 / ticks_per_second
    idx = 0
    # Read the token as a little-endian 32-bit value.
    token = struct.unpack('<I', buffer[idx:idx + 4])[0]
    idx += 4

    # Decode token
    if len(db.token_to_entries[token]) == 0:
        _LOG.error("token not found: %08x", token)
        return None

    token_string = str(db.token_to_entries[token][0])

    # Read time
    time_delta, time_bytes = varint_decode(buffer[idx:])
    if time_delta is None:
        _LOG.error("invalid time delta for token: %08x", token)
        return None
    timestamp_us = last_time + us_per_tick * time_delta
    idx += time_bytes

    # Trace ID
    trace_id = None
    if has_trace_id(token_string) and idx < len(buffer):
        trace_id, trace_id_bytes = varint_decode(buffer[idx:])
        if trace_id is None:
            _LOG.error("invalid trace ID for token: %08x", token)
            return None
        idx += trace_id_bytes

    # Data: any remaining bytes are the payload.
    data = b''
    if has_data(token_string) and idx < len(buffer):
        data = buffer[idx:]

    # Create trace event
    return create_trace_event(token_string, timestamp_us, trace_id, data)
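

# Example (illustrative): with the default 1000 ticks per second,
# us_per_tick is 1000000 / 1000 == 1000, so a decoded time delta of 250
# ticks places an event 250000 us after the previous one.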


def get_trace_events(databases, raw_trace_data, ticks_per_second):
    """Decodes raw trace data into a list of trace events."""

    db = tokens.Database.merged(*databases)
    last_timestamp = 0
    events = []
    idx = 0

    while idx + 1 < len(raw_trace_data):
        # Each record is framed as a one-byte size followed by that many
        # bytes of event data.
        size = int(raw_trace_data[idx])
        if idx + 1 + size > len(raw_trace_data):
            _LOG.error("incomplete file")
            break

        event = parse_trace_event(raw_trace_data[idx + 1:idx + 1 + size], db,
                                  last_timestamp, ticks_per_second)
        if event:
            last_timestamp = event.timestamp_us
            events.append(event)
        idx = idx + size + 1
    return events
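

# Example (illustrative): raw_trace_data of b'\x08' + <8 payload bytes> +
# b'\x05' + <5 payload bytes> decodes as two records, with idx advancing by
# size + 1 after each.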


def get_trace_data_from_file(input_file_name):
    """Reads the raw binary trace data from a file."""
    with open(input_file_name, "rb") as input_file:
        return input_file.read()


def save_trace_file(trace_lines, file_name):
    """Writes the JSON trace lines out as a list in a file."""
    with open(file_name, 'w') as output_file:
        output_file.write("[")
        for line in trace_lines:
            output_file.write("%s,\n" % line)
        # The trailing empty object keeps the list well formed despite the
        # final comma; chrome://tracing ignores it.
        output_file.write("{}]")


def get_trace_events_from_file(databases, input_file_name, ticks_per_second):
    """Get trace events from a file."""
    raw_trace_data = get_trace_data_from_file(input_file_name)
    return get_trace_events(databases, raw_trace_data, ticks_per_second)


def _parse_args():
    """Parse and return command line arguments."""

    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument(
        'databases',
        nargs='+',
        action=database.LoadTokenDatabases,
        help='Databases (ELF, binary, or CSV) to use to look up tokens.')
    parser.add_argument(
        '-i',
        '--input',
        dest='input_file',
        help='The binary trace input file, generated using trace_to_file.h.')
    parser.add_argument('-o',
                        '--output',
                        dest='output_file',
                        help='The JSON file to which to write the output.')
    parser.add_argument(
        '-t',
        '--ticks_per_second',
        type=int,
        dest='ticks_per_second',
        default=1000,
        help='The clock rate of the trace events (default 1000).')

    return parser.parse_args()


def _main(args):
    events = get_trace_events_from_file(args.databases, args.input_file,
                                        args.ticks_per_second)
    json_lines = trace.generate_trace_json(events)
    save_trace_file(json_lines, args.output_file)


if __name__ == '__main__':
    if sys.version_info[0] < 3:
        sys.exit('ERROR: The trace tools require Python 3.')
    _main(_parse_args())