#!/usr/bin/env python3
# Copyright 2020 The Pigweed Authors
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
#     https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
"""
Generates json trace files viewable using chrome://tracing from binary
trace files.

Example usage:
python pw_trace_tokenized/py/trace_tokenized.py -i trace.bin -o trace.json
./out/host_clang_debug/obj/pw_trace_tokenized/bin/trace_tokenized_example_basic
"""
from enum import IntEnum
import argparse
import logging
import struct
import sys

from pw_tokenizer import database, tokens
from pw_trace import trace

_LOG = logging.getLogger('pw_trace_tokenizer')


def varint_decode(encoded):
    # Taken from pw_tokenizer.decode._decode_signed_integer
    count = 0
    result = 0
    shift = 0
    for byte in encoded:
        count += 1
        result |= (byte & 0x7f) << shift

        if not byte & 0x80:
            return result, count

        shift += 7
        if shift >= 64:
            break  # Error: varint too long

    return None, count
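
# A worked example of the decoding above (LEB128-style varint: low 7 bits per
# byte, MSB set on continuation bytes), shown here only as an illustration:
#
#   >>> varint_decode(b'\x96\x01')
#   (150, 2)
#
# (0x96 & 0x7f) = 22 gives the low 7 bits; the MSB of 0x96 marks a
# continuation, so the next byte contributes (0x01 & 0x7f) << 7 = 128,
# for a total of 22 + 128 = 150 decoded from 2 bytes.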


# Token string: "event_type|flag|module|group|label|<optional data_fmt>"
class TokenIdx(IntEnum):
    EventType = 0
    Flag = 1
    Module = 2
    Group = 3
    Label = 4
    data_fmt = 5  # optional
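
# For illustration (hypothetical values), a token string such as
#   "PW_TRACE_EVENT_TYPE_INSTANT|0|my_module|my_group|my_label"
# splits on '|' so that, for example:
#   token_values = token_string.split("|")
#   token_values[TokenIdx.EventType]  # "PW_TRACE_EVENT_TYPE_INSTANT"
#   token_values[TokenIdx.Label]      # "my_label"
# The trailing data_fmt field is present only for events that carry data.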


def get_trace_type(type_str):
    if type_str == "PW_TRACE_EVENT_TYPE_INSTANT":
        return trace.TraceType.Instantaneous
    if type_str == "PW_TRACE_EVENT_TYPE_INSTANT_GROUP":
        return trace.TraceType.InstantaneousGroup
    if type_str == "PW_TRACE_EVENT_TYPE_ASYNC_START":
        return trace.TraceType.AsyncStart
    if type_str == "PW_TRACE_EVENT_TYPE_ASYNC_STEP":
        return trace.TraceType.AsyncStep
    if type_str == "PW_TRACE_EVENT_TYPE_ASYNC_END":
        return trace.TraceType.AsyncEnd
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_START":
        return trace.TraceType.DurationStart
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_END":
        return trace.TraceType.DurationEnd
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_GROUP_START":
        return trace.TraceType.DurationGroupStart
    if type_str == "PW_TRACE_EVENT_TYPE_DURATION_GROUP_END":
        return trace.TraceType.DurationGroupEnd
    return trace.TraceType.Invalid


def has_trace_id(token_string):
    token_values = token_string.split("|")
    return trace.event_has_trace_id(token_values[TokenIdx.EventType])


def has_data(token_string):
    token_values = token_string.split("|")
    return len(token_values) > TokenIdx.data_fmt


def create_trace_event(token_string, timestamp_us, trace_id, data):
    token_values = token_string.split("|")
    return trace.TraceEvent(event_type=get_trace_type(
        token_values[TokenIdx.EventType]),
                            module=token_values[TokenIdx.Module],
                            label=token_values[TokenIdx.Label],
                            timestamp_us=timestamp_us,
                            group=token_values[TokenIdx.Group],
                            trace_id=trace_id,
                            flags=token_values[TokenIdx.Flag],
                            has_data=has_data(token_string),
                            data_fmt=(token_values[TokenIdx.data_fmt]
                                      if has_data(token_string) else ""),
                            data=data if has_data(token_string) else b'')


def parse_trace_event(buffer, db, last_time, ticks_per_second=1000):
    us_per_tick = 1000000 / ticks_per_second
    idx = 0

    # Read the token, a 4-byte value at the start of the event.
    token = struct.unpack('I', buffer[idx:idx + 4])[0]
    idx += 4

    # Look up the token string in the database.
    if len(db.token_to_entries[token]) == 0:
        _LOG.error("token not found: %08x", token)
        return None
    token_string = str(db.token_to_entries[token][0])

    # Decode the tick delta since the previous event and convert to time.
    time_delta, time_bytes = varint_decode(buffer[idx:])
    timestamp_us = last_time + us_per_tick * time_delta
    idx += time_bytes

    # Get the trace id, if this event type carries one.
    trace_id = None
    if has_trace_id(token_string) and idx < len(buffer):
        trace_id, trace_id_bytes = varint_decode(buffer[idx:])
        idx += trace_id_bytes

    # Any remaining bytes are the event's data payload.
    data = b''
    if has_data(token_string) and idx < len(buffer):
        data = buffer[idx:]

    return create_trace_event(token_string, timestamp_us, trace_id, data)
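
# Sketch of the record layout this script assumes, inferred from the parsing
# code above and from the trace_to_file.h framing mentioned in the help text:
# each record is a 1-byte size followed by `size` bytes of event payload,
#
#   [ size ][ 4-byte token ][ varint tick delta ][ varint trace id? ][ data? ]
#
# where the trace id is present only for event types for which
# trace.event_has_trace_id() is true, and trailing bytes (if any) are the
# data payload described by the token's optional data_fmt field.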


def get_trace_events_from_file(databases, input_file_name):
    """Decodes the trace events in a binary trace file."""

    db = tokens.Database.merged(*databases)
    last_timestamp = 0
    events = []
    with open(input_file_name, "rb") as input_file:
        bytes_read = input_file.read()
        idx = 0

        while idx + 1 < len(bytes_read):
            # Read the size prefix and check that the full event was captured.
            size = int(bytes_read[idx])
            if idx + size > len(bytes_read):
                _LOG.error("incomplete file")
                break

            event = parse_trace_event(bytes_read[idx + 1:idx + 1 + size], db,
                                      last_timestamp)
            if event:
                last_timestamp = event.timestamp_us
                events.append(event)
            idx = idx + size + 1
    return events
167 """Parse and return command line arguments."""
169 parser = argparse.ArgumentParser(
171 formatter_class=argparse.RawDescriptionHelpFormatter)
175 action=database.LoadTokenDatabases,
176 help='Databases (ELF, binary, or CSV) to use to lookup tokens.')
181 help='The binary trace input file, generated using trace_to_file.h.')
182 parser.add_argument('-o',
185 help=('The json file to which to write the output.'))
187 return parser.parse_args()


def _main(args):
    events = get_trace_events_from_file(args.databases, args.input_file)
    json_lines = trace.generate_trace_json(events)

    with open(args.output_file, 'w') as output_file:
        for line in json_lines:
            output_file.write("%s,\n" % line)


if __name__ == '__main__':
    if sys.version_info[0] < 3:
        sys.exit('ERROR: The detokenizer command line tools require Python 3.')
    _main(_parse_args())