'''
example program that dumps a Mavlink log file. The log file is
assumed to be in the format that qgroundcontrol uses, which consists
of a series of MAVLink packets, each with a 64 bit timestamp
header. The timestamp is in microseconds since 1970 (unix epoch).
'''
from __future__ import print_function

import fnmatch
import json
import os
import struct
import time
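# A .tlog record is simply that 8-byte big-endian timestamp (microseconds since
# the unix epoch) followed by the raw MAVLink packet bytes; the
# struct.pack('>Q', ...) calls below write exactly that header when re-emitting
# packets with -o.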
try:
    from pymavlink.mavextra import *
except Exception:
    print("WARNING: Numpy missing, mathematical notation will not be supported..")
from argparse import ArgumentParser

parser = ArgumentParser(description=__doc__)
parser.add_argument("--no-timestamps", dest="notimestamps", action='store_true', help="Log doesn't have timestamps")
parser.add_argument("--planner", action='store_true', help="use planner file format")
parser.add_argument("--robust", action='store_true', help="Enable robust parsing (skip over bad data)")
parser.add_argument("-f", "--follow", action='store_true', help="keep waiting for more data at end of file")
parser.add_argument("--condition", default=None, help="select packets by condition")
parser.add_argument("-q", "--quiet", action='store_true', help="don't display packets")
parser.add_argument("-o", "--output", default=None, help="output matching packets to given file")
parser.add_argument("-p", "--parms", action='store_true', help="preserve parameters in output with -o")
parser.add_argument("--format", default=None, help="Change the output format between 'standard', 'json', and 'csv'. For the CSV output, you must supply types that you want.")
parser.add_argument("--csv_sep", dest="csv_sep", default=",", help="Select the delimiter between columns for the output CSV file. Use 'tab' to specify tabs. Only applies when --format=csv")
parser.add_argument("--types", default=None, help="types of messages (comma separated with wildcard)")
parser.add_argument("--nottypes", default=None, help="types of messages not to include (comma separated with wildcard)")
parser.add_argument("--dialect", default="ardupilotmega", help="MAVLink dialect")
parser.add_argument("--zero-time-base", action='store_true', help="use Z time base for DF logs")
parser.add_argument("--no-bad-data", action='store_true', help="Don't output corrupted messages")
parser.add_argument("--show-source", action='store_true', help="Show source system ID and component ID")
parser.add_argument("--show-seq", action='store_true', help="Show sequence numbers")
parser.add_argument("--show-types", action='store_true', help="Shows all message types available on opened log")
parser.add_argument("--source-system", type=int, default=None, help="filter by source system ID")
parser.add_argument("--source-component", type=int, default=None, help="filter by source component ID")
parser.add_argument("--link", type=int, default=None, help="filter by comms link ID")
parser.add_argument("--mav10", action='store_true', help="parse as MAVLink1")
parser.add_argument("log", metavar="LOG")
args = parser.parse_args()
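# Example invocations (script and log file names here are illustrative, not part
# of this file):
#   python mavlogdump.py flight.tlog
#   python mavlogdump.py --types ATTITUDE,GPS_RAW_INT --format csv flight.tlog
#   python mavlogdump.py --types ATT --format csv flight.bin
#   python mavlogdump.py --format json --show-source flight.tlog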
# Parse as MAVLink 2 unless --mav10 was given.
if not args.mav10:
    os.environ['MAVLINK20'] = '1'

import inspect

from pymavlink import mavutil
filename = args.log
mlog = mavutil.mavlink_connection(filename, planner_format=args.planner,
                                  notimestamps=args.notimestamps,
                                  robust_parsing=args.robust,
                                  dialect=args.dialect,
                                  zero_time_base=args.zero_time_base)
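# mavlink_connection() accepts a log file path as well as serial/UDP endpoints;
# for a file it returns a log reader whose recv_match() call below yields one
# parsed message at a time.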
output = None
if args.output:
    output = open(args.output, mode='wb')
types = args.types
if types is not None:
    types = types.split(',')

nottypes = args.nottypes
if nottypes is not None:
    nottypes = nottypes.split(',')
ext = os.path.splitext(filename)[1]
isbin = ext in ['.bin', '.BIN', '.px4log']
islog = ext in ['.log', '.LOG']
istlog = ext in ['.tlog', '.TLOG']
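# Extension-based classification: .bin/.px4log and .log are treated as dataflash
# style logs (their FMT/PARM records are copied verbatim with -o), while .tlog is
# a timestamped telemetry log.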
if args.csv_sep == "tab":
    args.csv_sep = "\t"
def match_type(mtype, patterns):
    '''return True if mtype matches pattern'''
    for p in patterns:
        if fnmatch.fnmatch(mtype, p):
            return True
    return False
# When outputting CSV from a tlog we know the columns up front: take them from
# the constructor arguments of each requested message class.
fields = ['timestamp']
offsets = {}
if istlog and args.format == 'csv':
    try:
        currentOffset = 1  # how many fields in we are for each message type
        for type in types:
            try:
                typeClass = "MAVLink_{0}_message".format(type.lower())
                fields += [type + '.' + x for x in inspect.getargspec(getattr(mavutil.mavlink, typeClass).__init__).args[1:]]
                offsets[type] = currentOffset
                currentOffset += len(fields)
            except IndexError:
                quit()
    except TypeError:
        print("You must specify a list of message types if outputting CSV format via the --types argument.")
        exit()

    # The first line of output names all of the columns.
    csv_out = ["" for x in fields]
    print(args.csv_sep.join(fields))
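    # For example, --types ATTITUDE yields columns such as ATTITUDE.time_boot_ms,
    # ATTITUDE.roll, ATTITUDE.pitch, ... taken from the MAVLink_attitude_message
    # constructor's argument list.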
if isbin and args.format == 'csv':
    # For binary dataflash logs the columns come from the FMT message seen while
    # reading, so only a single message type can be dumped per run.
    if types is None or len(types) != 1:
        print("Need exactly one type when dumping CSV from bin file")
        quit()
# Track the previous timestamp so the CSV writer can merge messages that share one.
last_timestamp = None

# Message types seen in the log, reported by --show-types.
available_types = set()
# For DF logs, pre-calculate the list of message types to ask recv_match() for.
match_types = None
if types is not None and hasattr(mlog, 'name_to_id'):
    for k in mlog.name_to_id.keys():
        if match_type(k, types):
            if nottypes is not None and match_type(k, nottypes):
                continue
            if match_types is None:
                match_types = []
            match_types.append(k)
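# Passing this pre-computed list to recv_match() lets the log reader skip
# non-matching message types while parsing, instead of filtering every message
# afterwards.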
# Read through the whole file, printing each matching message on its own line.
while True:
    m = mlog.recv_match(blocking=args.follow, type=match_types)
    if m is None:
        break
    available_types.add(m.get_type())
    if isbin and m.get_type() == "FMT" and args.format == 'csv':
        # Build the CSV header from the FMT message describing the requested type.
        if m.Name == types[0]:
            fields += m.Columns.split(',')
            csv_out = ["" for x in fields]
            print(args.csv_sep.join(fields))
    if output is not None:
        if (isbin or islog) and m.get_type() == "FMT":
            output.write(m.get_msgbuf())
            continue
        if (isbin or islog) and (m.get_type() == "PARM" and args.parms):
            output.write(m.get_msgbuf())
            continue
        if m.get_type() == 'PARAM_VALUE' and args.parms:
            timestamp = getattr(m, '_timestamp', None)
            output.write(struct.pack('>Q', int(timestamp*1.0e6)) + m.get_msgbuf())
            continue
    if not mavutil.evaluate_condition(args.condition, mlog.messages):
        continue
    if args.source_system is not None and args.source_system != m.get_srcSystem():
        continue
    if args.source_component is not None and args.source_component != m.get_srcComponent():
        continue
    if args.link is not None and args.link != m._link:
        continue

    if types is not None and m.get_type() != 'BAD_DATA' and not match_type(m.get_type(), types):
        continue

    if nottypes is not None and match_type(m.get_type(), nottypes):
        continue
    # Skip BAD_DATA messages if requested, or if they are due to a bad prefix
    # (typically a mismatched MAVLink version).
    if m.get_type() == 'BAD_DATA' and (args.no_bad_data is True or m.reason == "Bad prefix"):
        continue

    timestamp = getattr(m, '_timestamp', 0.0)
    # When writing to an output file, prepend the 64-bit timestamp header for
    # tlog input and then append the raw message bytes.
    if output:
        if not (isbin or islog):
            output.write(struct.pack('>Q', int(timestamp*1.0e6)))
        try:
            output.write(m.get_msgbuf())
        except Exception as ex:
            print("Failed to write msg %s: %s" % (m.get_type(), str(ex)))

    # Don't print anything to the terminal if --quiet was given.
    if args.quiet:
        continue
    # If JSON output was requested, emit one JSON object per message, split into
    # 'meta' and 'data' sections.
    if args.format == 'json':
        # to_dict() gets us almost all the way to proper JSON.
        data = m.to_dict()

        # Remove the mavpackettype value as the type is carried in the metadata.
        del data['mavpackettype']

        # If it's a BAD_DATA message, make it JSON-serializable by converting the
        # raw byte data to a plain list.
        if 'data' in data and type(data['data']) is not dict:
            data['data'] = list(data['data'])

        # Combine the message into a single object with 'meta' and 'data' keys.
        meta = {"type": m.get_type(), "timestamp": timestamp}
        if args.show_source:
            meta["srcSystem"] = m.get_srcSystem()
            meta["srcComponent"] = m.get_srcComponent()
        outMsg = {"meta": meta, "data": data}

        print(json.dumps(outMsg))
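        # Each printed line is one self-contained JSON object, e.g. (illustrative):
        # {"meta": {"type": "HEARTBEAT", "timestamp": 1445322223.72}, "data": {...}}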
    # CSV output: accumulate fields that share a timestamp into a single row and
    # flush the row when the timestamp changes.
    elif args.format == 'csv':
        data = m.to_dict()
        type = m.get_type()

        # If this message shares the previous timestamp (or is the first message
        # seen), merge its fields into the row under construction.
        if timestamp == last_timestamp or last_timestamp is None:
            if isbin:
                newData = [str(data[y]) if y != "timestamp" else "" for y in fields]
            else:
                newData = [str(data[y.split('.')[-1]]) if y.split('.')[0] == type and y.split('.')[-1] in data else "" for y in fields]

            for i, val in enumerate(newData):
                if val:
                    csv_out[i] = val

        # Otherwise print out the completed row and start a new one with this
        # message's data.
        else:
            csv_out[0] = "{:.8f}".format(last_timestamp)
            print(args.csv_sep.join(csv_out))
            if isbin:
                csv_out = [str(data[y]) if y != "timestamp" else "" for y in fields]
            else:
                csv_out = [str(data[y.split('.')[-1]]) if y.split('.')[0] == type and y.split('.')[-1] in data else "" for y in fields]
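        # Rows are only flushed when a new timestamp appears, so output lags the
        # input by one row and the final row is never printed by this branch.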
    # Otherwise use the standard human-readable format.
    else:
        s = "%s.%02u: %s" % (time.strftime("%Y-%m-%d %H:%M:%S",
                                           time.localtime(timestamp)),
                             int(timestamp*100.0)%100, m)
        if args.show_source:
            s += " srcSystem=%u srcComponent=%u" % (m.get_srcSystem(), m.get_srcComponent())
        if args.show_seq:
            s += " seq=%u" % m.get_seq()
        if not args.show_types:
            print(s)
    # Remember this timestamp for the CSV duplicate-timestamp handling above.
    last_timestamp = timestamp
if args.show_types:
    for msgType in available_types:
        print(msgType)