moveit_benchmark_statistics.py
Go to the documentation of this file.
1 #!/usr/bin/env python
2 
3 ######################################################################
4 # Software License Agreement (BSD License)
5 #
6 # Copyright (c) 2010, Rice University
7 # All rights reserved.
8 #
9 # Redistribution and use in source and binary forms, with or without
10 # modification, are permitted provided that the following conditions
11 # are met:
12 #
13 # * Redistributions of source code must retain the above copyright
14 # notice, this list of conditions and the following disclaimer.
15 # * Redistributions in binary form must reproduce the above
16 # copyright notice, this list of conditions and the following
17 # disclaimer in the documentation and/or other materials provided
18 # with the distribution.
19 # * Neither the name of the Rice University nor the names of its
20 # contributors may be used to endorse or promote products derived
21 # from this software without specific prior written permission.
22 #
23 # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
24 # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
25 # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
26 # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
27 # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
28 # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
29 # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
30 # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
31 # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
32 # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
33 # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
34 # POSSIBILITY OF SUCH DAMAGE.
35 ######################################################################
36 
37 # Author: Mark Moll, Ioan Sucan, Luis G. Torres
38 
39 from sys import argv, exit
40 from os.path import basename, splitext
41 import sqlite3
42 import datetime
43 import matplotlib
44 matplotlib.use('pdf')
45 from matplotlib import __version__ as matplotlibversion
46 from matplotlib.backends.backend_pdf import PdfPages
47 import matplotlib.pyplot as plt
48 import numpy as np
49 from math import floor
50 from optparse import OptionParser, OptionGroup
51 
def readLogValue(filevar, desired_token_index, expected_tokens):
    """Split the next line of *filevar* into whitespace-separated tokens and
    return the token at *desired_token_index*.

    *expected_tokens* maps token indices (possibly negative) to the exact
    token expected at that position.  If any expectation fails — or the line
    has too few tokens to satisfy the indices used — the read is undone
    (the file position is restored) and None is returned.
    """
    start_pos = filevar.tell()
    tokens = filevar.readline().split()
    try:
        for token_index in expected_tokens:
            if not tokens[token_index] == expected_tokens[token_index]:
                # undo the read, if we failed to parse.
                filevar.seek(start_pos)
                return None
        return tokens[desired_token_index]
    except IndexError:
        # Line was shorter than the expected/desired indices require
        # (e.g. a blank line); treat it like a failed match instead of
        # crashing, and undo the read.
        filevar.seek(start_pos)
        return None
64 
def readOptionalLogValue(filevar, desired_token_index, expected_tokens = {}):
    """Read an optional log value; returns None when the line does not match
    the expected tokens (the read is undone in that case)."""
    value = readLogValue(filevar, desired_token_index, expected_tokens)
    return value
67 
def readRequiredLogValue(name, filevar, desired_token_index, expected_tokens = {}):
    """Read a log value that must be present.

    *name* is a human-readable description used in the error message.
    Raises Exception if the value cannot be parsed from the current line.
    """
    result = readLogValue(filevar, desired_token_index, expected_tokens)
    # Identity comparison with None is the idiomatic (PEP 8) form; the
    # original used `== None`.
    if result is None:
        raise Exception("Unable to read " + name)
    return result
73 
def ensurePrefix(line, prefix):
    """Return *line* unchanged, raising an Exception unless it starts with
    *prefix*."""
    if line.startswith(prefix):
        return line
    raise Exception("Expected prefix " + prefix + " was not found")
78 
def readOptionalMultilineValue(filevar):
    """Read an optional multi-line value delimited by a "<<<|" line and a
    "|>>>" line.

    Returns the text between the markers (trailing newlines preserved), or
    None — with the file position restored — when the opening marker is
    absent.  Raises Exception if the closing marker is never found.

    NOTE: the `def` header of this function was lost in the extracted
    listing; the name and single-argument signature are reconstructed from
    the call site in readBenchmarkLog.
    """
    start_pos = filevar.tell()
    line = filevar.readline()
    if not line.startswith("<<<|"):
        filevar.seek(start_pos)
        return None
    value = ''
    line = filevar.readline()
    while not line.startswith('|>>>'):
        value = value + line
        line = filevar.readline()
        # BUG FIX: file.readline() returns '' at EOF, never None, so the
        # original `if line == None:` test could never fire and a truncated
        # file caused an infinite loop.
        if not line:
            raise Exception("Expected token |>>> missing")
    return value
93 
def readRequiredMultilineValue(filevar):
    """Read a required multi-line value delimited by a "<<<|" line and a
    "|>>>" line.

    Raises Exception (via ensurePrefix) when the opening marker is missing,
    and when the closing marker is never found.

    NOTE: the `def` header of this function was lost in the extracted
    listing; the name and single-argument signature are reconstructed from
    the call site in readBenchmarkLog.
    """
    ensurePrefix(filevar.readline(), "<<<|")
    value = ''
    line = filevar.readline()
    while not line.startswith('|>>>'):
        value = value + line
        line = filevar.readline()
        # BUG FIX: file.readline() returns '' at EOF, never None, so the
        # original `if line == None:` test could never fire and a truncated
        # file caused an infinite loop.
        if not line:
            raise Exception("Expected token |>>> missing")
    return value
104 
105 
def readBenchmarkLog(dbname, filenames):
    """Parse benchmark log files and store the parsed data in a sqlite3 database.

    dbname    -- path of the sqlite3 database to create/extend.
    filenames -- iterable of benchmark log file paths; each file contributes
                 one row to `experiments` plus one `runs` row per planner run.

    The log format is positional: header values, optional multi-line setup /
    cpuinfo sections, then per-planner settings, run measurements, and an
    optional progress-data section terminated by a '.' line.
    """

    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute('PRAGMA FOREIGN_KEYS = ON')

    # create all tables if they don't already exist
    c.executescript("""CREATE TABLE IF NOT EXISTS experiments
        (id INTEGER PRIMARY KEY AUTOINCREMENT, name VARCHAR(512),
        totaltime REAL, timelimit REAL, memorylimit REAL, runcount INTEGER,
        version VARCHAR(128), hostname VARCHAR(1024), cpuinfo TEXT,
        date DATETIME, seed INTEGER, setup TEXT);
        CREATE TABLE IF NOT EXISTS plannerConfigs
        (id INTEGER PRIMARY KEY AUTOINCREMENT,
        name VARCHAR(512) NOT NULL, settings TEXT);
        CREATE TABLE IF NOT EXISTS enums
        (name VARCHAR(512), value INTEGER, description TEXT,
        PRIMARY KEY (name, value));
        CREATE TABLE IF NOT EXISTS runs
        (id INTEGER PRIMARY KEY AUTOINCREMENT, experimentid INTEGER, plannerid INTEGER,
        FOREIGN KEY (experimentid) REFERENCES experiments(id) ON DELETE CASCADE,
        FOREIGN KEY (plannerid) REFERENCES plannerConfigs(id) ON DELETE CASCADE);
        CREATE TABLE IF NOT EXISTS progress
        (runid INTEGER, time REAL, PRIMARY KEY (runid, time),
        FOREIGN KEY (runid) REFERENCES runs(id) ON DELETE CASCADE)""")

    for filename in filenames:
        print('Processing ' + filename)
        logfile = open(filename,'r')
        # The version line is optional; remember the position so we can
        # rewind after probing for it.
        start_pos = logfile.tell()
        libname = readOptionalLogValue(logfile, 0, {1 : "version"})
        if libname == None:
            libname = "OMPL"
        logfile.seek(start_pos)
        version = readOptionalLogValue(logfile, -1, {1 : "version"})
        if version == None:
            # set the version number to make Planner Arena happy
            version = "0.0.0"
        version = ' '.join([libname, version])
        expname = readRequiredLogValue("experiment name", logfile, -1, {0 : "Experiment"})
        hostname = readRequiredLogValue("hostname", logfile, -1, {0 : "Running"})
        date = ' '.join(ensurePrefix(logfile.readline(), "Starting").split()[2:])
        expsetup = readRequiredMultilineValue(logfile)
        cpuinfo = readOptionalMultilineValue(logfile)
        rseed = int(readRequiredLogValue("random seed", logfile, 0, {-2 : "random", -1 : "seed"}))
        timelimit = float(readRequiredLogValue("time limit", logfile, 0, {-3 : "seconds", -2 : "per", -1 : "run"}))
        memorylimit = float(readRequiredLogValue("memory limit", logfile, 0, {-3 : "MB", -2 : "per", -1 : "run"}))
        # Run count is optional in older logs; -1 means "unknown".
        nrrunsOrNone = readOptionalLogValue(logfile, 0, {-3 : "runs", -2 : "per", -1 : "planner"})
        nrruns = -1
        if nrrunsOrNone != None:
            nrruns = int(nrrunsOrNone)
        totaltime = float(readRequiredLogValue("total time", logfile, 0, {-3 : "collect", -2 : "the", -1 : "data"}))
        # Optional enum definitions: each line is "name|desc0|desc1|...".
        numEnums = 0
        numEnumsOrNone = readOptionalLogValue(logfile, 0, {-2 : "enum"})
        if numEnumsOrNone != None:
            numEnums = int(numEnumsOrNone)
        for i in range(numEnums):
            enum = logfile.readline()[:-1].split('|')
            # NOTE(review): enum[0] is interpolated into SQL; assumed safe
            # because it comes from trusted benchmark logs.
            c.execute('SELECT * FROM enums WHERE name IS "%s"' % enum[0])
            if c.fetchone() == None:
                for j in range(len(enum)-1):
                    c.execute('INSERT INTO enums VALUES (?,?,?)',
                        (enum[0],j,enum[j+1]))
        c.execute('INSERT INTO experiments VALUES (?,?,?,?,?,?,?,?,?,?,?,?)',
            (None, expname, totaltime, timelimit, memorylimit, nrruns,
            version, hostname, cpuinfo, date, rseed, expsetup) )
        experimentId = c.lastrowid
        numPlanners = int(readRequiredLogValue("planner count", logfile, 0, {-1 : "planners"}))
        for i in range(numPlanners):
            plannerName = logfile.readline()[:-1]
            print('Parsing data for ' + plannerName)

            # read common data for planner
            numCommon = int(logfile.readline().split()[0])
            settings = ''
            for j in range(numCommon):
                settings = settings + logfile.readline() + ';'

            # find planner id
            c.execute('SELECT id FROM plannerConfigs WHERE (name=? AND settings=?)',
                (plannerName, settings,))
            p = c.fetchone()
            if p==None:
                c.execute('INSERT INTO plannerConfigs VALUES (?,?,?)',
                    (None, plannerName, settings,))
                plannerId = c.lastrowid
            else:
                plannerId = p[0]

            # get current column names
            c.execute('PRAGMA table_info(runs)')
            columnNames = [col[1] for col in c.fetchall()]

            # read properties and add columns as necessary; each property
            # line is "<name words...> <sql-type>".
            numProperties = int(logfile.readline().split()[0])
            propertyNames = ['experimentid', 'plannerid']
            for j in range(numProperties):
                field = logfile.readline().split()
                propertyType = field[-1]
                propertyName = '_'.join(field[:-1])
                if propertyName not in columnNames:
                    c.execute('ALTER TABLE runs ADD %s %s' % (propertyName, propertyType))
                propertyNames.append(propertyName)
            # read measurements; empty/'nan'/'inf' fields become SQL NULL
            insertFmtStr = 'INSERT INTO runs (' + ','.join(propertyNames) + \
                ') VALUES (' + ','.join('?'*len(propertyNames)) + ')'
            numRuns = int(logfile.readline().split()[0])
            runIds = []
            for j in range(numRuns):
                values = tuple([experimentId, plannerId] + \
                    [None if len(x) == 0 or x == 'nan' or x == 'inf' else x
                    for x in logfile.readline().split('; ')[:-1]])
                c.execute(insertFmtStr, values)
                # extract primary key of each run row so we can reference them
                # in the planner progress data table if needed
                runIds.append(c.lastrowid)

            nextLine = logfile.readline().strip()

            # read planner progress data if it's supplied ('.' means none)
            if nextLine != '.':
                # get current column names
                c.execute('PRAGMA table_info(progress)')
                columnNames = [col[1] for col in c.fetchall()]

                # read progress properties and add columns as necessary
                numProgressProperties = int(nextLine.split()[0])
                progressPropertyNames = ['runid']
                for i in range(numProgressProperties):
                    field = logfile.readline().split()
                    progressPropertyType = field[-1]
                    progressPropertyName = "_".join(field[:-1])
                    if progressPropertyName not in columnNames:
                        c.execute('ALTER TABLE progress ADD %s %s' %
                            (progressPropertyName, progressPropertyType))
                    progressPropertyNames.append(progressPropertyName)
                # read progress measurements: one line per run, samples
                # separated by ';', fields within a sample by ','.
                insertFmtStr = 'INSERT INTO progress (' + \
                    ','.join(progressPropertyNames) + ') VALUES (' + \
                    ','.join('?'*len(progressPropertyNames)) + ')'
                numRuns = int(logfile.readline().split()[0])
                for j in range(numRuns):
                    dataSeries = logfile.readline().split(';')[:-1]
                    for dataSample in dataSeries:
                        values = tuple([runIds[j]] + \
                            [None if len(x) == 0 or x == 'nan' or x == 'inf' else x
                            for x in dataSample.split(',')[:-1]])
                        try:
                            c.execute(insertFmtStr, values)
                        except sqlite3.IntegrityError:
                            # (runid, time) is the primary key; two samples at
                            # the same timestamp would collide.
                            print('Ignoring duplicate progress data. Consider increasing ompl::tools::Benchmark::Request::timeBetweenUpdates.')
                            pass

                # consume the '.' terminator line after the progress section
                logfile.readline()
        logfile.close()
    conn.commit()
    c.close()
264 
def plotAttribute(cur, planners, attribute, typename):
    """Create a plot for a particular attribute. It will include data for
    all planners that have data for this attribute.

    cur       -- open sqlite3 cursor on the benchmark database.
    planners  -- list of (plannerid, plannerName) tuples.
    attribute -- column name in the `runs` table.
    typename  -- SQL type of the column ('ENUM', 'BOOLEAN', or numeric);
                 selects stacked bars, percentage bars, or box plots.
    """
    labels = []
    measurements = []
    nanCounts = []
    if typename == 'ENUM':
        # Enum values are plotted as per-value percentages, so fetch the
        # value descriptions up front.
        cur.execute('SELECT description FROM enums where name IS "%s"' % attribute)
        descriptions = [ t[0] for t in cur.fetchall() ]
        numValues = len(descriptions)
    for planner in planners:
        cur.execute('SELECT %s FROM runs WHERE plannerid = %s AND %s IS NOT NULL' \
            % (attribute, planner[0], attribute))
        measurement = [ t[0] for t in cur.fetchall() if t[0] != None ]
        # Only planners with at least one non-NULL measurement are plotted.
        if len(measurement) > 0:
            # Count NULL entries so missing data can be annotated on the plot.
            cur.execute('SELECT count(*) FROM runs WHERE plannerid = %s AND %s IS NULL' \
                % (planner[0], attribute))
            nanCounts.append(cur.fetchone()[0])
            labels.append(planner[1])
            if typename == 'ENUM':
                # convert counts per enum value into percentages
                scale = 100. / len(measurement)
                measurements.append([measurement.count(i)*scale for i in range(numValues)])
            else:
                measurements.append(measurement)

    if len(measurements)==0:
        print('Skipping "%s": no available measurements' % attribute)
        return

    plt.clf()
    ax = plt.gca()
    if typename == 'ENUM':
        # stacked bar chart: one stack segment per enum value
        width = .5
        measurements = np.transpose(np.vstack(measurements))
        colsum = np.sum(measurements, axis=1)
        # only plot enum values that occur at least once
        rows = np.where(colsum != 0)[0]
        heights = np.zeros((1,measurements.shape[1]))
        ind = range(measurements.shape[1])
        legend_labels = []
        for i in rows:
            plt.bar(ind, measurements[i], width, bottom=heights[0],
                color=matplotlib.cm.hot(int(floor(i*256/numValues))),
                label=descriptions[i])
            heights = heights + measurements[i]
        xtickNames = plt.xticks([x+width/2. for x in ind], labels, rotation=30)
        ax.set_ylabel(attribute.replace('_',' ') + ' (%)')
        # shrink the axes to leave room for the legend on the right
        box = ax.get_position()
        ax.set_position([box.x0, box.y0, box.width * 0.8, box.height])
        props = matplotlib.font_manager.FontProperties()
        props.set_size('small')
        ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), prop = props)
    elif typename == 'BOOLEAN':
        # bar chart of percentage of runs where the attribute was true
        width = .5
        measurementsPercentage = [sum(m) * 100. / len(m) for m in measurements]
        ind = range(len(measurements))
        plt.bar(ind, measurementsPercentage, width)
        xtickNames = plt.xticks([x + width / 2. for x in ind], labels, rotation=30, fontsize=8)
        ax.set_ylabel(attribute.replace('_',' ') + ' (%)')
        plt.subplots_adjust(bottom=0.3) # Squish the plot into the upper 2/3 of the page. Leave room for labels
    else:
        # numeric attribute: box plot per planner
        if int(matplotlibversion.split('.')[0])<1:
            # bootstrap parameter requires matplotlib >= 1.0
            plt.boxplot(measurements, notch=0, sym='k+', vert=1, whis=1.5)
        else:
            plt.boxplot(measurements, notch=0, sym='k+', vert=1, whis=1.5, bootstrap=1000)
        ax.set_ylabel(attribute.replace('_',' '))

        #xtickNames = plt.xticks(labels, rotation=30, fontsize=10)
        #plt.subplots_adjust(bottom=0.3) # Squish the plot into the upper 2/3 of the page. Leave room for labels

        xtickNames = plt.setp(ax,xticklabels=labels)
        plt.setp(xtickNames, rotation=30)
        for tick in ax.xaxis.get_major_ticks(): # shrink the font size of the x tick labels
            tick.label.set_fontsize(8)
        plt.subplots_adjust(bottom=0.3) # Squish the plot into the upper 2/3 of the page. Leave room for labels
    ax.set_xlabel('Motion planning algorithm')
    ax.yaxis.grid(True, linestyle='-', which='major', color='lightgrey', alpha=0.5)
    if max(nanCounts)>0:
        # annotate each planner with its count of failed (NULL) runs
        maxy = max([max(y) for y in measurements])
        for i in range(len(labels)):
            x = i+width/2 if typename=='BOOLEAN' else i+1
            ax.text(x, .95*maxy, str(nanCounts[i]), horizontalalignment='center', size='small')
    plt.show()
347 
def plotProgressAttribute(cur, planners, attribute):
    """Plot data for a single planner progress attribute. Will create an
average time-plot with error bars of the attribute over all runs for
each planner.

    cur       -- open sqlite3 cursor on the benchmark database.
    planners  -- list of (plannerid, plannerName) tuples.
    attribute -- column name in the `progress` table.
    """

    import numpy.ma as ma

    plt.clf()
    ax = plt.gca()
    ax.set_xlabel('time (s)')
    ax.set_ylabel(attribute.replace('_',' '))
    plannerNames = []
    for planner in planners:
        # skip planners that recorded no non-NULL samples for this attribute
        cur.execute("""SELECT count(progress.%s) FROM progress INNER JOIN runs
            ON progress.runid = runs.id AND runs.plannerid=%s
            AND progress.%s IS NOT NULL""" \
            % (attribute, planner[0], attribute))
        if cur.fetchone()[0] > 0:
            plannerNames.append(planner[1])
            cur.execute("""SELECT DISTINCT progress.runid FROM progress INNER JOIN runs
                WHERE progress.runid=runs.id AND runs.plannerid=?""", (planner[0],))
            runids = [t[0] for t in cur.fetchall()]
            timeTable = []
            dataTable = []
            for r in runids:
                # Select data for given run
                cur.execute('SELECT time, %s FROM progress WHERE runid = %s ORDER BY time' % (attribute,r))
                (time, data) = zip(*(cur.fetchall()))
                timeTable.append(time)
                dataTable.append(data)
            # It's conceivable that the sampling process may have
            # generated more samples for one run than another; in this
            # case, truncate all data series to length of shortest
            # one.
            fewestSamples = min(len(time[:]) for time in timeTable)
            times = np.array(timeTable[0][:fewestSamples])
            dataArrays = np.array([data[:fewestSamples] for data in dataTable])
            # mask NULL (None) samples so they don't poison the statistics
            filteredData = ma.masked_array(dataArrays, np.equal(dataArrays, None), dtype=float)

            # mean and sample standard deviation across runs at each time step
            means = np.mean(filteredData, axis=0)
            stddevs = np.std(filteredData, axis=0, ddof=1)

            # plot average with error bars (at most ~20 bars per curve)
            plt.errorbar(times, means, yerr=2*stddevs, errorevery=max(1, len(times) // 20))
    ax.legend(plannerNames)
    if len(plannerNames)>0:
        plt.show()
    else:
        # nothing was plotted; clear the axis labels set above
        plt.clf()
397 
def plotStatistics(dbname, fname):
    """Create a PDF file with box plots for all attributes.

    dbname -- path of the sqlite3 benchmark database.
    fname  -- output PDF filename; one page per plotted attribute plus a
              final summary page per experiment.
    """
    print("Generating plots...")
    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute('PRAGMA FOREIGN_KEYS = ON')
    c.execute('SELECT id, name FROM plannerConfigs')
    planners = [(t[0],t[1].replace('geometric_','').replace('control_',''))
        for t in c.fetchall()]
    c.execute('PRAGMA table_info(runs)')
    # skip the id/experimentid/plannerid bookkeeping columns
    colInfo = c.fetchall()[3:]

    pp = PdfPages(fname)
    for col in colInfo:
        if col[2] == 'BOOLEAN' or col[2] == 'ENUM' or \
            col[2] == 'INTEGER' or col[2] == 'REAL':
            plotAttribute(c, planners, col[1], col[2])
            pp.savefig(plt.gcf())

    c.execute('PRAGMA table_info(progress)')
    # skip the runid/time bookkeeping columns
    colInfo = c.fetchall()[2:]
    for col in colInfo:
        plotProgressAttribute(c, planners, col[1])
        pp.savefig(plt.gcf())
    plt.clf()

    # final page: a text summary of each experiment
    pagey = 0.9
    pagex = 0.06
    c.execute("""SELECT id, name, timelimit, memorylimit FROM experiments""")
    experiments = c.fetchall()
    for experiment in experiments:
        c.execute("""SELECT count(*) FROM runs WHERE runs.experimentid = %d
            GROUP BY runs.plannerid""" % experiment[0])
        numRuns = [run[0] for run in c.fetchall()]
        # BUG FIX: the counts are ints, so str.join() on them raised
        # TypeError whenever planners had differing run counts; join their
        # string forms instead, and format with %s below since numRuns may
        # now be either an int or a comma-separated string.
        numRuns = numRuns[0] if len(set(numRuns)) == 1 else ','.join(str(n) for n in numRuns)

        plt.figtext(pagex, pagey, 'Experiment "%s"' % experiment[1])
        plt.figtext(pagex, pagey-0.05, 'Number of averaged runs: %s' % numRuns)
        plt.figtext(pagex, pagey-0.10, "Time limit per run: %g seconds" % experiment[2])
        plt.figtext(pagex, pagey-0.15, "Memory limit per run: %g MB" % experiment[3])
        pagey -= 0.22
    plt.show()
    pp.savefig(plt.gcf())
    pp.close()
442 
def saveAsMysql(dbname, mysqldump):
    """Translate the sqlite3 database *dbname* into a MySQL dump file.

    dbname    -- path of the sqlite3 benchmark database.
    mysqldump -- path of the MySQL dump file to write.
    """
    # See http://stackoverflow.com/questions/1067060/perl-to-python
    import re
    print("Saving as MySQL dump file...")

    conn = sqlite3.connect(dbname)
    mysqldump = open(mysqldump,'w')

    # make sure all tables are dropped in an order that keeps foreign keys valid
    c = conn.cursor()
    c.execute("SELECT name FROM sqlite_master WHERE type='table'")
    table_names = [ str(t[0]) for t in c.fetchall() ]
    c.close()
    # referenced tables must be dropped last
    last = ['experiments', 'planner_configs']
    for table in table_names:
        if table.startswith("sqlite"):
            continue
        if not table in last:
            mysqldump.write("DROP TABLE IF EXISTS `%s`;\n" % table)
    for table in last:
        if table in table_names:
            mysqldump.write("DROP TABLE IF EXISTS `%s`;\n" % table)

    for line in conn.iterdump():
        # skip statements MySQL doesn't want (for-else: `process` stays
        # False only when the inner loop `break`s on a match)
        process = False
        for nope in ('BEGIN TRANSACTION','COMMIT',
            'sqlite_sequence','CREATE UNIQUE INDEX', 'CREATE VIEW'):
            if nope in line: break
        else:
            process = True
        if not process: continue
        # collapse all whitespace so the regexes below see one-line statements
        line = re.sub(r"[\n\r\t ]+", " ", line)
        m = re.search('CREATE TABLE ([a-zA-Z0-9_]*)(.*)', line)
        if m:
            name, sub = m.groups()
            # MySQL quotes identifiers with backticks, not double quotes
            sub = sub.replace('"','`')
            line = '''CREATE TABLE IF NOT EXISTS %(name)s%(sub)s'''
            line = line % dict(name=name, sub=sub)
            # make sure we use an engine that supports foreign keys
            line = line.rstrip("\n\t ;") + " ENGINE = InnoDB;\n"
        else:
            m = re.search('INSERT INTO "([a-zA-Z0-9_]*)"(.*)', line)
            if m:
                line = 'INSERT INTO %s%s\n' % m.groups()
                # escape embedded double quotes, then switch the (now \")
                # remnants to single quotes for MySQL string literals
                line = line.replace('"', r'\"')
                line = line.replace('"', "'")

        # sqlite dumps booleans as 't'/'f'; MySQL wants 1/0. The two-step
        # replace avoids re-matching the substituted text.
        line = re.sub(r"([^'])'t'(.)", "\\1THIS_IS_TRUE\\2", line)
        line = line.replace('THIS_IS_TRUE', '1')
        line = re.sub(r"([^'])'f'(.)", "\\1THIS_IS_FALSE\\2", line)
        line = line.replace('THIS_IS_FALSE', '0')
        line = line.replace('AUTOINCREMENT', 'AUTO_INCREMENT')
        mysqldump.write(line)
    mysqldump.close()
497 
def computeViews(dbname):
    """(Re)create SQL views ranking planner configurations.

    Builds `bestPlannerConfigsPerExperiment` and `bestPlannerConfigs`,
    ordering planners by average solve rate (descending) and average total
    time (ascending).
    """
    conn = sqlite3.connect(dbname)
    c = conn.cursor()
    c.execute('PRAGMA FOREIGN_KEYS = ON')
    c.execute('PRAGMA table_info(runs)')
    # kinodynamic paths cannot be simplified (or least not easily),
    # so simplification_time may not exist as a database column
    if 'simplification_time' in [col[1] for col in c.fetchall()]:
        s0 = """SELECT plannerid, plannerConfigs.name AS plannerName, experimentid, solved, time + simplification_time AS total_time
            FROM plannerConfigs INNER JOIN experiments INNER JOIN runs
            ON plannerConfigs.id=runs.plannerid AND experiments.id=runs.experimentid"""
    else:
        s0 = """SELECT plannerid, plannerConfigs.name AS plannerName, experimentid, solved, time AS total_time
            FROM plannerConfigs INNER JOIN experiments INNER JOIN runs
            ON plannerConfigs.id=runs.plannerid AND experiments.id=runs.experimentid"""
    # per-experiment ranking: average over runs, then pick best per name
    s1 = """SELECT plannerid, plannerName, experimentid, AVG(solved) AS avg_solved, AVG(total_time) AS avg_total_time
        FROM (%s) GROUP BY plannerid, experimentid""" % s0
    s2 = """SELECT plannerid, experimentid, MIN(avg_solved) AS avg_solved, avg_total_time
        FROM (%s) GROUP BY plannerName, experimentid ORDER BY avg_solved DESC, avg_total_time ASC""" % s1
    # drop first: CREATE VIEW IF NOT EXISTS would keep a stale definition
    c.execute('DROP VIEW IF EXISTS bestPlannerConfigsPerExperiment')
    c.execute('CREATE VIEW IF NOT EXISTS bestPlannerConfigsPerExperiment AS %s' % s2)

    # overall ranking across all experiments
    s1 = """SELECT plannerid, plannerName, AVG(solved) AS avg_solved, AVG(total_time) AS avg_total_time
        FROM (%s) GROUP BY plannerid""" % s0
    s2 = """SELECT plannerid, MIN(avg_solved) AS avg_solved, avg_total_time
        FROM (%s) GROUP BY plannerName ORDER BY avg_solved DESC, avg_total_time ASC""" % s1
    c.execute('DROP VIEW IF EXISTS bestPlannerConfigs')
    c.execute('CREATE VIEW IF NOT EXISTS bestPlannerConfigs AS %s' % s2)

    conn.commit()
    c.close()
529 
if __name__ == "__main__":
    # Command-line entry point: parse benchmark logs into a sqlite3 database
    # and optionally produce views, plots, or a MySQL dump.
    usage = """%prog [options] [<benchmark.log> ...]"""
    parser = OptionParser("A script to parse benchmarking results.\n" + usage)
    parser.add_option("-d", "--database", dest="dbname", default="benchmark.db",
        help="Filename of benchmark database [default: %default]")
    parser.add_option("-v", "--view", action="store_true", dest="view", default=False,
        help="Compute the views for best planner configurations")
    parser.add_option("-p", "--plot", dest="plot", default=None,
        help="Create a PDF of plots with the filename provided")
    parser.add_option("-m", "--mysql", dest="mysqldb", default=None,
        help="Save SQLite3 database as a MySQL dump file")
    (options, args) = parser.parse_args()

    if len(args) == 0:
        parser.error("No arguments were provided. Please provide full path of log file")

    # NOTE(review): only a single log file triggers database ingestion;
    # multiple files are silently ignored here — confirm whether
    # `len(args) >= 1` was intended.
    if len(args) == 1:
        readBenchmarkLog(options.dbname, args)
        # If we update the database, we recompute the views as well
        options.view = True

    if options.view:
        computeViews(options.dbname)

    if options.plot:
        plotStatistics(options.dbname, options.plot)

    if options.mysqldb:
        saveAsMysql(options.dbname, options.mysqldb)
559 
def plotAttribute(cur, planners, attribute, typename)
def readRequiredLogValue(name, filevar, desired_token_index, expected_tokens={})
def readOptionalLogValue(filevar, desired_token_index, expected_tokens={})
def readLogValue(filevar, desired_token_index, expected_tokens)
def plotProgressAttribute(cur, planners, attribute)


benchmarks
Author(s): Ryan Luna
autogenerated on Wed Jul 10 2019 04:04:08