forked from Imagelibrary/littlefs
scripts: Reworked to support optional json input/output
Guh This may have been more work than I expected. The goal was to allowing passing recursive results (callgraph info, structs, etc) between scripts, which is simply not possible with csv files. Unfortunately, this raised a number of questions: What happens if a script receives recursive results? -d/--diff with recursive results? How to prevent folding of ordered results (structs, hot, etc) in piped scripts? etc. And ended up with a significant rewrite of most of the result scripts' internals. Key changes: - Most result scripts now support -O/--output-json in addition to -o/--json, with -O/--output-json including any recursive results in the "children" field. - Most result scripts now support both csv and json as input to relevant flags: -u/--use, -d/--diff, -p/--percent. This is accomplished by looking for a '[' as the first character to decide if an input file is json or csv. Technically this breaks if your json has leading whitespace, but why would you ever keep whitespace around in json? The human-editability of json was already ruined the moment comments were disallowed. - csv.py requires all fields to be explicitly defined, so added -i/--enumerate, -Z/--children, and -N/--notes. At least we can provide some reasonable defaults so you shouldn't usually need to type out the whole field. - Notably, the rendering scripts (plot.py, treemapd3.py, etc) and test/bench scripts do _not_ support json. csv.py can always convert to/from json when needed. - The table renderer now supports diffing recursive results, which is nice for seeing how the hot path changed in stack.py/perf.py/etc. - Moved the -r/--hot logic up into main, so it also affects the outputted results. Note it is impossible for -z/--depth to _not_ affect the outputted results. - We now sort in one pass, which is in theory more efficient. - Renamed -t/--hot -> -r/--hot and -R/--reverse-hot, matching -s/-S. - Fixed an issue with -S/--reverse-sort where only the short form was actually reversed (I misunderstood what argparse passes to Action classes). - csv.py now supports json input/output, which is funny.
This commit is contained in:
@@ -667,7 +667,7 @@ def find_runner(runner, id=None, main=True, **args):
|
|||||||
# run under perf?
|
# run under perf?
|
||||||
if args.get('perf'):
|
if args.get('perf'):
|
||||||
cmd[:0] = args['perf_script'] + list(filter(None, [
|
cmd[:0] = args['perf_script'] + list(filter(None, [
|
||||||
'-R',
|
'--record',
|
||||||
'--perf-freq=%s' % args['perf_freq']
|
'--perf-freq=%s' % args['perf_freq']
|
||||||
if args.get('perf_freq') else None,
|
if args.get('perf_freq') else None,
|
||||||
'--perf-period=%s' % args['perf_period']
|
'--perf-period=%s' % args['perf_period']
|
||||||
|
|||||||
560
scripts/code.py
560
scripts/code.py
@@ -18,8 +18,9 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
import collections as co
|
import collections as co
|
||||||
import csv
|
import csv
|
||||||
import itertools as it
|
|
||||||
import functools as ft
|
import functools as ft
|
||||||
|
import io
|
||||||
|
import itertools as it
|
||||||
import math as mt
|
import math as mt
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@@ -449,7 +450,7 @@ def collect_dwarf_info(obj_path, tags=None, *,
|
|||||||
|
|
||||||
return DwarfInfo(info)
|
return DwarfInfo(info)
|
||||||
|
|
||||||
def collect(obj_paths, *,
|
def collect_code(obj_paths, *,
|
||||||
everything=False,
|
everything=False,
|
||||||
**args):
|
**args):
|
||||||
results = []
|
results = []
|
||||||
@@ -496,7 +497,31 @@ def collect(obj_paths, *,
|
|||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def fold(Result, results, by=None, defines=[]):
|
# common folding/tabling/read/write code
|
||||||
|
|
||||||
|
class Rev(co.namedtuple('Rev', 'x')):
|
||||||
|
__slots__ = ()
|
||||||
|
# yes we need all of these because we're a namedtuple
|
||||||
|
def __lt__(self, other):
|
||||||
|
return self.x > other.x
|
||||||
|
def __gt__(self, other):
|
||||||
|
return self.x < other.x
|
||||||
|
def __le__(self, other):
|
||||||
|
return self.x >= other.x
|
||||||
|
def __ge__(self, other):
|
||||||
|
return self.x <= other.x
|
||||||
|
|
||||||
|
def fold(Result, results, *,
|
||||||
|
by=None,
|
||||||
|
defines=[],
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
# stop when depth hits zero
|
||||||
|
if depth == 0:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# organize by by
|
||||||
if by is None:
|
if by is None:
|
||||||
by = Result._by
|
by = Result._by
|
||||||
|
|
||||||
@@ -510,7 +535,7 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
if defines:
|
if defines:
|
||||||
results_ = []
|
results_ = []
|
||||||
for r in results:
|
for r in results:
|
||||||
if all(getattr(r, k) in vs for k, vs in defines):
|
if all(str(getattr(r, k)) in vs for k, vs in defines):
|
||||||
results_.append(r)
|
results_.append(r)
|
||||||
results = results_
|
results = results_
|
||||||
|
|
||||||
@@ -527,6 +552,31 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
for name, rs in folding.items():
|
for name, rs in folding.items():
|
||||||
folded.append(sum(rs[1:], start=rs[0]))
|
folded.append(sum(rs[1:], start=rs[0]))
|
||||||
|
|
||||||
|
# sort, note that python's sort is stable
|
||||||
|
folded.sort(key=lambda r: (
|
||||||
|
# sort by explicit sort fields
|
||||||
|
tuple((Rev
|
||||||
|
if reverse ^ (not k or k in Result._fields)
|
||||||
|
else lambda x: x)(
|
||||||
|
tuple((getattr(r, k_),)
|
||||||
|
if getattr(r, k_) is not None
|
||||||
|
else ()
|
||||||
|
for k_ in ([k] if k else Result._sort)))
|
||||||
|
for k, reverse in (sort or [])),
|
||||||
|
# sort by result
|
||||||
|
r))
|
||||||
|
|
||||||
|
# recurse if we have recursive results
|
||||||
|
if hasattr(Result, '_children'):
|
||||||
|
folded = [r._replace(**{
|
||||||
|
Result._children: fold(
|
||||||
|
Result, getattr(r, Result._children),
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
sort=sort,
|
||||||
|
depth=depth-1)})
|
||||||
|
for r in folded]
|
||||||
|
|
||||||
return folded
|
return folded
|
||||||
|
|
||||||
def table(Result, results, diff_results=None, *,
|
def table(Result, results, diff_results=None, *,
|
||||||
@@ -554,124 +604,32 @@ def table(Result, results, diff_results=None, *,
|
|||||||
fields = Result._fields
|
fields = Result._fields
|
||||||
types = Result._types
|
types = Result._types
|
||||||
|
|
||||||
# fold again
|
# fold again, otherwise results risk being hidden
|
||||||
results = fold(Result, results, by=by)
|
results = fold(Result, results,
|
||||||
|
by=by,
|
||||||
|
depth=depth)
|
||||||
if diff_results is not None:
|
if diff_results is not None:
|
||||||
diff_results = fold(Result, diff_results, by=by)
|
diff_results = fold(Result, diff_results,
|
||||||
|
by=by,
|
||||||
# reduce children to hot paths? only used by some scripts
|
depth=depth)
|
||||||
if hot:
|
|
||||||
# subclass to reintroduce __dict__
|
|
||||||
Result_ = Result
|
|
||||||
class HotResult(Result_):
|
|
||||||
_i = '_hot_i'
|
|
||||||
_children = '_hot_children'
|
|
||||||
|
|
||||||
def __new__(cls, r, i=None, children=None, notes=None):
|
|
||||||
self = HotResult._make(r)
|
|
||||||
self._hot_i = i
|
|
||||||
self._hot_children = children if children is not None else []
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __add__(self, other):
|
|
||||||
return HotResult(
|
|
||||||
Result_.__add__(self, other),
|
|
||||||
self._hot_i if other._hot_i is None
|
|
||||||
else other._hot_i if self._hot_i is None
|
|
||||||
else min(self._hot_i, other._hot_i),
|
|
||||||
self._hot_children + other._hot_children)
|
|
||||||
|
|
||||||
results_ = []
|
|
||||||
for r in results:
|
|
||||||
hot_ = []
|
|
||||||
def recurse(results_, depth_):
|
|
||||||
nonlocal hot_
|
|
||||||
if not results_:
|
|
||||||
return
|
|
||||||
|
|
||||||
# find the hottest result
|
|
||||||
r = max(results_,
|
|
||||||
key=lambda r: tuple(
|
|
||||||
tuple((getattr(r, k),)
|
|
||||||
if getattr(r, k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])
|
|
||||||
if k in fields)
|
|
||||||
for k in it.chain(hot, [None])))
|
|
||||||
hot_.append(HotResult(r, i=len(hot_)))
|
|
||||||
|
|
||||||
# recurse?
|
|
||||||
if depth_ > 1:
|
|
||||||
recurse(getattr(r, Result._children),
|
|
||||||
depth_-1)
|
|
||||||
|
|
||||||
recurse(getattr(r, Result._children), depth-1)
|
|
||||||
results_.append(HotResult(r, children=hot_))
|
|
||||||
|
|
||||||
Result = HotResult
|
|
||||||
results = results_
|
|
||||||
|
|
||||||
# organize by name
|
# organize by name
|
||||||
table = {
|
table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results}
|
for r in results}
|
||||||
diff_table = {
|
diff_table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in diff_results or []}
|
for r in diff_results or []}
|
||||||
names = [name
|
|
||||||
for name in table.keys() | diff_table.keys()
|
|
||||||
if diff_results is None
|
|
||||||
or all_
|
|
||||||
or any(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(name), k, None),
|
|
||||||
getattr(diff_table.get(name), k, None))
|
|
||||||
for k in fields)]
|
|
||||||
|
|
||||||
# find compare entry if there is one
|
# find compare entry if there is one
|
||||||
if compare:
|
if compare:
|
||||||
compare_result = table.get(','.join(str(k) for k in compare))
|
compare_r = table.get(','.join(str(k) for k in compare))
|
||||||
|
|
||||||
# sort again, now with diff info, note that python's sort is stable
|
|
||||||
names.sort()
|
|
||||||
if compare:
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: (
|
|
||||||
# move compare entry to the top, note this can be
|
|
||||||
# overridden by explicitly sorting by fields
|
|
||||||
table.get(n) == compare_result,
|
|
||||||
# sort by ratio if comparing
|
|
||||||
tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(compare_result, k, None))
|
|
||||||
for k in fields)),
|
|
||||||
reverse=True)
|
|
||||||
if diff or percent:
|
|
||||||
names.sort(
|
|
||||||
# sort by ratio if diffing
|
|
||||||
key=lambda n: tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(diff_table.get(n), k, None))
|
|
||||||
for k in fields),
|
|
||||||
reverse=True)
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: tuple(
|
|
||||||
(getattr(table[n], k),)
|
|
||||||
if getattr(table.get(n), k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])),
|
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
|
||||||
|
|
||||||
|
|
||||||
# build up our lines
|
# build up our lines
|
||||||
lines = []
|
lines = []
|
||||||
@@ -698,11 +656,16 @@ def table(Result, results, diff_results=None, *,
|
|||||||
header.append('d'+k)
|
header.append('d'+k)
|
||||||
lines.append(header)
|
lines.append(header)
|
||||||
|
|
||||||
|
# delete these to try to catch typos below, we need to rebuild
|
||||||
|
# these tables at each recursive layer
|
||||||
|
del table
|
||||||
|
del diff_table
|
||||||
|
|
||||||
# entry helper
|
# entry helper
|
||||||
def table_entry(name, r, diff_r=None):
|
def table_entry(name, r, diff_r=None):
|
||||||
entry = [name]
|
entry = [name]
|
||||||
# normal entry?
|
# normal entry?
|
||||||
if ((compare is None or r == compare_result)
|
if ((compare is None or r == compare_r)
|
||||||
and not percent
|
and not percent
|
||||||
and not diff):
|
and not diff):
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -723,7 +686,7 @@ def table(Result, results, diff_results=None, *,
|
|||||||
else ['%+.1f%%' % (100*t)])(
|
else ['%+.1f%%' % (100*t)])(
|
||||||
types[k].ratio(
|
types[k].ratio(
|
||||||
getattr(r, k, None),
|
getattr(r, k, None),
|
||||||
getattr(compare_result, k, None)))))
|
getattr(compare_r, k, None)))))
|
||||||
# percent entry?
|
# percent entry?
|
||||||
elif not diff:
|
elif not diff:
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -769,71 +732,94 @@ def table(Result, results, diff_results=None, *,
|
|||||||
|
|
||||||
return entry
|
return entry
|
||||||
|
|
||||||
# recursive entry helper, only used by some scripts
|
# recursive entry helper
|
||||||
def recurse(results_, depth_,
|
def table_recurse(results_, diff_results_,
|
||||||
|
depth_,
|
||||||
prefixes=('', '', '', '')):
|
prefixes=('', '', '', '')):
|
||||||
# build the children table at each layer
|
# build the children table at each layer
|
||||||
results_ = fold(Result, results_, by=by)
|
|
||||||
table_ = {
|
table_ = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results_}
|
for r in results_}
|
||||||
names_ = list(table_.keys())
|
diff_table_ = {
|
||||||
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
|
for r in diff_results_ or []}
|
||||||
|
names_ = [n
|
||||||
|
for n in table_.keys() | diff_table_.keys()
|
||||||
|
if diff_results_ is None
|
||||||
|
or all_
|
||||||
|
or any(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(diff_table_.get(n), k, None))
|
||||||
|
for k in fields)]
|
||||||
|
|
||||||
# sort the children layer
|
# sort again, now with diff info, note that python's sort is stable
|
||||||
names_.sort()
|
names_.sort(key=lambda n: (
|
||||||
if hasattr(Result, '_i'):
|
# sort by explicit sort fields
|
||||||
names_.sort(key=lambda n: getattr(table_[n], Result._i))
|
tuple((Rev
|
||||||
if sort:
|
if reverse ^ (not k or k in Result._fields)
|
||||||
for k, reverse in reversed(sort):
|
else lambda x: x)(
|
||||||
names_.sort(
|
tuple((getattr(table_[n], k_),)
|
||||||
key=lambda n: tuple(
|
if getattr(table_.get(n), k_, None) is not None
|
||||||
(getattr(table_[n], k),)
|
else ()
|
||||||
if getattr(table_.get(n), k, None)
|
for k_ in ([k] if k else Result._sort)))
|
||||||
is not None
|
for k, reverse in (sort or [])),
|
||||||
else ()
|
# sort by ratio if diffing
|
||||||
for k in (
|
Rev(tuple(types[k].ratio(
|
||||||
[k] if k else [
|
getattr(table_.get(n), k, None),
|
||||||
k for k in Result._sort
|
getattr(diff_table_.get(n), k, None))
|
||||||
if k in fields])),
|
for k in fields))
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
if diff or percent
|
||||||
|
else (),
|
||||||
|
# move compare entry to the top, note this can be
|
||||||
|
# overridden by explicitly sorting by fields
|
||||||
|
(table_.get(n) != compare_r,
|
||||||
|
# sort by ratio if comparing
|
||||||
|
Rev(tuple(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(compare_r, k, None))
|
||||||
|
for k in fields)))
|
||||||
|
if compare
|
||||||
|
else (),
|
||||||
|
# sort by result
|
||||||
|
(table_[n],) if n in table_ else (),
|
||||||
|
# and finally by name (diffs may be missing results)
|
||||||
|
n))
|
||||||
|
|
||||||
for i, name in enumerate(names_):
|
for i, n in enumerate(names_):
|
||||||
r = table_[name]
|
# find comparable results
|
||||||
is_last = (i == len(names_)-1)
|
r = table_.get(n)
|
||||||
|
diff_r = diff_table_.get(n)
|
||||||
|
|
||||||
|
# build line
|
||||||
|
line = table_entry(n, r, diff_r)
|
||||||
|
|
||||||
line = table_entry(name, r)
|
|
||||||
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
|
||||||
# add prefixes
|
# add prefixes
|
||||||
line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
|
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
|
line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
|
||||||
lines.append(line)
|
lines.append(line)
|
||||||
|
|
||||||
# recurse?
|
# recurse?
|
||||||
if depth_ > 1:
|
if n in table_ and depth_ > 1:
|
||||||
recurse(getattr(r, Result._children),
|
table_recurse(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
getattr(diff_r, Result._children, None) or [],
|
||||||
depth_-1,
|
depth_-1,
|
||||||
(prefixes[2+is_last] + "|-> ",
|
(prefixes[2+(i==len(names_)-1)] + "|-> ",
|
||||||
prefixes[2+is_last] + "'-> ",
|
prefixes[2+(i==len(names_)-1)] + "'-> ",
|
||||||
prefixes[2+is_last] + "| ",
|
prefixes[2+(i==len(names_)-1)] + "| ",
|
||||||
prefixes[2+is_last] + " "))
|
prefixes[2+(i==len(names_)-1)] + " "))
|
||||||
|
|
||||||
# entries
|
# build entries
|
||||||
if not summary:
|
if not summary:
|
||||||
for name in names:
|
table_recurse(results, diff_results, depth)
|
||||||
r = table.get(name)
|
|
||||||
if diff_results is None:
|
|
||||||
diff_r = None
|
|
||||||
else:
|
|
||||||
diff_r = diff_table.get(name)
|
|
||||||
lines.append(table_entry(name, r, diff_r))
|
|
||||||
|
|
||||||
# recursive entries
|
|
||||||
if name in table and depth > 1:
|
|
||||||
recurse(getattr(table[name], Result._children),
|
|
||||||
depth-1,
|
|
||||||
("|-> ",
|
|
||||||
"'-> ",
|
|
||||||
"| ",
|
|
||||||
" "))
|
|
||||||
|
|
||||||
# total
|
# total
|
||||||
if not no_total and not (small_table and not summary):
|
if not no_total and not (small_table and not summary):
|
||||||
@@ -845,9 +831,8 @@ def table(Result, results, diff_results=None, *,
|
|||||||
lines.append(table_entry('TOTAL', r, diff_r))
|
lines.append(table_entry('TOTAL', r, diff_r))
|
||||||
|
|
||||||
# homogenize
|
# homogenize
|
||||||
lines = [
|
lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
[x if isinstance(x, tuple) else (x, []) for x in line]
|
for line in lines]
|
||||||
for line in lines]
|
|
||||||
|
|
||||||
# find the best widths, note that column 0 contains the names and is
|
# find the best widths, note that column 0 contains the names and is
|
||||||
# handled a bit differently
|
# handled a bit differently
|
||||||
@@ -868,6 +853,130 @@ def table(Result, results, diff_results=None, *,
|
|||||||
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
||||||
for i, x in enumerate(line[1:], 1))))
|
for i, x in enumerate(line[1:], 1))))
|
||||||
|
|
||||||
|
def read_csv(path, Result, *,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'r') as f:
|
||||||
|
# csv or json? assume json starts with [
|
||||||
|
json = (f.buffer.peek(1)[:1] == b'[')
|
||||||
|
|
||||||
|
# read csv?
|
||||||
|
if not json:
|
||||||
|
results = []
|
||||||
|
reader = csv.DictReader(f, restval='')
|
||||||
|
for r in reader:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k].strip()}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k].strip()})))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results
|
||||||
|
|
||||||
|
# read json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
def unjsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| ({Result._children: unjsonify(
|
||||||
|
r[Result._children],
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and Result._children in r
|
||||||
|
and r[Result._children] is not None
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: set(r[Result._notes])}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and Result._notes in r
|
||||||
|
and r[Result._notes] is not None
|
||||||
|
else {}))))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results_
|
||||||
|
return unjsonify(json.load(f), depth)
|
||||||
|
|
||||||
|
def write_csv(path, Result, results, *,
|
||||||
|
json=False,
|
||||||
|
by=None,
|
||||||
|
fields=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'w') as f:
|
||||||
|
# write csv?
|
||||||
|
if not json:
|
||||||
|
writer = csv.DictWriter(f,
|
||||||
|
(by if by is not None else Result._by)
|
||||||
|
+ [k for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)])
|
||||||
|
writer.writeheader()
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
writer.writerow(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None})
|
||||||
|
|
||||||
|
# write json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
# the neat thing about json is we can include recursive results
|
||||||
|
def jsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| ({Result._children: jsonify(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and getattr(r, Result._children)
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: list(
|
||||||
|
getattr(r, Result._notes))}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and getattr(r, Result._notes)
|
||||||
|
else {}))
|
||||||
|
return results_
|
||||||
|
json.dump(jsonify(results, depth), f,
|
||||||
|
separators=(',', ':'))
|
||||||
|
|
||||||
|
|
||||||
def main(obj_paths, *,
|
def main(obj_paths, *,
|
||||||
by=None,
|
by=None,
|
||||||
@@ -884,87 +993,46 @@ def main(obj_paths, *,
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# collect info
|
# collect info
|
||||||
results = collect(obj_paths, **args)
|
results = collect_code(obj_paths,
|
||||||
|
**args)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
results = []
|
results = read_csv(args['use'], CodeResult,
|
||||||
with openio(args['use']) as f:
|
**args)
|
||||||
reader = csv.DictReader(f, restval='')
|
|
||||||
for r in reader:
|
|
||||||
# filter by matching defines
|
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in CodeResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
results.append(CodeResult(
|
|
||||||
**{k: r[k] for k in CodeResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in CodeResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
results = fold(CodeResult, results, by=by, defines=defines)
|
results = fold(CodeResult, results,
|
||||||
|
by=by,
|
||||||
|
defines=defines)
|
||||||
|
|
||||||
# sort, note that python's sort is stable
|
# write results to CSV/JSON
|
||||||
results.sort()
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
results.sort(
|
|
||||||
key=lambda r: tuple(
|
|
||||||
(getattr(r, k),) if getattr(r, k) is not None else ()
|
|
||||||
for k in ([k] if k else CodeResult._sort)),
|
|
||||||
reverse=reverse ^ (not k or k in CodeResult._fields))
|
|
||||||
|
|
||||||
# write results to CSV
|
|
||||||
if args.get('output'):
|
if args.get('output'):
|
||||||
with openio(args['output'], 'w') as f:
|
write_csv(args['output'], CodeResult, results,
|
||||||
writer = csv.DictWriter(f,
|
by=by,
|
||||||
(by if by is not None else CodeResult._by)
|
fields=fields,
|
||||||
+ [k for k in (
|
**args)
|
||||||
fields if fields is not None
|
if args.get('output_json'):
|
||||||
else CodeResult._fields)])
|
write_csv(args['output_json'], CodeResult, results,
|
||||||
writer.writeheader()
|
json=True,
|
||||||
for r in results:
|
by=by,
|
||||||
writer.writerow(
|
fields=fields,
|
||||||
{k: getattr(r, k) for k in (
|
**args)
|
||||||
by if by is not None else CodeResult._by)}
|
|
||||||
| {k: getattr(r, k) for k in (
|
|
||||||
fields if fields is not None
|
|
||||||
else CodeResult._fields)})
|
|
||||||
|
|
||||||
# find previous results?
|
# find previous results?
|
||||||
diff_results = None
|
diff_results = None
|
||||||
if args.get('diff') or args.get('percent'):
|
if args.get('diff') or args.get('percent'):
|
||||||
diff_results = []
|
|
||||||
try:
|
try:
|
||||||
with openio(args.get('diff') or args.get('percent')) as f:
|
diff_results = read_csv(
|
||||||
reader = csv.DictReader(f, restval='')
|
args.get('diff') or args.get('percent'),
|
||||||
for r in reader:
|
CodeResult,
|
||||||
# filter by matching defines
|
**args)
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in CodeResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
diff_results.append(CodeResult(
|
|
||||||
**{k: r[k] for k in CodeResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in CodeResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
diff_results = []
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
diff_results = fold(CodeResult, diff_results, by=by, defines=defines)
|
diff_results = fold(CodeResult, diff_results,
|
||||||
|
by=by,
|
||||||
|
defines=defines)
|
||||||
|
|
||||||
# print table
|
# print table
|
||||||
if not args.get('quiet'):
|
if not args.get('quiet'):
|
||||||
@@ -996,24 +1064,28 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-o', '--output',
|
'-o', '--output',
|
||||||
help="Specify CSV file to store results.")
|
help="Specify CSV file to store results.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-O', '--output-json',
|
||||||
|
help="Specify JSON file to store results. This may contain "
|
||||||
|
"recursive info.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-u', '--use',
|
'-u', '--use',
|
||||||
help="Don't parse anything, use this CSV file.")
|
help="Don't parse anything, use this CSV/JSON file.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-d', '--diff',
|
'-d', '--diff',
|
||||||
help="Specify CSV file to diff against.")
|
help="Specify CSV/JSON file to diff against.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-p', '--percent',
|
'-p', '--percent',
|
||||||
help="Specify CSV file to diff against, but only show precentage "
|
help="Specify CSV/JSON file to diff against, but only show "
|
||||||
"change, not a full diff.")
|
"percentage change, not a full diff.")
|
||||||
parser.add_argument(
|
|
||||||
'-a', '--all',
|
|
||||||
action='store_true',
|
|
||||||
help="Show all, not just the ones that changed.")
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-c', '--compare',
|
'-c', '--compare',
|
||||||
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
||||||
help="Compare results to the row matching this by pattern.")
|
help="Compare results to the row matching this by pattern.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-a', '--all',
|
||||||
|
action='store_true',
|
||||||
|
help="Show all, not just the ones that changed.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-b', '--by',
|
'-b', '--by',
|
||||||
action='append',
|
action='append',
|
||||||
@@ -1039,7 +1111,7 @@ if __name__ == "__main__":
|
|||||||
def __call__(self, parser, namespace, value, option):
|
def __call__(self, parser, namespace, value, option):
|
||||||
if namespace.sort is None:
|
if namespace.sort is None:
|
||||||
namespace.sort = []
|
namespace.sort = []
|
||||||
namespace.sort.append((value, True if option == '-S' else False))
|
namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-s', '--sort',
|
'-s', '--sort',
|
||||||
nargs='?',
|
nargs='?',
|
||||||
|
|||||||
563
scripts/cov.py
563
scripts/cov.py
@@ -18,6 +18,7 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
import collections as co
|
import collections as co
|
||||||
import csv
|
import csv
|
||||||
|
import io
|
||||||
import itertools as it
|
import itertools as it
|
||||||
import json
|
import json
|
||||||
import math as mt
|
import math as mt
|
||||||
@@ -270,7 +271,7 @@ def openio(path, mode='r', buffering=-1):
|
|||||||
else:
|
else:
|
||||||
return open(path, mode, buffering)
|
return open(path, mode, buffering)
|
||||||
|
|
||||||
def collect_cov(gcda_path, *,
|
def collect_gcov(gcda_path, *,
|
||||||
gcov_path=GCOV_PATH,
|
gcov_path=GCOV_PATH,
|
||||||
**args):
|
**args):
|
||||||
# get coverage info through gcov's json output
|
# get coverage info through gcov's json output
|
||||||
@@ -290,14 +291,14 @@ def collect_cov(gcda_path, *,
|
|||||||
|
|
||||||
return cov
|
return cov
|
||||||
|
|
||||||
def collect(gcda_paths, *,
|
def collect_cov(gcda_paths, *,
|
||||||
sources=None,
|
sources=None,
|
||||||
everything=False,
|
everything=False,
|
||||||
**args):
|
**args):
|
||||||
results = []
|
results = []
|
||||||
for gcda_path in gcda_paths:
|
for gcda_path in gcda_paths:
|
||||||
# find coverage info
|
# find coverage info
|
||||||
cov = collect_cov(gcda_path, **args)
|
cov = collect_gcov(gcda_path, **args)
|
||||||
|
|
||||||
# collect line/branch coverage
|
# collect line/branch coverage
|
||||||
for file in cov['files']:
|
for file in cov['files']:
|
||||||
@@ -357,7 +358,31 @@ def collect(gcda_paths, *,
|
|||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def fold(Result, results, by=None, defines=[]):
|
# common folding/tabling/read/write code
|
||||||
|
|
||||||
|
class Rev(co.namedtuple('Rev', 'x')):
|
||||||
|
__slots__ = ()
|
||||||
|
# yes we need all of these because we're a namedtuple
|
||||||
|
def __lt__(self, other):
|
||||||
|
return self.x > other.x
|
||||||
|
def __gt__(self, other):
|
||||||
|
return self.x < other.x
|
||||||
|
def __le__(self, other):
|
||||||
|
return self.x >= other.x
|
||||||
|
def __ge__(self, other):
|
||||||
|
return self.x <= other.x
|
||||||
|
|
||||||
|
def fold(Result, results, *,
|
||||||
|
by=None,
|
||||||
|
defines=[],
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
# stop when depth hits zero
|
||||||
|
if depth == 0:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# organize by by
|
||||||
if by is None:
|
if by is None:
|
||||||
by = Result._by
|
by = Result._by
|
||||||
|
|
||||||
@@ -371,7 +396,7 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
if defines:
|
if defines:
|
||||||
results_ = []
|
results_ = []
|
||||||
for r in results:
|
for r in results:
|
||||||
if all(getattr(r, k) in vs for k, vs in defines):
|
if all(str(getattr(r, k)) in vs for k, vs in defines):
|
||||||
results_.append(r)
|
results_.append(r)
|
||||||
results = results_
|
results = results_
|
||||||
|
|
||||||
@@ -388,6 +413,31 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
for name, rs in folding.items():
|
for name, rs in folding.items():
|
||||||
folded.append(sum(rs[1:], start=rs[0]))
|
folded.append(sum(rs[1:], start=rs[0]))
|
||||||
|
|
||||||
|
# sort, note that python's sort is stable
|
||||||
|
folded.sort(key=lambda r: (
|
||||||
|
# sort by explicit sort fields
|
||||||
|
tuple((Rev
|
||||||
|
if reverse ^ (not k or k in Result._fields)
|
||||||
|
else lambda x: x)(
|
||||||
|
tuple((getattr(r, k_),)
|
||||||
|
if getattr(r, k_) is not None
|
||||||
|
else ()
|
||||||
|
for k_ in ([k] if k else Result._sort)))
|
||||||
|
for k, reverse in (sort or [])),
|
||||||
|
# sort by result
|
||||||
|
r))
|
||||||
|
|
||||||
|
# recurse if we have recursive results
|
||||||
|
if hasattr(Result, '_children'):
|
||||||
|
folded = [r._replace(**{
|
||||||
|
Result._children: fold(
|
||||||
|
Result, getattr(r, Result._children),
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
sort=sort,
|
||||||
|
depth=depth-1)})
|
||||||
|
for r in folded]
|
||||||
|
|
||||||
return folded
|
return folded
|
||||||
|
|
||||||
def table(Result, results, diff_results=None, *,
|
def table(Result, results, diff_results=None, *,
|
||||||
@@ -415,124 +465,32 @@ def table(Result, results, diff_results=None, *,
|
|||||||
fields = Result._fields
|
fields = Result._fields
|
||||||
types = Result._types
|
types = Result._types
|
||||||
|
|
||||||
# fold again
|
# fold again, otherwise results risk being hidden
|
||||||
results = fold(Result, results, by=by)
|
results = fold(Result, results,
|
||||||
|
by=by,
|
||||||
|
depth=depth)
|
||||||
if diff_results is not None:
|
if diff_results is not None:
|
||||||
diff_results = fold(Result, diff_results, by=by)
|
diff_results = fold(Result, diff_results,
|
||||||
|
by=by,
|
||||||
# reduce children to hot paths? only used by some scripts
|
depth=depth)
|
||||||
if hot:
|
|
||||||
# subclass to reintroduce __dict__
|
|
||||||
Result_ = Result
|
|
||||||
class HotResult(Result_):
|
|
||||||
_i = '_hot_i'
|
|
||||||
_children = '_hot_children'
|
|
||||||
|
|
||||||
def __new__(cls, r, i=None, children=None, notes=None):
|
|
||||||
self = HotResult._make(r)
|
|
||||||
self._hot_i = i
|
|
||||||
self._hot_children = children if children is not None else []
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __add__(self, other):
|
|
||||||
return HotResult(
|
|
||||||
Result_.__add__(self, other),
|
|
||||||
self._hot_i if other._hot_i is None
|
|
||||||
else other._hot_i if self._hot_i is None
|
|
||||||
else min(self._hot_i, other._hot_i),
|
|
||||||
self._hot_children + other._hot_children)
|
|
||||||
|
|
||||||
results_ = []
|
|
||||||
for r in results:
|
|
||||||
hot_ = []
|
|
||||||
def recurse(results_, depth_):
|
|
||||||
nonlocal hot_
|
|
||||||
if not results_:
|
|
||||||
return
|
|
||||||
|
|
||||||
# find the hottest result
|
|
||||||
r = max(results_,
|
|
||||||
key=lambda r: tuple(
|
|
||||||
tuple((getattr(r, k),)
|
|
||||||
if getattr(r, k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])
|
|
||||||
if k in fields)
|
|
||||||
for k in it.chain(hot, [None])))
|
|
||||||
hot_.append(HotResult(r, i=len(hot_)))
|
|
||||||
|
|
||||||
# recurse?
|
|
||||||
if depth_ > 1:
|
|
||||||
recurse(getattr(r, Result._children),
|
|
||||||
depth_-1)
|
|
||||||
|
|
||||||
recurse(getattr(r, Result._children), depth-1)
|
|
||||||
results_.append(HotResult(r, children=hot_))
|
|
||||||
|
|
||||||
Result = HotResult
|
|
||||||
results = results_
|
|
||||||
|
|
||||||
# organize by name
|
# organize by name
|
||||||
table = {
|
table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results}
|
for r in results}
|
||||||
diff_table = {
|
diff_table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in diff_results or []}
|
for r in diff_results or []}
|
||||||
names = [name
|
|
||||||
for name in table.keys() | diff_table.keys()
|
|
||||||
if diff_results is None
|
|
||||||
or all_
|
|
||||||
or any(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(name), k, None),
|
|
||||||
getattr(diff_table.get(name), k, None))
|
|
||||||
for k in fields)]
|
|
||||||
|
|
||||||
# find compare entry if there is one
|
# find compare entry if there is one
|
||||||
if compare:
|
if compare:
|
||||||
compare_result = table.get(','.join(str(k) for k in compare))
|
compare_r = table.get(','.join(str(k) for k in compare))
|
||||||
|
|
||||||
# sort again, now with diff info, note that python's sort is stable
|
|
||||||
names.sort()
|
|
||||||
if compare:
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: (
|
|
||||||
# move compare entry to the top, note this can be
|
|
||||||
# overridden by explicitly sorting by fields
|
|
||||||
table.get(n) == compare_result,
|
|
||||||
# sort by ratio if comparing
|
|
||||||
tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(compare_result, k, None))
|
|
||||||
for k in fields)),
|
|
||||||
reverse=True)
|
|
||||||
if diff or percent:
|
|
||||||
names.sort(
|
|
||||||
# sort by ratio if diffing
|
|
||||||
key=lambda n: tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(diff_table.get(n), k, None))
|
|
||||||
for k in fields),
|
|
||||||
reverse=True)
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: tuple(
|
|
||||||
(getattr(table[n], k),)
|
|
||||||
if getattr(table.get(n), k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])),
|
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
|
||||||
|
|
||||||
|
|
||||||
# build up our lines
|
# build up our lines
|
||||||
lines = []
|
lines = []
|
||||||
@@ -559,11 +517,16 @@ def table(Result, results, diff_results=None, *,
|
|||||||
header.append('d'+k)
|
header.append('d'+k)
|
||||||
lines.append(header)
|
lines.append(header)
|
||||||
|
|
||||||
|
# delete these to try to catch typos below, we need to rebuild
|
||||||
|
# these tables at each recursive layer
|
||||||
|
del table
|
||||||
|
del diff_table
|
||||||
|
|
||||||
# entry helper
|
# entry helper
|
||||||
def table_entry(name, r, diff_r=None):
|
def table_entry(name, r, diff_r=None):
|
||||||
entry = [name]
|
entry = [name]
|
||||||
# normal entry?
|
# normal entry?
|
||||||
if ((compare is None or r == compare_result)
|
if ((compare is None or r == compare_r)
|
||||||
and not percent
|
and not percent
|
||||||
and not diff):
|
and not diff):
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -584,7 +547,7 @@ def table(Result, results, diff_results=None, *,
|
|||||||
else ['%+.1f%%' % (100*t)])(
|
else ['%+.1f%%' % (100*t)])(
|
||||||
types[k].ratio(
|
types[k].ratio(
|
||||||
getattr(r, k, None),
|
getattr(r, k, None),
|
||||||
getattr(compare_result, k, None)))))
|
getattr(compare_r, k, None)))))
|
||||||
# percent entry?
|
# percent entry?
|
||||||
elif not diff:
|
elif not diff:
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -630,71 +593,94 @@ def table(Result, results, diff_results=None, *,
|
|||||||
|
|
||||||
return entry
|
return entry
|
||||||
|
|
||||||
# recursive entry helper, only used by some scripts
|
# recursive entry helper
|
||||||
def recurse(results_, depth_,
|
def table_recurse(results_, diff_results_,
|
||||||
|
depth_,
|
||||||
prefixes=('', '', '', '')):
|
prefixes=('', '', '', '')):
|
||||||
# build the children table at each layer
|
# build the children table at each layer
|
||||||
results_ = fold(Result, results_, by=by)
|
|
||||||
table_ = {
|
table_ = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results_}
|
for r in results_}
|
||||||
names_ = list(table_.keys())
|
diff_table_ = {
|
||||||
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
|
for r in diff_results_ or []}
|
||||||
|
names_ = [n
|
||||||
|
for n in table_.keys() | diff_table_.keys()
|
||||||
|
if diff_results_ is None
|
||||||
|
or all_
|
||||||
|
or any(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(diff_table_.get(n), k, None))
|
||||||
|
for k in fields)]
|
||||||
|
|
||||||
# sort the children layer
|
# sort again, now with diff info, note that python's sort is stable
|
||||||
names_.sort()
|
names_.sort(key=lambda n: (
|
||||||
if hasattr(Result, '_i'):
|
# sort by explicit sort fields
|
||||||
names_.sort(key=lambda n: getattr(table_[n], Result._i))
|
tuple((Rev
|
||||||
if sort:
|
if reverse ^ (not k or k in Result._fields)
|
||||||
for k, reverse in reversed(sort):
|
else lambda x: x)(
|
||||||
names_.sort(
|
tuple((getattr(table_[n], k_),)
|
||||||
key=lambda n: tuple(
|
if getattr(table_.get(n), k_, None) is not None
|
||||||
(getattr(table_[n], k),)
|
else ()
|
||||||
if getattr(table_.get(n), k, None)
|
for k_ in ([k] if k else Result._sort)))
|
||||||
is not None
|
for k, reverse in (sort or [])),
|
||||||
else ()
|
# sort by ratio if diffing
|
||||||
for k in (
|
Rev(tuple(types[k].ratio(
|
||||||
[k] if k else [
|
getattr(table_.get(n), k, None),
|
||||||
k for k in Result._sort
|
getattr(diff_table_.get(n), k, None))
|
||||||
if k in fields])),
|
for k in fields))
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
if diff or percent
|
||||||
|
else (),
|
||||||
|
# move compare entry to the top, note this can be
|
||||||
|
# overridden by explicitly sorting by fields
|
||||||
|
(table_.get(n) != compare_r,
|
||||||
|
# sort by ratio if comparing
|
||||||
|
Rev(tuple(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(compare_r, k, None))
|
||||||
|
for k in fields)))
|
||||||
|
if compare
|
||||||
|
else (),
|
||||||
|
# sort by result
|
||||||
|
(table_[n],) if n in table_ else (),
|
||||||
|
# and finally by name (diffs may be missing results)
|
||||||
|
n))
|
||||||
|
|
||||||
for i, name in enumerate(names_):
|
for i, n in enumerate(names_):
|
||||||
r = table_[name]
|
# find comparable results
|
||||||
is_last = (i == len(names_)-1)
|
r = table_.get(n)
|
||||||
|
diff_r = diff_table_.get(n)
|
||||||
|
|
||||||
|
# build line
|
||||||
|
line = table_entry(n, r, diff_r)
|
||||||
|
|
||||||
line = table_entry(name, r)
|
|
||||||
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
|
||||||
# add prefixes
|
# add prefixes
|
||||||
line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
|
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
|
line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
|
||||||
lines.append(line)
|
lines.append(line)
|
||||||
|
|
||||||
# recurse?
|
# recurse?
|
||||||
if depth_ > 1:
|
if n in table_ and depth_ > 1:
|
||||||
recurse(getattr(r, Result._children),
|
table_recurse(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
getattr(diff_r, Result._children, None) or [],
|
||||||
depth_-1,
|
depth_-1,
|
||||||
(prefixes[2+is_last] + "|-> ",
|
(prefixes[2+(i==len(names_)-1)] + "|-> ",
|
||||||
prefixes[2+is_last] + "'-> ",
|
prefixes[2+(i==len(names_)-1)] + "'-> ",
|
||||||
prefixes[2+is_last] + "| ",
|
prefixes[2+(i==len(names_)-1)] + "| ",
|
||||||
prefixes[2+is_last] + " "))
|
prefixes[2+(i==len(names_)-1)] + " "))
|
||||||
|
|
||||||
# entries
|
# build entries
|
||||||
if not summary:
|
if not summary:
|
||||||
for name in names:
|
table_recurse(results, diff_results, depth)
|
||||||
r = table.get(name)
|
|
||||||
if diff_results is None:
|
|
||||||
diff_r = None
|
|
||||||
else:
|
|
||||||
diff_r = diff_table.get(name)
|
|
||||||
lines.append(table_entry(name, r, diff_r))
|
|
||||||
|
|
||||||
# recursive entries
|
|
||||||
if name in table and depth > 1:
|
|
||||||
recurse(getattr(table[name], Result._children),
|
|
||||||
depth-1,
|
|
||||||
("|-> ",
|
|
||||||
"'-> ",
|
|
||||||
"| ",
|
|
||||||
" "))
|
|
||||||
|
|
||||||
# total
|
# total
|
||||||
if not no_total and not (small_table and not summary):
|
if not no_total and not (small_table and not summary):
|
||||||
@@ -706,9 +692,8 @@ def table(Result, results, diff_results=None, *,
|
|||||||
lines.append(table_entry('TOTAL', r, diff_r))
|
lines.append(table_entry('TOTAL', r, diff_r))
|
||||||
|
|
||||||
# homogenize
|
# homogenize
|
||||||
lines = [
|
lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
[x if isinstance(x, tuple) else (x, []) for x in line]
|
for line in lines]
|
||||||
for line in lines]
|
|
||||||
|
|
||||||
# find the best widths, note that column 0 contains the names and is
|
# find the best widths, note that column 0 contains the names and is
|
||||||
# handled a bit differently
|
# handled a bit differently
|
||||||
@@ -729,6 +714,130 @@ def table(Result, results, diff_results=None, *,
|
|||||||
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
||||||
for i, x in enumerate(line[1:], 1))))
|
for i, x in enumerate(line[1:], 1))))
|
||||||
|
|
||||||
|
def read_csv(path, Result, *,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'r') as f:
|
||||||
|
# csv or json? assume json starts with [
|
||||||
|
json = (f.buffer.peek(1)[:1] == b'[')
|
||||||
|
|
||||||
|
# read csv?
|
||||||
|
if not json:
|
||||||
|
results = []
|
||||||
|
reader = csv.DictReader(f, restval='')
|
||||||
|
for r in reader:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k].strip()}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k].strip()})))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results
|
||||||
|
|
||||||
|
# read json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
def unjsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| ({Result._children: unjsonify(
|
||||||
|
r[Result._children],
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and Result._children in r
|
||||||
|
and r[Result._children] is not None
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: set(r[Result._notes])}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and Result._notes in r
|
||||||
|
and r[Result._notes] is not None
|
||||||
|
else {}))))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results_
|
||||||
|
return unjsonify(json.load(f), depth)
|
||||||
|
|
||||||
|
def write_csv(path, Result, results, *,
|
||||||
|
json=False,
|
||||||
|
by=None,
|
||||||
|
fields=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'w') as f:
|
||||||
|
# write csv?
|
||||||
|
if not json:
|
||||||
|
writer = csv.DictWriter(f,
|
||||||
|
(by if by is not None else Result._by)
|
||||||
|
+ [k for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)])
|
||||||
|
writer.writeheader()
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
writer.writerow(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None})
|
||||||
|
|
||||||
|
# write json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
# the neat thing about json is we can include recursive results
|
||||||
|
def jsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| ({Result._children: jsonify(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and getattr(r, Result._children)
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: list(
|
||||||
|
getattr(r, Result._notes))}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and getattr(r, Result._notes)
|
||||||
|
else {}))
|
||||||
|
return results_
|
||||||
|
json.dump(jsonify(results, depth), f,
|
||||||
|
separators=(',', ':'))
|
||||||
|
|
||||||
|
|
||||||
def annotate(Result, results, *,
|
def annotate(Result, results, *,
|
||||||
annotate=False,
|
annotate=False,
|
||||||
@@ -829,88 +938,46 @@ def main(gcda_paths, *,
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# collect info
|
# collect info
|
||||||
results = collect(gcda_paths, **args)
|
results = collect_cov(gcda_paths,
|
||||||
|
**args)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
results = []
|
results = read_csv(args['use'], CovResult,
|
||||||
with openio(args['use']) as f:
|
**args)
|
||||||
reader = csv.DictReader(f, restval='')
|
|
||||||
for r in reader:
|
|
||||||
# filter by matching defines
|
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in CovResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
results.append(CovResult(
|
|
||||||
**{k: r[k] for k in CovResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k]
|
|
||||||
for k in CovResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
results = fold(CovResult, results, by=by, defines=defines)
|
results = fold(CovResult, results,
|
||||||
|
by=by,
|
||||||
|
defines=defines)
|
||||||
|
|
||||||
# sort, note that python's sort is stable
|
# write results to CSV/JSON
|
||||||
results.sort()
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
results.sort(
|
|
||||||
key=lambda r: tuple(
|
|
||||||
(getattr(r, k),) if getattr(r, k) is not None else ()
|
|
||||||
for k in ([k] if k else CovResult._sort)),
|
|
||||||
reverse=reverse ^ (not k or k in CovResult._fields))
|
|
||||||
|
|
||||||
# write results to CSV
|
|
||||||
if args.get('output'):
|
if args.get('output'):
|
||||||
with openio(args['output'], 'w') as f:
|
write_csv(args['output'], CovResult, results,
|
||||||
writer = csv.DictWriter(f,
|
by=by,
|
||||||
(by if by is not None else CovResult._by)
|
fields=fields,
|
||||||
+ [k for k in (
|
**args)
|
||||||
fields if fields is not None
|
if args.get('output_json'):
|
||||||
else CovResult._fields)])
|
write_csv(args['output_json'], CovResult, results,
|
||||||
writer.writeheader()
|
json=True,
|
||||||
for r in results:
|
by=by,
|
||||||
writer.writerow(
|
fields=fields,
|
||||||
{k: getattr(r, k) for k in (
|
**args)
|
||||||
by if by is not None else CovResult._by)}
|
|
||||||
| {k: getattr(r, k) for k in (
|
|
||||||
fields if fields is not None
|
|
||||||
else CovResult._fields)})
|
|
||||||
|
|
||||||
# find previous results?
|
# find previous results?
|
||||||
diff_results = None
|
diff_results = None
|
||||||
if args.get('diff') or args.get('percent'):
|
if args.get('diff') or args.get('percent'):
|
||||||
diff_results = []
|
|
||||||
try:
|
try:
|
||||||
with openio(args.get('diff') or args.get('percent')) as f:
|
diff_results = read_csv(
|
||||||
reader = csv.DictReader(f, restval='')
|
args.get('diff') or args.get('percent'),
|
||||||
for r in reader:
|
CovResult,
|
||||||
# filter by matching defines
|
**args)
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in CovResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
diff_results.append(CovResult(
|
|
||||||
**{k: r[k] for k in CovResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in CovResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
diff_results = []
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
diff_results = fold(CovResult, diff_results, by=by, defines=defines)
|
diff_results = fold(CovResult, diff_results,
|
||||||
|
by=by,
|
||||||
|
defines=defines)
|
||||||
|
|
||||||
# print table
|
# print table
|
||||||
if not args.get('quiet'):
|
if not args.get('quiet'):
|
||||||
@@ -959,24 +1026,28 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-o', '--output',
|
'-o', '--output',
|
||||||
help="Specify CSV file to store results.")
|
help="Specify CSV file to store results.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-O', '--output-json',
|
||||||
|
help="Specify JSON file to store results. This may contain "
|
||||||
|
"recursive info.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-u', '--use',
|
'-u', '--use',
|
||||||
help="Don't parse anything, use this CSV file.")
|
help="Don't parse anything, use this CSV/JSON file.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-d', '--diff',
|
'-d', '--diff',
|
||||||
help="Specify CSV file to diff against.")
|
help="Specify CSV/JSON file to diff against.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-p', '--percent',
|
'-p', '--percent',
|
||||||
help="Specify CSV file to diff against, but only show precentage "
|
help="Specify CSV/JSON file to diff against, but only show "
|
||||||
"change, not a full diff.")
|
"percentage change, not a full diff.")
|
||||||
parser.add_argument(
|
|
||||||
'-a', '--all',
|
|
||||||
action='store_true',
|
|
||||||
help="Show all, not just the ones that changed.")
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-c', '--compare',
|
'-c', '--compare',
|
||||||
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
||||||
help="Compare results to the row matching this by pattern.")
|
help="Compare results to the row matching this by pattern.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-a', '--all',
|
||||||
|
action='store_true',
|
||||||
|
help="Show all, not just the ones that changed.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-b', '--by',
|
'-b', '--by',
|
||||||
action='append',
|
action='append',
|
||||||
@@ -1002,7 +1073,7 @@ if __name__ == "__main__":
|
|||||||
def __call__(self, parser, namespace, value, option):
|
def __call__(self, parser, namespace, value, option):
|
||||||
if namespace.sort is None:
|
if namespace.sort is None:
|
||||||
namespace.sort = []
|
namespace.sort = []
|
||||||
namespace.sort.append((value, True if option == '-S' else False))
|
namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-s', '--sort',
|
'-s', '--sort',
|
||||||
nargs='?',
|
nargs='?',
|
||||||
|
|||||||
928
scripts/csv.py
928
scripts/csv.py
File diff suppressed because it is too large
Load Diff
698
scripts/ctx.py
698
scripts/ctx.py
@@ -15,8 +15,9 @@ if __name__ == "__main__":
|
|||||||
|
|
||||||
import collections as co
|
import collections as co
|
||||||
import csv
|
import csv
|
||||||
import itertools as it
|
|
||||||
import functools as ft
|
import functools as ft
|
||||||
|
import io
|
||||||
|
import itertools as it
|
||||||
import math as mt
|
import math as mt
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
@@ -130,12 +131,12 @@ class RInt(co.namedtuple('RInt', 'x')):
 def __mod__(self, other):
 return self.__class__(self.x % other.x)

-# struct size results
+# ctx size results
 class CtxResult(co.namedtuple('CtxResult', [
-'file', 'function',
+'i', 'file', 'function',
 'size',
-'i', 'children', 'notes'])):
+'children', 'notes'])):
-_by = ['file', 'function']
+_by = ['i', 'file', 'function']
 _fields = ['size']
 _sort = ['size']
 _types = {'size': RInt}
@@ -144,20 +145,16 @@ class CtxResult(co.namedtuple('CtxResult', [
 _notes = 'notes'

 __slots__ = ()
-def __new__(cls, file='', function='', size=0,
-i=None, children=None, notes=None):
+def __new__(cls, i=None, file='', function='', size=0,
+children=None, notes=None):
-return super().__new__(cls, file, function,
+return super().__new__(cls, i, file, function,
 RInt(size),
-i,
 children if children is not None else [],
 notes if notes is not None else set())

 def __add__(self, other):
-return CtxResult(self.file, self.function,
+return CtxResult(self.i, self.file, self.function,
 max(self.size, other.size),
-self.i if other.i is None
-else other.i if self.i is None
-else min(self.i, other.i),
 self.children + other.children,
 self.notes | other.notes)

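The __add__ above is what fold() relies on when it collapses rows that share the same by-fields: size keeps the max, children are concatenated, and notes take the union. A toy illustration of how two such rows combine (Row is a simplified stand-in for CtxResult, values are invented):

import collections as co

class Row(co.namedtuple('Row', 'i file function size children notes')):
    __slots__ = ()
    def __add__(self, other):
        # same shape as the CtxResult.__add__ above, just simplified
        return Row(self.i, self.file, self.function,
            max(self.size, other.size),
            self.children + other.children,
            self.notes | other.notes)

a = Row(0, 'a.c', 'fn', 48, [], {'aligned'})
b = Row(0, 'a.c', 'fn', 64, [], set())
print((a + b).size, (a + b).notes)  # 64 {'aligned'}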
@@ -458,7 +455,7 @@ def collect_dwarf_info(obj_path, tags=None, *,

 return DwarfInfo(info)

-def collect(obj_paths, *,
+def collect_ctx(obj_paths, *,
 everything=False,
 depth=1,
 **args):
@@ -603,7 +600,7 @@ def collect(obj_paths, *,
 size_ = sizeof(type, seen | {entry.off})
 children_, notes_, dirty_ = childrenof(
 type, depth-1, seen | {entry.off})
-children.append(CtxResult(file, name_, size_,
+children.append(CtxResult(0, file, name_, size_,
 children=children_,
 notes=notes_))
 dirty = dirty or dirty_
@@ -619,8 +616,7 @@ def collect(obj_paths, *,
 size_ = sizeof(child, seen | {entry.off})
 children_, notes_, dirty_ = childrenof(
 child, depth-1, seen | {entry.off})
-children.append(CtxResult(file, name_, size_,
-i=child.off,
+children.append(CtxResult(child.off, file, name_, size_,
 children=children_,
 notes=notes_))
 dirty = dirty or dirty_
@@ -682,21 +678,44 @@ def collect(obj_paths, *,
 # find children, recursing if necessary
 children_, notes_, _ = childrenof(param, depth-2)

-params.append(CtxResult(file, name_, size_,
-i=param.off,
+params.append(CtxResult(param.off, file, name_, size_,
 children=children_,
 notes=notes_))

 # context = sum of params
 name = entry.name
 size = sum((param.size for param in params), start=RInt(0))
-results.append(CtxResult(file, name, size,
+results.append(CtxResult(None, file, name, size,
 children=params))

 return results


-def fold(Result, results, by=None, defines=[]):
+# common folding/tabling/read/write code

+class Rev(co.namedtuple('Rev', 'x')):
+__slots__ = ()
+# yes we need all of these because we're a namedtuple
+def __lt__(self, other):
+return self.x > other.x
+def __gt__(self, other):
+return self.x < other.x
+def __le__(self, other):
+return self.x >= other.x
+def __ge__(self, other):
+return self.x <= other.x

+def fold(Result, results, *,
+by=None,
+defines=[],
+sort=None,
+depth=1,
+**_):
+# stop when depth hits zero
+if depth == 0:
+return []

+# organize by by
 if by is None:
 by = Result._by

@@ -710,7 +729,7 @@ def fold(Result, results, by=None, defines=[]):
 if defines:
 results_ = []
 for r in results:
-if all(getattr(r, k) in vs for k, vs in defines):
+if all(str(getattr(r, k)) in vs for k, vs in defines):
 results_.append(r)
 results = results_

@@ -727,8 +746,80 @@ def fold(Result, results, by=None, defines=[]):
 for name, rs in folding.items():
 folded.append(sum(rs[1:], start=rs[0]))

+# sort, note that python's sort is stable
+folded.sort(key=lambda r: (
+# sort by explicit sort fields
+tuple((Rev
+if reverse ^ (not k or k in Result._fields)
+else lambda x: x)(
+tuple((getattr(r, k_),)
+if getattr(r, k_) is not None
+else ()
+for k_ in ([k] if k else Result._sort)))
+for k, reverse in (sort or [])),
+# sort by result
+r))

+# recurse if we have recursive results
+if hasattr(Result, '_children'):
+folded = [r._replace(**{
+Result._children: fold(
+Result, getattr(r, Result._children),
+by=by,
+defines=defines,
+sort=sort,
+depth=depth-1)})
+for r in folded]

 return folded
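The Rev wrapper above is what lets fold() sort in a single stable pass while mixing ascending and descending keys: wrapping one component of the key tuple in Rev flips only that component's ordering. A small standalone sketch of the idea (record names and sizes are made up):

import collections as co

class Rev(co.namedtuple('Rev', 'x')):
    __slots__ = ()
    # invert every comparison so wrapped values sort backwards
    def __lt__(self, other): return self.x > other.x
    def __gt__(self, other): return self.x < other.x
    def __le__(self, other): return self.x >= other.x
    def __ge__(self, other): return self.x <= other.x

rows = [('lfs_bd_read', 16), ('lfs_mount', 48), ('lfs_format', 48)]
# size descending, then name ascending, in one sort call
rows.sort(key=lambda r: (Rev(r[1]), r[0]))
print(rows)
# [('lfs_format', 48), ('lfs_mount', 48), ('lfs_bd_read', 16)]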

+def hotify(Result, results, *,
+fields=None,
+sort=None,
+depth=1,
+hot=None,
+**_):
+# hotify only makes sense for recursive results
+assert hasattr(Result, '_i')
+assert hasattr(Result, '_children')

+if fields is None:
+fields = Result._fields

+results_ = []
+for r in results:
+hot_ = []
+def recurse(results_, depth_):
+nonlocal hot_
+if not results_:
+return

+# find the hottest result
+r = min(results_, key=lambda r:
+tuple((Rev
+if reverse ^ (not k or k in Result._fields)
+else lambda x: x)(
+tuple((getattr(r, k_),)
+if getattr(r, k_) is not None
+else ()
+for k_ in ([k] if k else Result._sort)))
+for k, reverse in it.chain(hot, [(None, False)])))

+hot_.append(r._replace(**{
+Result._i: RInt(len(hot_)),
+Result._children: []}))

+# recurse?
+if depth_ > 1:
+recurse(getattr(r, Result._children),
+depth_-1)

+recurse(getattr(r, Result._children), depth-1)
+results_.append(r._replace(**{
+Result._children: hot_}))

+return results_

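hotify() above walks each result's children and keeps only the single "hottest" child at every level, which collapses a call tree into one linear hot path. A toy sketch of that reduction, using plain dicts and "largest size wins" in place of the script's configurable hot fields (data is invented):

def hot_path(node):
    path = []
    children = node.get('children', [])
    while children:
        # keep only the hottest child at this level
        hottest = max(children, key=lambda c: c['size'])
        path.append({'i': len(path), 'function': hottest['function'],
                     'size': hottest['size']})
        children = hottest.get('children', [])
    # the hot path becomes the node's flattened children
    return dict(node, children=path)

tree = {'function': 'f', 'size': 80, 'children': [
    {'function': 'g', 'size': 48, 'children': [
        {'function': 'h', 'size': 16}]},
    {'function': 'k', 'size': 32}]}
print([c['function'] for c in hot_path(tree)['children']])  # ['g', 'h']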
def table(Result, results, diff_results=None, *,
|
def table(Result, results, diff_results=None, *,
|
||||||
by=None,
|
by=None,
|
||||||
fields=None,
|
fields=None,
|
||||||
@@ -754,124 +845,32 @@ def table(Result, results, diff_results=None, *,
|
|||||||
fields = Result._fields
|
fields = Result._fields
|
||||||
types = Result._types
|
types = Result._types
|
||||||
|
|
||||||
# fold again
|
# fold again, otherwise results risk being hidden
|
||||||
results = fold(Result, results, by=by)
|
results = fold(Result, results,
|
||||||
|
by=by,
|
||||||
|
depth=depth)
|
||||||
if diff_results is not None:
|
if diff_results is not None:
|
||||||
diff_results = fold(Result, diff_results, by=by)
|
diff_results = fold(Result, diff_results,
|
||||||
|
by=by,
|
||||||
# reduce children to hot paths? only used by some scripts
|
depth=depth)
|
||||||
if hot:
|
|
||||||
# subclass to reintroduce __dict__
|
|
||||||
Result_ = Result
|
|
||||||
class HotResult(Result_):
|
|
||||||
_i = '_hot_i'
|
|
||||||
_children = '_hot_children'
|
|
||||||
|
|
||||||
def __new__(cls, r, i=None, children=None, notes=None):
|
|
||||||
self = HotResult._make(r)
|
|
||||||
self._hot_i = i
|
|
||||||
self._hot_children = children if children is not None else []
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __add__(self, other):
|
|
||||||
return HotResult(
|
|
||||||
Result_.__add__(self, other),
|
|
||||||
self._hot_i if other._hot_i is None
|
|
||||||
else other._hot_i if self._hot_i is None
|
|
||||||
else min(self._hot_i, other._hot_i),
|
|
||||||
self._hot_children + other._hot_children)
|
|
||||||
|
|
||||||
results_ = []
|
|
||||||
for r in results:
|
|
||||||
hot_ = []
|
|
||||||
def recurse(results_, depth_):
|
|
||||||
nonlocal hot_
|
|
||||||
if not results_:
|
|
||||||
return
|
|
||||||
|
|
||||||
# find the hottest result
|
|
||||||
r = max(results_,
|
|
||||||
key=lambda r: tuple(
|
|
||||||
tuple((getattr(r, k),)
|
|
||||||
if getattr(r, k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])
|
|
||||||
if k in fields)
|
|
||||||
for k in it.chain(hot, [None])))
|
|
||||||
hot_.append(HotResult(r, i=len(hot_)))
|
|
||||||
|
|
||||||
# recurse?
|
|
||||||
if depth_ > 1:
|
|
||||||
recurse(getattr(r, Result._children),
|
|
||||||
depth_-1)
|
|
||||||
|
|
||||||
recurse(getattr(r, Result._children), depth-1)
|
|
||||||
results_.append(HotResult(r, children=hot_))
|
|
||||||
|
|
||||||
Result = HotResult
|
|
||||||
results = results_
|
|
||||||
|
|
||||||
# organize by name
|
# organize by name
|
||||||
table = {
|
table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results}
|
for r in results}
|
||||||
diff_table = {
|
diff_table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in diff_results or []}
|
for r in diff_results or []}
|
||||||
names = [name
|
|
||||||
for name in table.keys() | diff_table.keys()
|
|
||||||
if diff_results is None
|
|
||||||
or all_
|
|
||||||
or any(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(name), k, None),
|
|
||||||
getattr(diff_table.get(name), k, None))
|
|
||||||
for k in fields)]
|
|
||||||
|
|
||||||
# find compare entry if there is one
|
# find compare entry if there is one
|
||||||
if compare:
|
if compare:
|
||||||
compare_result = table.get(','.join(str(k) for k in compare))
|
compare_r = table.get(','.join(str(k) for k in compare))
|
||||||
|
|
||||||
# sort again, now with diff info, note that python's sort is stable
|
|
||||||
names.sort()
|
|
||||||
if compare:
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: (
|
|
||||||
# move compare entry to the top, note this can be
|
|
||||||
# overridden by explicitly sorting by fields
|
|
||||||
table.get(n) == compare_result,
|
|
||||||
# sort by ratio if comparing
|
|
||||||
tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(compare_result, k, None))
|
|
||||||
for k in fields)),
|
|
||||||
reverse=True)
|
|
||||||
if diff or percent:
|
|
||||||
names.sort(
|
|
||||||
# sort by ratio if diffing
|
|
||||||
key=lambda n: tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(diff_table.get(n), k, None))
|
|
||||||
for k in fields),
|
|
||||||
reverse=True)
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: tuple(
|
|
||||||
(getattr(table[n], k),)
|
|
||||||
if getattr(table.get(n), k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])),
|
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
|
||||||
|
|
||||||
|
|
||||||
# build up our lines
|
# build up our lines
|
||||||
lines = []
|
lines = []
|
||||||
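The reworked table() above keys both the new and the old results by their joined by-fields and takes the union of the names, so rows present on only one side still show up when diffing. A rough sketch of that matching step, with plain dicts, invented numbers, and one plausible ratio convention (not necessarily the script's exact one):

def ratio(new, old):
    # hypothetical ratio convention for illustration only
    if new is None and old is None: return 0.0
    if old is None or old == 0: return float('inf') if new else 0.0
    if new is None: return -1.0
    return (new - old) / old

new = {'lfs_mount': 48, 'lfs_format': 64}
old = {'lfs_mount': 40, 'lfs_unmount': 8}
for name in sorted(new.keys() | old.keys()):
    r = ratio(new.get(name), old.get(name))
    print('%-12s %4s -> %4s (%+.1f%%)' % (
        name, old.get(name, '-'), new.get(name, '-'), 100*r))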
@@ -898,11 +897,16 @@ def table(Result, results, diff_results=None, *,
|
|||||||
header.append('d'+k)
|
header.append('d'+k)
|
||||||
lines.append(header)
|
lines.append(header)
|
||||||
|
|
||||||
|
# delete these to try to catch typos below, we need to rebuild
|
||||||
|
# these tables at each recursive layer
|
||||||
|
del table
|
||||||
|
del diff_table
|
||||||
|
|
||||||
# entry helper
|
# entry helper
|
||||||
def table_entry(name, r, diff_r=None):
|
def table_entry(name, r, diff_r=None):
|
||||||
entry = [name]
|
entry = [name]
|
||||||
# normal entry?
|
# normal entry?
|
||||||
if ((compare is None or r == compare_result)
|
if ((compare is None or r == compare_r)
|
||||||
and not percent
|
and not percent
|
||||||
and not diff):
|
and not diff):
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -923,7 +927,7 @@ def table(Result, results, diff_results=None, *,
|
|||||||
else ['%+.1f%%' % (100*t)])(
|
else ['%+.1f%%' % (100*t)])(
|
||||||
types[k].ratio(
|
types[k].ratio(
|
||||||
getattr(r, k, None),
|
getattr(r, k, None),
|
||||||
getattr(compare_result, k, None)))))
|
getattr(compare_r, k, None)))))
|
||||||
# percent entry?
|
# percent entry?
|
||||||
elif not diff:
|
elif not diff:
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -969,71 +973,94 @@ def table(Result, results, diff_results=None, *,
|
|||||||
|
|
||||||
return entry
|
return entry
|
||||||
|
|
||||||
# recursive entry helper, only used by some scripts
|
# recursive entry helper
|
||||||
def recurse(results_, depth_,
|
def table_recurse(results_, diff_results_,
|
||||||
|
depth_,
|
||||||
prefixes=('', '', '', '')):
|
prefixes=('', '', '', '')):
|
||||||
# build the children table at each layer
|
# build the children table at each layer
|
||||||
results_ = fold(Result, results_, by=by)
|
|
||||||
table_ = {
|
table_ = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results_}
|
for r in results_}
|
||||||
names_ = list(table_.keys())
|
diff_table_ = {
|
||||||
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
|
for r in diff_results_ or []}
|
||||||
|
names_ = [n
|
||||||
|
for n in table_.keys() | diff_table_.keys()
|
||||||
|
if diff_results_ is None
|
||||||
|
or all_
|
||||||
|
or any(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(diff_table_.get(n), k, None))
|
||||||
|
for k in fields)]
|
||||||
|
|
||||||
# sort the children layer
|
# sort again, now with diff info, note that python's sort is stable
|
||||||
names_.sort()
|
names_.sort(key=lambda n: (
|
||||||
if hasattr(Result, '_i'):
|
# sort by explicit sort fields
|
||||||
names_.sort(key=lambda n: getattr(table_[n], Result._i))
|
tuple((Rev
|
||||||
if sort:
|
if reverse ^ (not k or k in Result._fields)
|
||||||
for k, reverse in reversed(sort):
|
else lambda x: x)(
|
||||||
names_.sort(
|
tuple((getattr(table_[n], k_),)
|
||||||
key=lambda n: tuple(
|
if getattr(table_.get(n), k_, None) is not None
|
||||||
(getattr(table_[n], k),)
|
else ()
|
||||||
if getattr(table_.get(n), k, None)
|
for k_ in ([k] if k else Result._sort)))
|
||||||
is not None
|
for k, reverse in (sort or [])),
|
||||||
else ()
|
# sort by ratio if diffing
|
||||||
for k in (
|
Rev(tuple(types[k].ratio(
|
||||||
[k] if k else [
|
getattr(table_.get(n), k, None),
|
||||||
k for k in Result._sort
|
getattr(diff_table_.get(n), k, None))
|
||||||
if k in fields])),
|
for k in fields))
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
if diff or percent
|
||||||
|
else (),
|
||||||
|
# move compare entry to the top, note this can be
|
||||||
|
# overridden by explicitly sorting by fields
|
||||||
|
(table_.get(n) != compare_r,
|
||||||
|
# sort by ratio if comparing
|
||||||
|
Rev(tuple(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(compare_r, k, None))
|
||||||
|
for k in fields)))
|
||||||
|
if compare
|
||||||
|
else (),
|
||||||
|
# sort by result
|
||||||
|
(table_[n],) if n in table_ else (),
|
||||||
|
# and finally by name (diffs may be missing results)
|
||||||
|
n))
|
||||||
|
|
||||||
for i, name in enumerate(names_):
|
for i, n in enumerate(names_):
|
||||||
r = table_[name]
|
# find comparable results
|
||||||
is_last = (i == len(names_)-1)
|
r = table_.get(n)
|
||||||
|
diff_r = diff_table_.get(n)
|
||||||
|
|
||||||
|
# build line
|
||||||
|
line = table_entry(n, r, diff_r)
|
||||||
|
|
||||||
line = table_entry(name, r)
|
|
||||||
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
|
||||||
# add prefixes
|
# add prefixes
|
||||||
line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
|
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
|
line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
|
||||||
lines.append(line)
|
lines.append(line)
|
||||||
|
|
||||||
# recurse?
|
# recurse?
|
||||||
if depth_ > 1:
|
if n in table_ and depth_ > 1:
|
||||||
recurse(getattr(r, Result._children),
|
table_recurse(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
getattr(diff_r, Result._children, None) or [],
|
||||||
depth_-1,
|
depth_-1,
|
||||||
(prefixes[2+is_last] + "|-> ",
|
(prefixes[2+(i==len(names_)-1)] + "|-> ",
|
||||||
prefixes[2+is_last] + "'-> ",
|
prefixes[2+(i==len(names_)-1)] + "'-> ",
|
||||||
prefixes[2+is_last] + "| ",
|
prefixes[2+(i==len(names_)-1)] + "| ",
|
||||||
prefixes[2+is_last] + " "))
|
prefixes[2+(i==len(names_)-1)] + " "))
|
||||||
|
|
||||||
# entries
|
# build entries
|
||||||
if not summary:
|
if not summary:
|
||||||
for name in names:
|
table_recurse(results, diff_results, depth)
|
||||||
r = table.get(name)
|
|
||||||
if diff_results is None:
|
|
||||||
diff_r = None
|
|
||||||
else:
|
|
||||||
diff_r = diff_table.get(name)
|
|
||||||
lines.append(table_entry(name, r, diff_r))
|
|
||||||
|
|
||||||
# recursive entries
|
|
||||||
if name in table and depth > 1:
|
|
||||||
recurse(getattr(table[name], Result._children),
|
|
||||||
depth-1,
|
|
||||||
("|-> ",
|
|
||||||
"'-> ",
|
|
||||||
"| ",
|
|
||||||
" "))
|
|
||||||
|
|
||||||
# total
|
# total
|
||||||
if not no_total and not (small_table and not summary):
|
if not no_total and not (small_table and not summary):
|
||||||
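table_recurse() above threads four prefixes through each recursive call so nested children render as an ASCII tree, with the last child at each level switching from "|-> " to "'-> ". A minimal standalone sketch of the same prefix scheme (toy data, and the padding widths are approximate):

def render(results, prefixes=('', '', '', '')):
    for i, (name, children) in enumerate(results):
        last = (i == len(results) - 1)
        # prefixes[0]/[1] decorate the entry itself, [2]/[3] its children
        print(prefixes[0 + last] + name)
        if children:
            render(children,
                (prefixes[2 + last] + "|-> ",
                 prefixes[2 + last] + "'-> ",
                 prefixes[2 + last] + "|   ",
                 prefixes[2 + last] + "    "))

render([('lfs_mount', [
    ('lfs_bd_read', [('lfs_crc', [])]),
    ('lfs_bd_prog', [])])])
# lfs_mount
# |-> lfs_bd_read
# |   '-> lfs_crc
# '-> lfs_bd_prog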
@@ -1045,9 +1072,8 @@ def table(Result, results, diff_results=None, *,
 lines.append(table_entry('TOTAL', r, diff_r))

 # homogenize
-lines = [
-[x if isinstance(x, tuple) else (x, []) for x in line]
-for line in lines]
+lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
+for line in lines]

 # find the best widths, note that column 0 contains the names and is
 # handled a bit differently
@@ -1068,18 +1094,144 @@ def table(Result, results, diff_results=None, *,
|
|||||||
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
||||||
for i, x in enumerate(line[1:], 1))))
|
for i, x in enumerate(line[1:], 1))))
|
||||||
|
|
||||||
|
def read_csv(path, Result, *,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'r') as f:
|
||||||
|
# csv or json? assume json starts with [
|
||||||
|
json = (f.buffer.peek(1)[:1] == b'[')
|
||||||
|
|
||||||
|
# read csv?
|
||||||
|
if not json:
|
||||||
|
results = []
|
||||||
|
reader = csv.DictReader(f, restval='')
|
||||||
|
for r in reader:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k].strip()}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k].strip()})))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results
|
||||||
|
|
||||||
|
# read json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
def unjsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| ({Result._children: unjsonify(
|
||||||
|
r[Result._children],
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and Result._children in r
|
||||||
|
and r[Result._children] is not None
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: set(r[Result._notes])}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and Result._notes in r
|
||||||
|
and r[Result._notes] is not None
|
||||||
|
else {}))))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results_
|
||||||
|
return unjsonify(json.load(f), depth)
|
||||||
|
|
||||||
|
def write_csv(path, Result, results, *,
|
||||||
|
json=False,
|
||||||
|
by=None,
|
||||||
|
fields=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'w') as f:
|
||||||
|
# write csv?
|
||||||
|
if not json:
|
||||||
|
writer = csv.DictWriter(f,
|
||||||
|
(by if by is not None else Result._by)
|
||||||
|
+ [k for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)])
|
||||||
|
writer.writeheader()
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
writer.writerow(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None})
|
||||||
|
|
||||||
|
# write json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
# the neat thing about json is we can include recursive results
|
||||||
|
def jsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| ({Result._children: jsonify(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and getattr(r, Result._children)
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: list(
|
||||||
|
getattr(r, Result._notes))}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and getattr(r, Result._notes)
|
||||||
|
else {}))
|
||||||
|
return results_
|
||||||
|
json.dump(jsonify(results, depth), f,
|
||||||
|
separators=(',', ':'))
|
||||||
|
|
||||||
|
|
||||||
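read_csv() above decides between CSV and JSON by peeking at the first byte of the stream and treating a leading '[' as JSON, which avoids a separate format flag. A cut-down sketch of the same sniffing (file handling is simplified; the script's own openio() also handles '-' for stdin):

import csv
import json

def read_results(path):
    with open(path, 'r') as f:
        # peek the underlying binary buffer so the text stream is undisturbed
        is_json = f.buffer.peek(1)[:1] == b'['
        if is_json:
            return json.load(f)
        return list(csv.DictReader(f, restval=''))

# read_results('results.csv') and read_results('results.json') both
# yield a list of dicts, so callers don't care which format was given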
def main(obj_paths, *,
|
def main(obj_paths, *,
|
||||||
by=None,
|
by=None,
|
||||||
fields=None,
|
fields=None,
|
||||||
defines=[],
|
defines=[],
|
||||||
sort=None,
|
sort=None,
|
||||||
|
depth=None,
|
||||||
|
hot=None,
|
||||||
**args):
|
**args):
|
||||||
# figure out depth
|
# figure out depth
|
||||||
if args.get('depth') is None:
|
if depth is None:
|
||||||
args['depth'] = mt.inf if args.get('hot') else 1
|
depth = mt.inf if hot else 1
|
||||||
elif args.get('depth') == 0:
|
elif depth == 0:
|
||||||
args['depth'] = mt.inf
|
depth = mt.inf
|
||||||
|
|
||||||
# find sizes
|
# find sizes
|
||||||
if not args.get('use', None):
|
if not args.get('use', None):
|
||||||
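The json path of write_csv() above relies on a jsonify() helper that nests children up to the requested depth, which is what lets recursive results round-trip between scripts. A simplified sketch of that depth-limited serialization, with plain dict records standing in for the Result namedtuples:

import json

def jsonify(results, depth):
    out = []
    for r in results:
        entry = {'function': r['function'], 'size': r['size']}
        # only descend while depth allows and children exist
        if r.get('children') and depth > 1:
            entry['children'] = jsonify(r['children'], depth - 1)
        out.append(entry)
    return out

results = [{'function': 'lfs_mount', 'size': 48, 'children': [
    {'function': 'lfs_bd_read', 'size': 16}]}]
print(json.dumps(jsonify(results, depth=2), separators=(',', ':')))
# [{"function":"lfs_mount","size":48,"children":[{"function":"lfs_bd_read","size":16}]}]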
@@ -1090,89 +1242,61 @@ def main(obj_paths, *,
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# collect info
|
# collect info
|
||||||
results = collect(obj_paths, **args)
|
results = collect_ctx(obj_paths,
|
||||||
|
depth=depth,
|
||||||
|
**args)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
results = []
|
results = read_csv(args['use'], CtxResult,
|
||||||
with openio(args['use']) as f:
|
depth=depth,
|
||||||
reader = csv.DictReader(f, restval='')
|
**args)
|
||||||
for r in reader:
|
|
||||||
# filter by matching defines
|
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in CtxResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
results.append(CtxResult(
|
|
||||||
**{k: r[k] for k in CtxResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k]
|
|
||||||
for k in CtxResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
results = fold(CtxResult, results, by=by, defines=defines)
|
results = fold(CtxResult, results,
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
depth=depth)
|
||||||
|
|
||||||
# sort, note that python's sort is stable
|
# hotify?
|
||||||
results.sort()
|
if hot:
|
||||||
if sort:
|
results = hotify(CtxResult, results,
|
||||||
for k, reverse in reversed(sort):
|
fields=fields,
|
||||||
results.sort(
|
depth=depth,
|
||||||
key=lambda r: tuple(
|
hot=hot,
|
||||||
(getattr(r, k),) if getattr(r, k) is not None else ()
|
**args)
|
||||||
for k in ([k] if k else CtxResult._sort)),
|
|
||||||
reverse=reverse ^ (not k or k in CtxResult._fields))
|
|
||||||
|
|
||||||
# write results to CSV
|
# write results to CSV/JSON
|
||||||
if args.get('output'):
|
if args.get('output'):
|
||||||
with openio(args['output'], 'w') as f:
|
write_csv(args['output'], CtxResult, results,
|
||||||
writer = csv.DictWriter(f,
|
by=by,
|
||||||
(by if by is not None else CtxResult._by)
|
fields=fields,
|
||||||
+ [k for k in (
|
depth=depth,
|
||||||
fields if fields is not None
|
**args)
|
||||||
else CtxResult._fields)])
|
if args.get('output_json'):
|
||||||
writer.writeheader()
|
write_csv(args['output_json'], CtxResult, results,
|
||||||
for r in results:
|
json=True,
|
||||||
writer.writerow(
|
by=by,
|
||||||
{k: getattr(r, k) for k in (
|
fields=fields,
|
||||||
by if by is not None else CtxResult._by)}
|
depth=depth,
|
||||||
| {k: getattr(r, k) for k in (
|
**args)
|
||||||
fields if fields is not None
|
|
||||||
else CtxResult._fields)})
|
|
||||||
|
|
||||||
# find previous results?
|
# find previous results?
|
||||||
diff_results = None
|
diff_results = None
|
||||||
if args.get('diff') or args.get('percent'):
|
if args.get('diff') or args.get('percent'):
|
||||||
diff_results = []
|
|
||||||
try:
|
try:
|
||||||
with openio(args.get('diff') or args.get('percent')) as f:
|
diff_results = read_csv(
|
||||||
reader = csv.DictReader(f, restval='')
|
args.get('diff') or args.get('percent'),
|
||||||
for r in reader:
|
CtxResult,
|
||||||
# filter by matching defines
|
depth=depth,
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
**args)
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in CtxResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
diff_results.append(CtxResult(
|
|
||||||
**{k: r[k] for k in CtxResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k]
|
|
||||||
for k in CtxResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
diff_results = []
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
diff_results = fold(CtxResult, diff_results, by=by, defines=defines)
|
diff_results = fold(CtxResult, diff_results,
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
depth=depth)
|
||||||
|
|
||||||
# print table
|
# print table
|
||||||
if not args.get('quiet'):
|
if not args.get('quiet'):
|
||||||
@@ -1180,6 +1304,7 @@ def main(obj_paths, *,
|
|||||||
by=by if by is not None else ['function'],
|
by=by if by is not None else ['function'],
|
||||||
fields=fields,
|
fields=fields,
|
||||||
sort=sort,
|
sort=sort,
|
||||||
|
depth=depth,
|
||||||
**args)
|
**args)
|
||||||
|
|
||||||
|
|
||||||
@@ -1204,24 +1329,28 @@ if __name__ == "__main__":
 parser.add_argument(
 '-o', '--output',
 help="Specify CSV file to store results.")
+parser.add_argument(
+'-O', '--output-json',
+help="Specify JSON file to store results. This may contain "
+"recursive info.")
 parser.add_argument(
 '-u', '--use',
-help="Don't parse anything, use this CSV file.")
+help="Don't parse anything, use this CSV/JSON file.")
 parser.add_argument(
 '-d', '--diff',
-help="Specify CSV file to diff against.")
+help="Specify CSV/JSON file to diff against.")
 parser.add_argument(
 '-p', '--percent',
-help="Specify CSV file to diff against, but only show precentage "
-"change, not a full diff.")
+help="Specify CSV/JSON file to diff against, but only show "
+"percentage change, not a full diff.")
-parser.add_argument(
-'-a', '--all',
-action='store_true',
-help="Show all, not just the ones that changed.")
 parser.add_argument(
 '-c', '--compare',
 type=lambda x: tuple(v.strip() for v in x.split(',')),
 help="Compare results to the row matching this by pattern.")
+parser.add_argument(
+'-a', '--all',
+action='store_true',
+help="Show all, not just the ones that changed.")
 parser.add_argument(
 '-b', '--by',
 action='append',
@@ -1247,7 +1376,7 @@ if __name__ == "__main__":
 def __call__(self, parser, namespace, value, option):
 if namespace.sort is None:
 namespace.sort = []
-namespace.sort.append((value, True if option == '-S' else False))
+namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
 parser.add_argument(
 '-s', '--sort',
 nargs='?',
@@ -1258,6 +1387,29 @@ if __name__ == "__main__":
 nargs='?',
 action=AppendSort,
 help="Sort by this field, but backwards.")
+parser.add_argument(
+'-z', '--depth',
+nargs='?',
+type=lambda x: int(x, 0),
+const=0,
+help="Depth of function calls to show. 0 shows all calls unless "
+"we find a cycle. Defaults to 0.")
+class AppendHot(argparse.Action):
+def __call__(self, parser, namespace, value, option):
+if namespace.hot is None:
+namespace.hot = []
+namespace.hot.append((value, option in {'-R', '--reverse-hot'}))
+parser.add_argument(
+'-r', '--hot',
+nargs='?',
+action=AppendHot,
+help="Show only the hot path for each function call. Can "
+"optionally provide fields like sort.")
+parser.add_argument(
+'-R', '--reverse-hot',
+nargs='?',
+action=AppendHot,
+help="Like -r/--hot, but backwards.")
 parser.add_argument(
 '--no-header',
 action='store_true',
@@ -1282,18 +1434,6 @@ if __name__ == "__main__":
 '--everything',
 action='store_true',
 help="Include builtin and libc specific symbols.")
-parser.add_argument(
-'-z', '--depth',
-nargs='?',
-type=lambda x: int(x, 0),
-const=0,
-help="Depth of function calls to show. 0 shows all calls unless "
-"we find a cycle. Defaults to 0.")
-parser.add_argument(
-'-t', '--hot',
-nargs='?',
-action='append',
-help="Show only the hot path for each function call.")
 parser.add_argument(
 '--objdump-path',
 type=lambda x: x.split(),
557 scripts/data.py
@@ -18,8 +18,9 @@ if __name__ == "__main__":

 import collections as co
 import csv
-import itertools as it
 import functools as ft
+import io
+import itertools as it
 import math as mt
 import os
 import re
@@ -449,7 +450,7 @@ def collect_dwarf_info(obj_path, tags=None, *,

 return DwarfInfo(info)

-def collect(obj_paths, *,
+def collect_data(obj_paths, *,
 everything=False,
 **args):
 results = []
@@ -496,7 +497,31 @@ def collect(obj_paths, *,
|
|||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def fold(Result, results, by=None, defines=[]):
|
# common folding/tabling/read/write code
|
||||||
|
|
||||||
|
class Rev(co.namedtuple('Rev', 'x')):
|
||||||
|
__slots__ = ()
|
||||||
|
# yes we need all of these because we're a namedtuple
|
||||||
|
def __lt__(self, other):
|
||||||
|
return self.x > other.x
|
||||||
|
def __gt__(self, other):
|
||||||
|
return self.x < other.x
|
||||||
|
def __le__(self, other):
|
||||||
|
return self.x >= other.x
|
||||||
|
def __ge__(self, other):
|
||||||
|
return self.x <= other.x
|
||||||
|
|
||||||
|
def fold(Result, results, *,
|
||||||
|
by=None,
|
||||||
|
defines=[],
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
# stop when depth hits zero
|
||||||
|
if depth == 0:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# organize by by
|
||||||
if by is None:
|
if by is None:
|
||||||
by = Result._by
|
by = Result._by
|
||||||
|
|
||||||
@@ -510,7 +535,7 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
if defines:
|
if defines:
|
||||||
results_ = []
|
results_ = []
|
||||||
for r in results:
|
for r in results:
|
||||||
if all(getattr(r, k) in vs for k, vs in defines):
|
if all(str(getattr(r, k)) in vs for k, vs in defines):
|
||||||
results_.append(r)
|
results_.append(r)
|
||||||
results = results_
|
results = results_
|
||||||
|
|
||||||
@@ -527,6 +552,31 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
for name, rs in folding.items():
|
for name, rs in folding.items():
|
||||||
folded.append(sum(rs[1:], start=rs[0]))
|
folded.append(sum(rs[1:], start=rs[0]))
|
||||||
|
|
||||||
|
# sort, note that python's sort is stable
|
||||||
|
folded.sort(key=lambda r: (
|
||||||
|
# sort by explicit sort fields
|
||||||
|
tuple((Rev
|
||||||
|
if reverse ^ (not k or k in Result._fields)
|
||||||
|
else lambda x: x)(
|
||||||
|
tuple((getattr(r, k_),)
|
||||||
|
if getattr(r, k_) is not None
|
||||||
|
else ()
|
||||||
|
for k_ in ([k] if k else Result._sort)))
|
||||||
|
for k, reverse in (sort or [])),
|
||||||
|
# sort by result
|
||||||
|
r))
|
||||||
|
|
||||||
|
# recurse if we have recursive results
|
||||||
|
if hasattr(Result, '_children'):
|
||||||
|
folded = [r._replace(**{
|
||||||
|
Result._children: fold(
|
||||||
|
Result, getattr(r, Result._children),
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
sort=sort,
|
||||||
|
depth=depth-1)})
|
||||||
|
for r in folded]
|
||||||
|
|
||||||
return folded
|
return folded
|
||||||
|
|
||||||
def table(Result, results, diff_results=None, *,
|
def table(Result, results, diff_results=None, *,
|
||||||
@@ -554,124 +604,32 @@ def table(Result, results, diff_results=None, *,
|
|||||||
fields = Result._fields
|
fields = Result._fields
|
||||||
types = Result._types
|
types = Result._types
|
||||||
|
|
||||||
# fold again
|
# fold again, otherwise results risk being hidden
|
||||||
results = fold(Result, results, by=by)
|
results = fold(Result, results,
|
||||||
|
by=by,
|
||||||
|
depth=depth)
|
||||||
if diff_results is not None:
|
if diff_results is not None:
|
||||||
diff_results = fold(Result, diff_results, by=by)
|
diff_results = fold(Result, diff_results,
|
||||||
|
by=by,
|
||||||
# reduce children to hot paths? only used by some scripts
|
depth=depth)
|
||||||
if hot:
|
|
||||||
# subclass to reintroduce __dict__
|
|
||||||
Result_ = Result
|
|
||||||
class HotResult(Result_):
|
|
||||||
_i = '_hot_i'
|
|
||||||
_children = '_hot_children'
|
|
||||||
|
|
||||||
def __new__(cls, r, i=None, children=None, notes=None):
|
|
||||||
self = HotResult._make(r)
|
|
||||||
self._hot_i = i
|
|
||||||
self._hot_children = children if children is not None else []
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __add__(self, other):
|
|
||||||
return HotResult(
|
|
||||||
Result_.__add__(self, other),
|
|
||||||
self._hot_i if other._hot_i is None
|
|
||||||
else other._hot_i if self._hot_i is None
|
|
||||||
else min(self._hot_i, other._hot_i),
|
|
||||||
self._hot_children + other._hot_children)
|
|
||||||
|
|
||||||
results_ = []
|
|
||||||
for r in results:
|
|
||||||
hot_ = []
|
|
||||||
def recurse(results_, depth_):
|
|
||||||
nonlocal hot_
|
|
||||||
if not results_:
|
|
||||||
return
|
|
||||||
|
|
||||||
# find the hottest result
|
|
||||||
r = max(results_,
|
|
||||||
key=lambda r: tuple(
|
|
||||||
tuple((getattr(r, k),)
|
|
||||||
if getattr(r, k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])
|
|
||||||
if k in fields)
|
|
||||||
for k in it.chain(hot, [None])))
|
|
||||||
hot_.append(HotResult(r, i=len(hot_)))
|
|
||||||
|
|
||||||
# recurse?
|
|
||||||
if depth_ > 1:
|
|
||||||
recurse(getattr(r, Result._children),
|
|
||||||
depth_-1)
|
|
||||||
|
|
||||||
recurse(getattr(r, Result._children), depth-1)
|
|
||||||
results_.append(HotResult(r, children=hot_))
|
|
||||||
|
|
||||||
Result = HotResult
|
|
||||||
results = results_
|
|
||||||
|
|
||||||
# organize by name
|
# organize by name
|
||||||
table = {
|
table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results}
|
for r in results}
|
||||||
diff_table = {
|
diff_table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in diff_results or []}
|
for r in diff_results or []}
|
||||||
names = [name
|
|
||||||
for name in table.keys() | diff_table.keys()
|
|
||||||
if diff_results is None
|
|
||||||
or all_
|
|
||||||
or any(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(name), k, None),
|
|
||||||
getattr(diff_table.get(name), k, None))
|
|
||||||
for k in fields)]
|
|
||||||
|
|
||||||
# find compare entry if there is one
|
# find compare entry if there is one
|
||||||
if compare:
|
if compare:
|
||||||
compare_result = table.get(','.join(str(k) for k in compare))
|
compare_r = table.get(','.join(str(k) for k in compare))
|
||||||
|
|
||||||
# sort again, now with diff info, note that python's sort is stable
|
|
||||||
names.sort()
|
|
||||||
if compare:
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: (
|
|
||||||
# move compare entry to the top, note this can be
|
|
||||||
# overridden by explicitly sorting by fields
|
|
||||||
table.get(n) == compare_result,
|
|
||||||
# sort by ratio if comparing
|
|
||||||
tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(compare_result, k, None))
|
|
||||||
for k in fields)),
|
|
||||||
reverse=True)
|
|
||||||
if diff or percent:
|
|
||||||
names.sort(
|
|
||||||
# sort by ratio if diffing
|
|
||||||
key=lambda n: tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(diff_table.get(n), k, None))
|
|
||||||
for k in fields),
|
|
||||||
reverse=True)
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: tuple(
|
|
||||||
(getattr(table[n], k),)
|
|
||||||
if getattr(table.get(n), k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])),
|
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
|
||||||
|
|
||||||
|
|
||||||
# build up our lines
|
# build up our lines
|
||||||
lines = []
|
lines = []
|
||||||
@@ -698,11 +656,16 @@ def table(Result, results, diff_results=None, *,
|
|||||||
header.append('d'+k)
|
header.append('d'+k)
|
||||||
lines.append(header)
|
lines.append(header)
|
||||||
|
|
||||||
|
# delete these to try to catch typos below, we need to rebuild
|
||||||
|
# these tables at each recursive layer
|
||||||
|
del table
|
||||||
|
del diff_table
|
||||||
|
|
||||||
# entry helper
|
# entry helper
|
||||||
def table_entry(name, r, diff_r=None):
|
def table_entry(name, r, diff_r=None):
|
||||||
entry = [name]
|
entry = [name]
|
||||||
# normal entry?
|
# normal entry?
|
||||||
if ((compare is None or r == compare_result)
|
if ((compare is None or r == compare_r)
|
||||||
and not percent
|
and not percent
|
||||||
and not diff):
|
and not diff):
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -723,7 +686,7 @@ def table(Result, results, diff_results=None, *,
|
|||||||
else ['%+.1f%%' % (100*t)])(
|
else ['%+.1f%%' % (100*t)])(
|
||||||
types[k].ratio(
|
types[k].ratio(
|
||||||
getattr(r, k, None),
|
getattr(r, k, None),
|
||||||
getattr(compare_result, k, None)))))
|
getattr(compare_r, k, None)))))
|
||||||
# percent entry?
|
# percent entry?
|
||||||
elif not diff:
|
elif not diff:
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -769,71 +732,94 @@ def table(Result, results, diff_results=None, *,
|
|||||||
|
|
||||||
return entry
|
return entry
|
||||||
|
|
||||||
# recursive entry helper, only used by some scripts
|
# recursive entry helper
|
||||||
def recurse(results_, depth_,
|
def table_recurse(results_, diff_results_,
|
||||||
|
depth_,
|
||||||
prefixes=('', '', '', '')):
|
prefixes=('', '', '', '')):
|
||||||
# build the children table at each layer
|
# build the children table at each layer
|
||||||
results_ = fold(Result, results_, by=by)
|
|
||||||
table_ = {
|
table_ = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results_}
|
for r in results_}
|
||||||
names_ = list(table_.keys())
|
diff_table_ = {
|
||||||
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
|
for r in diff_results_ or []}
|
||||||
|
names_ = [n
|
||||||
|
for n in table_.keys() | diff_table_.keys()
|
||||||
|
if diff_results_ is None
|
||||||
|
or all_
|
||||||
|
or any(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(diff_table_.get(n), k, None))
|
||||||
|
for k in fields)]
|
||||||
|
|
||||||
# sort the children layer
|
# sort again, now with diff info, note that python's sort is stable
|
||||||
names_.sort()
|
names_.sort(key=lambda n: (
|
||||||
if hasattr(Result, '_i'):
|
# sort by explicit sort fields
|
||||||
names_.sort(key=lambda n: getattr(table_[n], Result._i))
|
tuple((Rev
|
||||||
if sort:
|
if reverse ^ (not k or k in Result._fields)
|
||||||
for k, reverse in reversed(sort):
|
else lambda x: x)(
|
||||||
names_.sort(
|
tuple((getattr(table_[n], k_),)
|
||||||
key=lambda n: tuple(
|
if getattr(table_.get(n), k_, None) is not None
|
||||||
(getattr(table_[n], k),)
|
else ()
|
||||||
if getattr(table_.get(n), k, None)
|
for k_ in ([k] if k else Result._sort)))
|
||||||
is not None
|
for k, reverse in (sort or [])),
|
||||||
else ()
|
# sort by ratio if diffing
|
||||||
for k in (
|
Rev(tuple(types[k].ratio(
|
||||||
[k] if k else [
|
getattr(table_.get(n), k, None),
|
||||||
k for k in Result._sort
|
getattr(diff_table_.get(n), k, None))
|
||||||
if k in fields])),
|
for k in fields))
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
if diff or percent
|
||||||
|
else (),
|
||||||
|
# move compare entry to the top, note this can be
|
||||||
|
# overridden by explicitly sorting by fields
|
||||||
|
(table_.get(n) != compare_r,
|
||||||
|
# sort by ratio if comparing
|
||||||
|
Rev(tuple(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(compare_r, k, None))
|
||||||
|
for k in fields)))
|
||||||
|
if compare
|
||||||
|
else (),
|
||||||
|
# sort by result
|
||||||
|
(table_[n],) if n in table_ else (),
|
||||||
|
# and finally by name (diffs may be missing results)
|
||||||
|
n))
|
||||||
|
|
||||||
for i, name in enumerate(names_):
|
for i, n in enumerate(names_):
|
||||||
r = table_[name]
|
# find comparable results
|
||||||
is_last = (i == len(names_)-1)
|
r = table_.get(n)
|
||||||
|
diff_r = diff_table_.get(n)
|
||||||
|
|
||||||
|
# build line
|
||||||
|
line = table_entry(n, r, diff_r)
|
||||||
|
|
||||||
line = table_entry(name, r)
|
|
||||||
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
|
||||||
# add prefixes
|
# add prefixes
|
||||||
line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
|
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
|
line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
|
||||||
lines.append(line)
|
lines.append(line)
|
||||||
|
|
||||||
# recurse?
|
# recurse?
|
||||||
if depth_ > 1:
|
if n in table_ and depth_ > 1:
|
||||||
recurse(getattr(r, Result._children),
|
table_recurse(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
getattr(diff_r, Result._children, None) or [],
|
||||||
depth_-1,
|
depth_-1,
|
||||||
(prefixes[2+is_last] + "|-> ",
|
(prefixes[2+(i==len(names_)-1)] + "|-> ",
|
||||||
prefixes[2+is_last] + "'-> ",
|
prefixes[2+(i==len(names_)-1)] + "'-> ",
|
||||||
prefixes[2+is_last] + "| ",
|
prefixes[2+(i==len(names_)-1)] + "| ",
|
||||||
prefixes[2+is_last] + " "))
|
prefixes[2+(i==len(names_)-1)] + " "))
|
||||||
|
|
||||||
# entries
|
# build entries
|
||||||
if not summary:
|
if not summary:
|
||||||
for name in names:
|
table_recurse(results, diff_results, depth)
|
||||||
r = table.get(name)
|
|
||||||
if diff_results is None:
|
|
||||||
diff_r = None
|
|
||||||
else:
|
|
||||||
diff_r = diff_table.get(name)
|
|
||||||
lines.append(table_entry(name, r, diff_r))
|
|
||||||
|
|
||||||
# recursive entries
|
|
||||||
if name in table and depth > 1:
|
|
||||||
recurse(getattr(table[name], Result._children),
|
|
||||||
depth-1,
|
|
||||||
("|-> ",
|
|
||||||
"'-> ",
|
|
||||||
"| ",
|
|
||||||
" "))
|
|
||||||
|
|
||||||
# total
|
# total
|
||||||
if not no_total and not (small_table and not summary):
|
if not no_total and not (small_table and not summary):
|
||||||
@@ -845,9 +831,8 @@ def table(Result, results, diff_results=None, *,
|
|||||||
lines.append(table_entry('TOTAL', r, diff_r))
|
lines.append(table_entry('TOTAL', r, diff_r))
|
||||||
|
|
||||||
# homogenize
|
# homogenize
|
||||||
lines = [
|
lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
[x if isinstance(x, tuple) else (x, []) for x in line]
|
for line in lines]
|
||||||
for line in lines]
|
|
||||||
|
|
||||||
# find the best widths, note that column 0 contains the names and is
|
# find the best widths, note that column 0 contains the names and is
|
||||||
# handled a bit differently
|
# handled a bit differently
|
||||||
@@ -868,6 +853,130 @@ def table(Result, results, diff_results=None, *,
|
|||||||
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
||||||
for i, x in enumerate(line[1:], 1))))
|
for i, x in enumerate(line[1:], 1))))
|
||||||
|
|
||||||
|
def read_csv(path, Result, *,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'r') as f:
|
||||||
|
# csv or json? assume json starts with [
|
||||||
|
json = (f.buffer.peek(1)[:1] == b'[')
|
||||||
|
|
||||||
|
# read csv?
|
||||||
|
if not json:
|
||||||
|
results = []
|
||||||
|
reader = csv.DictReader(f, restval='')
|
||||||
|
for r in reader:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k].strip()}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k].strip()})))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results
|
||||||
|
|
||||||
|
# read json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
def unjsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| ({Result._children: unjsonify(
|
||||||
|
r[Result._children],
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and Result._children in r
|
||||||
|
and r[Result._children] is not None
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: set(r[Result._notes])}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and Result._notes in r
|
||||||
|
and r[Result._notes] is not None
|
||||||
|
else {}))))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results_
|
||||||
|
return unjsonify(json.load(f), depth)
|
||||||
|
|
||||||
|
def write_csv(path, Result, results, *,
|
||||||
|
json=False,
|
||||||
|
by=None,
|
||||||
|
fields=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'w') as f:
|
||||||
|
# write csv?
|
||||||
|
if not json:
|
||||||
|
writer = csv.DictWriter(f,
|
||||||
|
(by if by is not None else Result._by)
|
||||||
|
+ [k for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)])
|
||||||
|
writer.writeheader()
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
writer.writerow(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None})
|
||||||
|
|
||||||
|
# write json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
# the neat thing about json is we can include recursive results
|
||||||
|
def jsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| ({Result._children: jsonify(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and getattr(r, Result._children)
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: list(
|
||||||
|
getattr(r, Result._notes))}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and getattr(r, Result._notes)
|
||||||
|
else {}))
|
||||||
|
return results_
|
||||||
|
json.dump(jsonify(results, depth), f,
|
||||||
|
separators=(',', ':'))
|
||||||
|
|
||||||
|
|
||||||
def main(obj_paths, *,
|
def main(obj_paths, *,
|
||||||
by=None,
|
by=None,
|
||||||
@@ -884,84 +993,46 @@ def main(obj_paths, *,
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# collect info
|
# collect info
|
||||||
results = collect(obj_paths, **args)
|
results = collect_data(obj_paths,
|
||||||
|
**args)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
results = []
|
results = read_csv(args['use'], DataResult,
|
||||||
with openio(args['use']) as f:
|
**args)
|
||||||
reader = csv.DictReader(f, restval='')
|
|
||||||
for r in reader:
|
|
||||||
# filter by matching defines
|
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
results.append(DataResult(
|
|
||||||
**{k: r[k] for k in DataResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in DataResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
results = fold(DataResult, results, by=by, defines=defines)
|
results = fold(DataResult, results,
|
||||||
|
by=by,
|
||||||
|
defines=defines)
|
||||||
|
|
||||||
# sort, note that python's sort is stable
|
# write results to CSV/JSON
|
||||||
results.sort()
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
results.sort(
|
|
||||||
key=lambda r: tuple(
|
|
||||||
(getattr(r, k),) if getattr(r, k) is not None else ()
|
|
||||||
for k in ([k] if k else DataResult._sort)),
|
|
||||||
reverse=reverse ^ (not k or k in DataResult._fields))
|
|
||||||
|
|
||||||
# write results to CSV
|
|
||||||
if args.get('output'):
|
if args.get('output'):
|
||||||
with openio(args['output'], 'w') as f:
|
write_csv(args['output'], DataResult, results,
|
||||||
writer = csv.DictWriter(f,
|
by=by,
|
||||||
(by if by is not None else DataResult._by)
|
fields=fields,
|
||||||
+ [k for k in (
|
**args)
|
||||||
fields if fields is not None
|
if args.get('output_json'):
|
||||||
else DataResult._fields)])
|
write_csv(args['output_json'], DataResult, results,
|
||||||
writer.writeheader()
|
json=True,
|
||||||
for r in results:
|
by=by,
|
||||||
writer.writerow(
|
fields=fields,
|
||||||
{k: getattr(r, k) for k in (
|
**args)
|
||||||
by if by is not None else DataResult._by)}
|
|
||||||
| {k: getattr(r, k) for k in (
|
|
||||||
fields if fields is not None
|
|
||||||
else DataResult._fields)})
|
|
||||||
|
|
||||||
# find previous results?
|
# find previous results?
|
||||||
diff_results = None
|
diff_results = None
|
||||||
if args.get('diff') or args.get('percent'):
|
if args.get('diff') or args.get('percent'):
|
||||||
diff_results = []
|
|
||||||
try:
|
try:
|
||||||
with openio(args.get('diff') or args.get('percent')) as f:
|
diff_results = read_csv(
|
||||||
reader = csv.DictReader(f, restval='')
|
args.get('diff') or args.get('percent'),
|
||||||
for r in reader:
|
DataResult,
|
||||||
# filter by matching defines
|
**args)
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in DataResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
diff_results.append(DataResult(
|
|
||||||
**{k: r[k] for k in DataResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in DataResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
diff_results = []
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
diff_results = fold(DataResult, diff_results, by=by, defines=defines)
|
diff_results = fold(DataResult, diff_results,
|
||||||
|
by=by,
|
||||||
|
defines=defines)
|
||||||
|
|
||||||
# print table
|
# print table
|
||||||
if not args.get('quiet'):
|
if not args.get('quiet'):
|
||||||
@@ -993,24 +1064,28 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-o', '--output',
|
'-o', '--output',
|
||||||
help="Specify CSV file to store results.")
|
help="Specify CSV file to store results.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-O', '--output-json',
|
||||||
|
help="Specify JSON file to store results. This may contain "
|
||||||
|
"recursive info.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-u', '--use',
|
'-u', '--use',
|
||||||
help="Don't parse anything, use this CSV file.")
|
help="Don't parse anything, use this CSV/JSON file.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-d', '--diff',
|
'-d', '--diff',
|
||||||
help="Specify CSV file to diff against.")
|
help="Specify CSV/JSON file to diff against.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-p', '--percent',
|
'-p', '--percent',
|
||||||
help="Specify CSV file to diff against, but only show precentage "
|
help="Specify CSV/JSON file to diff against, but only show "
|
||||||
"change, not a full diff.")
|
"percentage change, not a full diff.")
|
||||||
parser.add_argument(
|
|
||||||
'-a', '--all',
|
|
||||||
action='store_true',
|
|
||||||
help="Show all, not just the ones that changed.")
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-c', '--compare',
|
'-c', '--compare',
|
||||||
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
||||||
help="Compare results to the row matching this by pattern.")
|
help="Compare results to the row matching this by pattern.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-a', '--all',
|
||||||
|
action='store_true',
|
||||||
|
help="Show all, not just the ones that changed.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-b', '--by',
|
'-b', '--by',
|
||||||
action='append',
|
action='append',
|
||||||
@@ -1036,7 +1111,7 @@ if __name__ == "__main__":
|
|||||||
def __call__(self, parser, namespace, value, option):
|
def __call__(self, parser, namespace, value, option):
|
||||||
if namespace.sort is None:
|
if namespace.sort is None:
|
||||||
namespace.sort = []
|
namespace.sort = []
|
||||||
namespace.sort.append((value, True if option == '-S' else False))
|
namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-s', '--sort',
|
'-s', '--sort',
|
||||||
nargs='?',
|
nargs='?',
|
||||||
|
|||||||
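The -S/--reverse-sort fix above works because argparse hands Action.__call__ whichever option string was actually typed, so the old option == '-S' test silently ignored the long form. A standalone sketch of the corrected pattern (not the script's full parser):

import argparse

class AppendSort(argparse.Action):
    def __call__(self, parser, namespace, value, option):
        # option is whichever string was used on the command line,
        # so both the short and long forms must be checked
        if namespace.sort is None:
            namespace.sort = []
        namespace.sort.append((value, option in {'-S', '--reverse-sort'}))

parser = argparse.ArgumentParser()
parser.add_argument('-s', '--sort', nargs='?', action=AppendSort)
parser.add_argument('-S', '--reverse-sort', nargs='?', action=AppendSort)
print(parser.parse_args(['--reverse-sort', 'size']).sort)  # [('size', True)]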
693 scripts/perf.py
@@ -3,7 +3,7 @@
|
|||||||
# Script to aggregate and report Linux perf results.
|
# Script to aggregate and report Linux perf results.
|
||||||
#
|
#
|
||||||
# Example:
|
# Example:
|
||||||
# ./scripts/perf.py -R -obench.perf ./runners/bench_runner
|
# ./scripts/perf.py --record -obench.perf ./runners/bench_runner
|
||||||
# ./scripts/perf.py bench.perf -j -Flfs.c -Flfs_util.c -Scycles
|
# ./scripts/perf.py bench.perf -j -Flfs.c -Flfs_util.c -Scycles
|
||||||
#
|
#
|
||||||
# Copyright (c) 2022, The littlefs authors.
|
# Copyright (c) 2022, The littlefs authors.
|
||||||
@@ -20,6 +20,7 @@ import csv
|
|||||||
import errno
|
import errno
|
||||||
import fcntl
|
import fcntl
|
||||||
import functools as ft
|
import functools as ft
|
||||||
|
import io
|
||||||
import itertools as it
|
import itertools as it
|
||||||
import math as mt
|
import math as mt
|
||||||
import multiprocessing as mp
|
import multiprocessing as mp
|
||||||
@@ -146,30 +147,31 @@ class RInt(co.namedtuple('RInt', 'x')):
|
|||||||
|
|
||||||
# perf results
|
# perf results
|
||||||
class PerfResult(co.namedtuple('PerfResult', [
|
class PerfResult(co.namedtuple('PerfResult', [
|
||||||
'file', 'function', 'line',
|
'i', 'file', 'function', 'line',
|
||||||
'cycles', 'bmisses', 'branches', 'cmisses', 'caches',
|
'cycles', 'bmisses', 'branches', 'cmisses', 'caches',
|
||||||
'children'])):
|
'children'])):
|
||||||
_by = ['file', 'function', 'line']
|
_by = ['i', 'file', 'function', 'line']
|
||||||
_fields = ['cycles', 'bmisses', 'branches', 'cmisses', 'caches']
|
_fields = ['cycles', 'bmisses', 'branches', 'cmisses', 'caches']
|
||||||
_sort = ['cycles', 'bmisses', 'cmisses', 'branches', 'caches']
|
_sort = ['cycles', 'bmisses', 'cmisses', 'branches', 'caches']
|
||||||
_types = {
|
_types = {
|
||||||
'cycles': RInt,
|
'cycles': RInt,
|
||||||
'bmisses': RInt, 'branches': RInt,
|
'bmisses': RInt, 'branches': RInt,
|
||||||
'cmisses': RInt, 'caches': RInt}
|
'cmisses': RInt, 'caches': RInt}
|
||||||
|
_i = 'i'
|
||||||
_children = 'children'
|
_children = 'children'
|
||||||
|
|
||||||
__slots__ = ()
|
__slots__ = ()
|
||||||
def __new__(cls, file='', function='', line=0,
|
def __new__(cls, i=None, file='', function='', line=0,
|
||||||
cycles=0, bmisses=0, branches=0, cmisses=0, caches=0,
|
cycles=0, bmisses=0, branches=0, cmisses=0, caches=0,
|
||||||
children=None):
|
children=None):
|
||||||
return super().__new__(cls, file, function, int(RInt(line)),
|
return super().__new__(cls, i, file, function, int(RInt(line)),
|
||||||
RInt(cycles),
|
RInt(cycles),
|
||||||
RInt(bmisses), RInt(branches),
|
RInt(bmisses), RInt(branches),
|
||||||
RInt(cmisses), RInt(caches),
|
RInt(cmisses), RInt(caches),
|
||||||
children if children is not None else [])
|
children if children is not None else [])
|
||||||
|
|
||||||
def __add__(self, other):
|
def __add__(self, other):
|
||||||
return PerfResult(self.file, self.function, self.line,
|
return PerfResult(self.i, self.file, self.function, self.line,
|
||||||
self.cycles + other.cycles,
|
self.cycles + other.cycles,
|
||||||
self.bmisses + other.bmisses,
|
self.bmisses + other.bmisses,
|
||||||
self.branches + other.branches,
|
self.branches + other.branches,
|
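Threading an explicit i index through PerfResult (and listing it first in _by) looks like the answer to the commit's ordering question: rows with different indices never fold together, and since i leads the tuple, the natural result ordering follows the hot-path order even after a CSV/JSON round trip. A toy sketch with a made-up Row result, just to show the folding side of that:

import collections as co

# hypothetical result with a leading index; only the folding behavior matters here
class Row(co.namedtuple('Row', 'i name cycles')):
    __slots__ = ()
    def __add__(self, other):
        return Row(self.i, self.name, self.cycles + other.cycles)

def fold(results):
    folded = {}
    for r in results:
        k = (r.i, r.name)  # distinct indices keep ordered rows apart
        folded[k] = folded[k] + r if k in folded else r
    return sorted(folded.values())

rows = [Row(1, 'lfs_bd_read', 8), Row(0, 'lfs_bd_read', 4), Row(1, 'lfs_bd_read', 2)]
print(fold(rows))  # [Row(i=0, ..., cycles=4), Row(i=1, ..., cycles=10)]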
||||||
@@ -757,7 +759,7 @@ def collect_decompressed(path, *,
|
|||||||
def to_results(results):
|
def to_results(results):
|
||||||
results_ = []
|
results_ = []
|
||||||
for name, (r, children) in results.items():
|
for name, (r, children) in results.items():
|
||||||
results_.append(PerfResult(*name,
|
results_.append(PerfResult(None, *name,
|
||||||
**{events[k]: v for k, v in r.items()},
|
**{events[k]: v for k, v in r.items()},
|
||||||
children=to_results(children)))
|
children=to_results(children)))
|
||||||
return results_
|
return results_
|
||||||
@@ -779,7 +781,7 @@ def starapply(args):
|
|||||||
f, args, kwargs = args
|
f, args, kwargs = args
|
||||||
return f(*args, **kwargs)
|
return f(*args, **kwargs)
|
||||||
|
|
||||||
def collect(perf_paths, *,
|
def collect_perf(perf_paths, *,
|
||||||
jobs=None,
|
jobs=None,
|
||||||
**args):
|
**args):
|
||||||
# automatic job detection?
|
# automatic job detection?
|
||||||
@@ -811,7 +813,31 @@ def collect(perf_paths, *,
|
|||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def fold(Result, results, by=None, defines=[]):
|
# common folding/tabling/read/write code
|
||||||
|
|
||||||
|
class Rev(co.namedtuple('Rev', 'x')):
|
||||||
|
__slots__ = ()
|
||||||
|
# yes we need all of these because we're a namedtuple
|
||||||
|
def __lt__(self, other):
|
||||||
|
return self.x > other.x
|
||||||
|
def __gt__(self, other):
|
||||||
|
return self.x < other.x
|
||||||
|
def __le__(self, other):
|
||||||
|
return self.x >= other.x
|
||||||
|
def __ge__(self, other):
|
||||||
|
return self.x <= other.x
|
||||||
|
|
||||||
|
def fold(Result, results, *,
|
||||||
|
by=None,
|
||||||
|
defines=[],
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
# stop when depth hits zero
|
||||||
|
if depth == 0:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# organize by by
|
||||||
if by is None:
|
if by is None:
|
||||||
by = Result._by
|
by = Result._by
|
||||||
|
|
||||||
@@ -825,7 +851,7 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
if defines:
|
if defines:
|
||||||
results_ = []
|
results_ = []
|
||||||
for r in results:
|
for r in results:
|
||||||
if all(getattr(r, k) in vs for k, vs in defines):
|
if all(str(getattr(r, k)) in vs for k, vs in defines):
|
||||||
results_.append(r)
|
results_.append(r)
|
||||||
results = results_
|
results = results_
|
||||||
|
|
||||||
@@ -842,8 +868,80 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
for name, rs in folding.items():
|
for name, rs in folding.items():
|
||||||
folded.append(sum(rs[1:], start=rs[0]))
|
folded.append(sum(rs[1:], start=rs[0]))
|
||||||
|
|
||||||
|
# sort, note that python's sort is stable
|
||||||
|
folded.sort(key=lambda r: (
|
||||||
|
# sort by explicit sort fields
|
||||||
|
tuple((Rev
|
||||||
|
if reverse ^ (not k or k in Result._fields)
|
||||||
|
else lambda x: x)(
|
||||||
|
tuple((getattr(r, k_),)
|
||||||
|
if getattr(r, k_) is not None
|
||||||
|
else ()
|
||||||
|
for k_ in ([k] if k else Result._sort)))
|
||||||
|
for k, reverse in (sort or [])),
|
||||||
|
# sort by result
|
||||||
|
r))
|
||||||
|
|
||||||
|
# recurse if we have recursive results
|
||||||
|
if hasattr(Result, '_children'):
|
||||||
|
folded = [r._replace(**{
|
||||||
|
Result._children: fold(
|
||||||
|
Result, getattr(r, Result._children),
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
sort=sort,
|
||||||
|
depth=depth-1)})
|
||||||
|
for r in folded]
|
||||||
|
|
||||||
return folded
|
return folded
|
||||||
|
|
||||||
|
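fold now sorts once, with the Rev wrapper flipping comparisons for the descending keys so ascending and descending fields can share a single key tuple (and, being a namedtuple, Rev has to override all four rich comparisons). A minimal standalone sketch of that trick, using a made-up list of (name, cycles) rows:

import collections as co

class Rev(co.namedtuple('Rev', 'x')):
    __slots__ = ()
    def __lt__(self, other): return self.x > other.x
    def __gt__(self, other): return self.x < other.x
    def __le__(self, other): return self.x >= other.x
    def __ge__(self, other): return self.x <= other.x

# sort cycles descending, then name ascending, in one stable pass
rows = [('lfs_mount', 4), ('lfs_format', 8), ('lfs_init', 8)]
rows.sort(key=lambda r: (Rev(r[1]), r[0]))
print(rows)  # [('lfs_format', 8), ('lfs_init', 8), ('lfs_mount', 4)]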
def hotify(Result, results, *,
|
||||||
|
fields=None,
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
hot=None,
|
||||||
|
**_):
|
||||||
|
# hotify only makes sense for recursive results
|
||||||
|
assert hasattr(Result, '_i')
|
||||||
|
assert hasattr(Result, '_children')
|
||||||
|
|
||||||
|
if fields is None:
|
||||||
|
fields = Result._fields
|
||||||
|
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
hot_ = []
|
||||||
|
def recurse(results_, depth_):
|
||||||
|
nonlocal hot_
|
||||||
|
if not results_:
|
||||||
|
return
|
||||||
|
|
||||||
|
# find the hottest result
|
||||||
|
r = min(results_, key=lambda r:
|
||||||
|
tuple((Rev
|
||||||
|
if reverse ^ (not k or k in Result._fields)
|
||||||
|
else lambda x: x)(
|
||||||
|
tuple((getattr(r, k_),)
|
||||||
|
if getattr(r, k_) is not None
|
||||||
|
else ()
|
||||||
|
for k_ in ([k] if k else Result._sort)))
|
||||||
|
for k, reverse in it.chain(hot, [(None, False)])))
|
||||||
|
|
||||||
|
hot_.append(r._replace(**{
|
||||||
|
Result._i: RInt(len(hot_)),
|
||||||
|
Result._children: []}))
|
||||||
|
|
||||||
|
# recurse?
|
||||||
|
if depth_ > 1:
|
||||||
|
recurse(getattr(r, Result._children),
|
||||||
|
depth_-1)
|
||||||
|
|
||||||
|
recurse(getattr(r, Result._children), depth-1)
|
||||||
|
results_.append(r._replace(**{
|
||||||
|
Result._children: hot_}))
|
||||||
|
|
||||||
|
return results_
|
||||||
|
|
||||||
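hotify collapses each result's call tree into a linear hot path: at every level only the child that wins the hot ordering is kept, it gets a running i index, and its own children are walked next. A rough sketch of the same reduction on plain dicts, without the Result/RInt machinery:

# hypothetical call tree: name, cycles, children
tree = {'name': 'lfs_mount', 'cycles': 100, 'children': [
    {'name': 'lfs_init', 'cycles': 30, 'children': []},
    {'name': 'lfs_scan', 'cycles': 60, 'children': [
        {'name': 'lfs_read', 'cycles': 50, 'children': []}]}]}

def hotify(node):
    # keep only the hottest child at each level, flattened into one list
    hot, children = [], node['children']
    while children:
        hottest = max(children, key=lambda c: c['cycles'])
        hot.append({'i': len(hot), 'name': hottest['name'],
            'cycles': hottest['cycles']})
        children = hottest['children']
    return dict(node, children=hot)

print(hotify(tree))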
def table(Result, results, diff_results=None, *,
|
def table(Result, results, diff_results=None, *,
|
||||||
by=None,
|
by=None,
|
||||||
fields=None,
|
fields=None,
|
||||||
@@ -869,124 +967,32 @@ def table(Result, results, diff_results=None, *,
|
|||||||
fields = Result._fields
|
fields = Result._fields
|
||||||
types = Result._types
|
types = Result._types
|
||||||
|
|
||||||
# fold again
|
# fold again, otherwise results risk being hidden
|
||||||
results = fold(Result, results, by=by)
|
results = fold(Result, results,
|
||||||
|
by=by,
|
||||||
|
depth=depth)
|
||||||
if diff_results is not None:
|
if diff_results is not None:
|
||||||
diff_results = fold(Result, diff_results, by=by)
|
diff_results = fold(Result, diff_results,
|
||||||
|
by=by,
|
||||||
# reduce children to hot paths? only used by some scripts
|
depth=depth)
|
||||||
if hot:
|
|
||||||
# subclass to reintroduce __dict__
|
|
||||||
Result_ = Result
|
|
||||||
class HotResult(Result_):
|
|
||||||
_i = '_hot_i'
|
|
||||||
_children = '_hot_children'
|
|
||||||
|
|
||||||
def __new__(cls, r, i=None, children=None, notes=None):
|
|
||||||
self = HotResult._make(r)
|
|
||||||
self._hot_i = i
|
|
||||||
self._hot_children = children if children is not None else []
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __add__(self, other):
|
|
||||||
return HotResult(
|
|
||||||
Result_.__add__(self, other),
|
|
||||||
self._hot_i if other._hot_i is None
|
|
||||||
else other._hot_i if self._hot_i is None
|
|
||||||
else min(self._hot_i, other._hot_i),
|
|
||||||
self._hot_children + other._hot_children)
|
|
||||||
|
|
||||||
results_ = []
|
|
||||||
for r in results:
|
|
||||||
hot_ = []
|
|
||||||
def recurse(results_, depth_):
|
|
||||||
nonlocal hot_
|
|
||||||
if not results_:
|
|
||||||
return
|
|
||||||
|
|
||||||
# find the hottest result
|
|
||||||
r = max(results_,
|
|
||||||
key=lambda r: tuple(
|
|
||||||
tuple((getattr(r, k),)
|
|
||||||
if getattr(r, k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])
|
|
||||||
if k in fields)
|
|
||||||
for k in it.chain(hot, [None])))
|
|
||||||
hot_.append(HotResult(r, i=len(hot_)))
|
|
||||||
|
|
||||||
# recurse?
|
|
||||||
if depth_ > 1:
|
|
||||||
recurse(getattr(r, Result._children),
|
|
||||||
depth_-1)
|
|
||||||
|
|
||||||
recurse(getattr(r, Result._children), depth-1)
|
|
||||||
results_.append(HotResult(r, children=hot_))
|
|
||||||
|
|
||||||
Result = HotResult
|
|
||||||
results = results_
|
|
||||||
|
|
||||||
# organize by name
|
# organize by name
|
||||||
table = {
|
table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results}
|
for r in results}
|
||||||
diff_table = {
|
diff_table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in diff_results or []}
|
for r in diff_results or []}
|
||||||
names = [name
|
|
||||||
for name in table.keys() | diff_table.keys()
|
|
||||||
if diff_results is None
|
|
||||||
or all_
|
|
||||||
or any(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(name), k, None),
|
|
||||||
getattr(diff_table.get(name), k, None))
|
|
||||||
for k in fields)]
|
|
||||||
|
|
||||||
# find compare entry if there is one
|
# find compare entry if there is one
|
||||||
if compare:
|
if compare:
|
||||||
compare_result = table.get(','.join(str(k) for k in compare))
|
compare_r = table.get(','.join(str(k) for k in compare))
|
||||||
|
|
||||||
# sort again, now with diff info, note that python's sort is stable
|
|
||||||
names.sort()
|
|
||||||
if compare:
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: (
|
|
||||||
# move compare entry to the top, note this can be
|
|
||||||
# overridden by explicitly sorting by fields
|
|
||||||
table.get(n) == compare_result,
|
|
||||||
# sort by ratio if comparing
|
|
||||||
tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(compare_result, k, None))
|
|
||||||
for k in fields)),
|
|
||||||
reverse=True)
|
|
||||||
if diff or percent:
|
|
||||||
names.sort(
|
|
||||||
# sort by ratio if diffing
|
|
||||||
key=lambda n: tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(diff_table.get(n), k, None))
|
|
||||||
for k in fields),
|
|
||||||
reverse=True)
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: tuple(
|
|
||||||
(getattr(table[n], k),)
|
|
||||||
if getattr(table.get(n), k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])),
|
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
|
||||||
|
|
||||||
|
|
||||||
# build up our lines
|
# build up our lines
|
||||||
lines = []
|
lines = []
|
||||||
@@ -1013,11 +1019,16 @@ def table(Result, results, diff_results=None, *,
|
|||||||
header.append('d'+k)
|
header.append('d'+k)
|
||||||
lines.append(header)
|
lines.append(header)
|
||||||
|
|
||||||
|
# delete these to try to catch typos below, we need to rebuild
|
||||||
|
# these tables at each recursive layer
|
||||||
|
del table
|
||||||
|
del diff_table
|
||||||
|
|
||||||
# entry helper
|
# entry helper
|
||||||
def table_entry(name, r, diff_r=None):
|
def table_entry(name, r, diff_r=None):
|
||||||
entry = [name]
|
entry = [name]
|
||||||
# normal entry?
|
# normal entry?
|
||||||
if ((compare is None or r == compare_result)
|
if ((compare is None or r == compare_r)
|
||||||
and not percent
|
and not percent
|
||||||
and not diff):
|
and not diff):
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -1038,7 +1049,7 @@ def table(Result, results, diff_results=None, *,
|
|||||||
else ['%+.1f%%' % (100*t)])(
|
else ['%+.1f%%' % (100*t)])(
|
||||||
types[k].ratio(
|
types[k].ratio(
|
||||||
getattr(r, k, None),
|
getattr(r, k, None),
|
||||||
getattr(compare_result, k, None)))))
|
getattr(compare_r, k, None)))))
|
||||||
# percent entry?
|
# percent entry?
|
||||||
elif not diff:
|
elif not diff:
|
||||||
for k in fields:
|
for k in fields:
|
||||||
@@ -1084,71 +1095,94 @@ def table(Result, results, diff_results=None, *,
|
|||||||
|
|
||||||
return entry
|
return entry
|
||||||
|
|
||||||
# recursive entry helper, only used by some scripts
|
# recursive entry helper
|
||||||
def recurse(results_, depth_,
|
def table_recurse(results_, diff_results_,
|
||||||
|
depth_,
|
||||||
prefixes=('', '', '', '')):
|
prefixes=('', '', '', '')):
|
||||||
# build the children table at each layer
|
# build the children table at each layer
|
||||||
results_ = fold(Result, results_, by=by)
|
|
||||||
table_ = {
|
table_ = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results_}
|
for r in results_}
|
||||||
names_ = list(table_.keys())
|
diff_table_ = {
|
||||||
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
|
for r in diff_results_ or []}
|
||||||
|
names_ = [n
|
||||||
|
for n in table_.keys() | diff_table_.keys()
|
||||||
|
if diff_results_ is None
|
||||||
|
or all_
|
||||||
|
or any(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(diff_table_.get(n), k, None))
|
||||||
|
for k in fields)]
|
||||||
|
|
||||||
# sort the children layer
|
# sort again, now with diff info, note that python's sort is stable
|
||||||
names_.sort()
|
names_.sort(key=lambda n: (
|
||||||
if hasattr(Result, '_i'):
|
# sort by explicit sort fields
|
||||||
names_.sort(key=lambda n: getattr(table_[n], Result._i))
|
tuple((Rev
|
||||||
if sort:
|
if reverse ^ (not k or k in Result._fields)
|
||||||
for k, reverse in reversed(sort):
|
else lambda x: x)(
|
||||||
names_.sort(
|
tuple((getattr(table_[n], k_),)
|
||||||
key=lambda n: tuple(
|
if getattr(table_.get(n), k_, None) is not None
|
||||||
(getattr(table_[n], k),)
|
else ()
|
||||||
if getattr(table_.get(n), k, None)
|
for k_ in ([k] if k else Result._sort)))
|
||||||
is not None
|
for k, reverse in (sort or [])),
|
||||||
else ()
|
# sort by ratio if diffing
|
||||||
for k in (
|
Rev(tuple(types[k].ratio(
|
||||||
[k] if k else [
|
getattr(table_.get(n), k, None),
|
||||||
k for k in Result._sort
|
getattr(diff_table_.get(n), k, None))
|
||||||
if k in fields])),
|
for k in fields))
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
if diff or percent
|
||||||
|
else (),
|
||||||
|
# move compare entry to the top, note this can be
|
||||||
|
# overridden by explicitly sorting by fields
|
||||||
|
(table_.get(n) != compare_r,
|
||||||
|
# sort by ratio if comparing
|
||||||
|
Rev(tuple(
|
||||||
|
types[k].ratio(
|
||||||
|
getattr(table_.get(n), k, None),
|
||||||
|
getattr(compare_r, k, None))
|
||||||
|
for k in fields)))
|
||||||
|
if compare
|
||||||
|
else (),
|
||||||
|
# sort by result
|
||||||
|
(table_[n],) if n in table_ else (),
|
||||||
|
# and finally by name (diffs may be missing results)
|
||||||
|
n))
|
||||||
|
|
||||||
for i, name in enumerate(names_):
|
for i, n in enumerate(names_):
|
||||||
r = table_[name]
|
# find comparable results
|
||||||
is_last = (i == len(names_)-1)
|
r = table_.get(n)
|
||||||
|
diff_r = diff_table_.get(n)
|
||||||
|
|
||||||
|
# build line
|
||||||
|
line = table_entry(n, r, diff_r)
|
||||||
|
|
||||||
line = table_entry(name, r)
|
|
||||||
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
|
||||||
# add prefixes
|
# add prefixes
|
||||||
line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
|
line = [x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
|
line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
|
||||||
lines.append(line)
|
lines.append(line)
|
||||||
|
|
||||||
# recurse?
|
# recurse?
|
||||||
if depth_ > 1:
|
if n in table_ and depth_ > 1:
|
||||||
recurse(getattr(r, Result._children),
|
table_recurse(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
getattr(diff_r, Result._children, None) or [],
|
||||||
depth_-1,
|
depth_-1,
|
||||||
(prefixes[2+is_last] + "|-> ",
|
(prefixes[2+(i==len(names_)-1)] + "|-> ",
|
||||||
prefixes[2+is_last] + "'-> ",
|
prefixes[2+(i==len(names_)-1)] + "'-> ",
|
||||||
prefixes[2+is_last] + "| ",
|
prefixes[2+(i==len(names_)-1)] + "| ",
|
||||||
prefixes[2+is_last] + " "))
|
prefixes[2+(i==len(names_)-1)] + " "))
|
||||||
|
|
||||||
# entries
|
# build entries
|
||||||
if not summary:
|
if not summary:
|
||||||
for name in names:
|
table_recurse(results, diff_results, depth)
|
||||||
r = table.get(name)
|
|
||||||
if diff_results is None:
|
|
||||||
diff_r = None
|
|
||||||
else:
|
|
||||||
diff_r = diff_table.get(name)
|
|
||||||
lines.append(table_entry(name, r, diff_r))
|
|
||||||
|
|
||||||
# recursive entries
|
|
||||||
if name in table and depth > 1:
|
|
||||||
recurse(getattr(table[name], Result._children),
|
|
||||||
depth-1,
|
|
||||||
("|-> ",
|
|
||||||
"'-> ",
|
|
||||||
"| ",
|
|
||||||
" "))
|
|
||||||
|
|
||||||
# total
|
# total
|
||||||
if not no_total and not (small_table and not summary):
|
if not no_total and not (small_table and not summary):
|
||||||
@@ -1160,9 +1194,8 @@ def table(Result, results, diff_results=None, *,
|
|||||||
lines.append(table_entry('TOTAL', r, diff_r))
|
lines.append(table_entry('TOTAL', r, diff_r))
|
||||||
|
|
||||||
# homogenize
|
# homogenize
|
||||||
lines = [
|
lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
|
||||||
[x if isinstance(x, tuple) else (x, []) for x in line]
|
for line in lines]
|
||||||
for line in lines]
|
|
||||||
|
|
||||||
# find the best widths, note that column 0 contains the names and is
|
# find the best widths, note that column 0 contains the names and is
|
||||||
# handled a bit differently
|
# handled a bit differently
|
||||||
@@ -1183,6 +1216,130 @@ def table(Result, results, diff_results=None, *,
|
|||||||
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
||||||
for i, x in enumerate(line[1:], 1))))
|
for i, x in enumerate(line[1:], 1))))
|
||||||
|
|
||||||
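table_recurse now takes the old results along for the ride, so diffs work on the tree too: at each level children are matched by their by-key, the union of names is rendered so entries that disappeared still show up, and unchanged rows are dropped unless -a/--all is given. A toy sketch of that per-level matching, with plain dicts standing in for the child tables:

# hypothetical children of one function: name -> cycles, new run vs old run
new = {'lfs_init': 30, 'lfs_scan': 60}
old = {'lfs_init': 30, 'lfs_bd_read': 10}

for name in sorted(new.keys() | old.keys()):
    a, b = new.get(name), old.get(name)
    if a == b:
        continue  # unchanged, hidden unless -a/--all
    print('%-12s %4s -> %4s' % (
        name, b if b is not None else '-', a if a is not None else '-'))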
|
def read_csv(path, Result, *,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'r') as f:
|
||||||
|
# csv or json? assume json starts with [
|
||||||
|
json = (f.buffer.peek(1)[:1] == b'[')
|
||||||
|
|
||||||
|
# read csv?
|
||||||
|
if not json:
|
||||||
|
results = []
|
||||||
|
reader = csv.DictReader(f, restval='')
|
||||||
|
for r in reader:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k].strip()}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k].strip()})))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results
|
||||||
|
|
||||||
|
# read json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
def unjsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| ({Result._children: unjsonify(
|
||||||
|
r[Result._children],
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and Result._children in r
|
||||||
|
and r[Result._children] is not None
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: set(r[Result._notes])}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and Result._notes in r
|
||||||
|
and r[Result._notes] is not None
|
||||||
|
else {}))))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results_
|
||||||
|
return unjsonify(json.load(f), depth)
|
||||||
|
|
||||||
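read_csv tells the two formats apart by peeking at the first byte of the underlying binary buffer and checking for '[', which is also why the commit message warns that JSON with leading whitespace would be misread as CSV. A standalone sketch of the same sniffing (assuming a regular text-mode file, not openio's stdin handling):

import csv, json

def sniff_load(path):
    with open(path, 'r') as f:
        # peek at the buffered bytes without consuming them; the scripts'
        # JSON always starts with '[' and has no leading whitespace
        if f.buffer.peek(1)[:1] == b'[':
            return json.load(f)
        return list(csv.DictReader(f, restval=''))

Since only the very first byte is examined, prettifying the JSON by hand (indentation, a leading newline) would silently flip it back down the CSV path.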
|
def write_csv(path, Result, results, *,
|
||||||
|
json=False,
|
||||||
|
by=None,
|
||||||
|
fields=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'w') as f:
|
||||||
|
# write csv?
|
||||||
|
if not json:
|
||||||
|
writer = csv.DictWriter(f,
|
||||||
|
(by if by is not None else Result._by)
|
||||||
|
+ [k for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)])
|
||||||
|
writer.writeheader()
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
writer.writerow(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None})
|
||||||
|
|
||||||
|
# write json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
# the neat thing about json is we can include recursive results
|
||||||
|
def jsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| ({Result._children: jsonify(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and getattr(r, Result._children)
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: list(
|
||||||
|
getattr(r, Result._notes))}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and getattr(r, Result._notes)
|
||||||
|
else {}))
|
||||||
|
return results_
|
||||||
|
json.dump(jsonify(results, depth), f,
|
||||||
|
separators=(',', ':'))
|
||||||
|
|
||||||
|
|
||||||
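Between write_csv(json=True) and read_csv the scripts get a lossless way to hand recursive results to each other: the CSV path flattens children away, while the JSON path keeps them up to -z/--depth levels. A quick round-trip sketch using the json module directly (the real helpers also want a Result class and openio):

import json, os, tempfile

# hypothetical recursive results, shaped like the -O/--output-json output
results = [{'function': 'lfs_mount', 'cycles': '100',
    'children': [{'function': 'lfs_init', 'cycles': '30'}]}]

path = os.path.join(tempfile.mkdtemp(), 'lfs.perf.json')
with open(path, 'w') as f:
    json.dump(results, f, separators=(',', ':'))
with open(path) as f:
    assert json.load(f) == results  # children survive the round trip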
def annotate(Result, results, *,
|
def annotate(Result, results, *,
|
||||||
annotate=None,
|
annotate=None,
|
||||||
@@ -1290,6 +1447,8 @@ def report(perf_paths, *,
|
|||||||
sort=None,
|
sort=None,
|
||||||
branches=False,
|
branches=False,
|
||||||
caches=False,
|
caches=False,
|
||||||
|
depth=None,
|
||||||
|
hot=None,
|
||||||
**args):
|
**args):
|
||||||
# figure out what color should be
|
# figure out what color should be
|
||||||
if args.get('color') == 'auto':
|
if args.get('color') == 'auto':
|
||||||
@@ -1300,10 +1459,10 @@ def report(perf_paths, *,
|
|||||||
args['color'] = False
|
args['color'] = False
|
||||||
|
|
||||||
# figure out depth
|
# figure out depth
|
||||||
if args.get('depth') is None:
|
if depth is None:
|
||||||
args['depth'] = mt.inf if args.get('hot') else 1
|
depth = mt.inf if hot else 1
|
||||||
elif args.get('depth') == 0:
|
elif depth == 0:
|
||||||
args['depth'] = mt.inf
|
depth = mt.inf
|
||||||
|
|
||||||
# find sizes
|
# find sizes
|
||||||
if not args.get('use', None):
|
if not args.get('use', None):
|
||||||
@@ -1314,87 +1473,61 @@ def report(perf_paths, *,
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# collect info
|
# collect info
|
||||||
results = collect(perf_paths, **args)
|
results = collect_perf(perf_paths,
|
||||||
|
depth=depth,
|
||||||
|
**args)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
results = []
|
results = read_csv(args['use'], PerfResult,
|
||||||
with openio(args['use']) as f:
|
depth=depth,
|
||||||
reader = csv.DictReader(f, restval='')
|
**args)
|
||||||
for r in reader:
|
|
||||||
# filter by matching defines
|
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in PerfResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
results.append(PerfResult(
|
|
||||||
**{k: r[k] for k in PerfResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in PerfResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
results = fold(PerfResult, results, by=by, defines=defines)
|
results = fold(PerfResult, results,
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
depth=depth)
|
||||||
|
|
||||||
# sort, note that python's sort is stable
|
# hotify?
|
||||||
results.sort()
|
if hot:
|
||||||
if sort:
|
results = hotify(PerfResult, results,
|
||||||
for k, reverse in reversed(sort):
|
fields=fields,
|
||||||
results.sort(
|
depth=depth,
|
||||||
key=lambda r: tuple(
|
hot=hot,
|
||||||
(getattr(r, k),) if getattr(r, k) is not None else ()
|
**args)
|
||||||
for k in ([k] if k else PerfResult._sort)),
|
|
||||||
reverse=reverse ^ (not k or k in PerfResult._fields))
|
|
||||||
|
|
||||||
# write results to CSV
|
# write results to CSV/JSON
|
||||||
if args.get('output'):
|
if args.get('output'):
|
||||||
with openio(args['output'], 'w') as f:
|
write_csv(args['output'], PerfResult, results,
|
||||||
writer = csv.DictWriter(f,
|
by=by,
|
||||||
(by if by is not None else PerfResult._by)
|
fields=fields,
|
||||||
+ [k for k in (
|
depth=depth,
|
||||||
fields if fields is not None
|
**args)
|
||||||
else PerfResult._fields)])
|
if args.get('output_json'):
|
||||||
writer.writeheader()
|
write_csv(args['output_json'], PerfResult, results,
|
||||||
for r in results:
|
json=True,
|
||||||
writer.writerow(
|
by=by,
|
||||||
{k: getattr(r, k) for k in (
|
fields=fields,
|
||||||
by if by is not None else PerfResult._by)}
|
depth=depth,
|
||||||
| {k: getattr(r, k) for k in (
|
**args)
|
||||||
fields if fields is not None
|
|
||||||
else PerfResult._fields)})
|
|
||||||
|
|
||||||
# find previous results?
|
# find previous results?
|
||||||
diff_results = None
|
diff_results = None
|
||||||
if args.get('diff') or args.get('percent'):
|
if args.get('diff') or args.get('percent'):
|
||||||
diff_results = []
|
|
||||||
try:
|
try:
|
||||||
with openio(args.get('diff') or args.get('percent')) as f:
|
diff_results = read_csv(
|
||||||
reader = csv.DictReader(f, restval='')
|
args.get('diff') or args.get('percent'),
|
||||||
for r in reader:
|
PerfResult,
|
||||||
# filter by matching defines
|
depth=depth,
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
**args)
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in PerfResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
diff_results.append(PerfResult(
|
|
||||||
**{k: r[k] for k in PerfResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in PerfResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
diff_results = []
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
diff_results = fold(PerfResult, diff_results, by=by, defines=defines)
|
diff_results = fold(PerfResult, diff_results,
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
depth=depth)
|
||||||
|
|
||||||
# print table
|
# print table
|
||||||
if not args.get('quiet'):
|
if not args.get('quiet'):
|
||||||
@@ -1413,6 +1546,7 @@ def report(perf_paths, *,
|
|||||||
else ['bmisses', 'branches'] if branches
|
else ['bmisses', 'branches'] if branches
|
||||||
else ['cmisses', 'caches'],
|
else ['cmisses', 'caches'],
|
||||||
sort=sort,
|
sort=sort,
|
||||||
|
depth=depth,
|
||||||
**args)
|
**args)
|
||||||
|
|
||||||
|
|
||||||
@@ -1430,7 +1564,7 @@ if __name__ == "__main__":
|
|||||||
# bit of a hack, but parse_intermixed_args and REMAINDER are
|
# bit of a hack, but parse_intermixed_args and REMAINDER are
|
||||||
# incompatible, so we need to figure out what we want before running
|
# incompatible, so we need to figure out what we want before running
|
||||||
# argparse
|
# argparse
|
||||||
if '-R' in sys.argv or '--record' in sys.argv:
|
if '--record' in sys.argv:
|
||||||
nargs = argparse.REMAINDER
|
nargs = argparse.REMAINDER
|
||||||
else:
|
else:
|
||||||
nargs = '*'
|
nargs = '*'
|
||||||
@@ -1456,24 +1590,28 @@ if __name__ == "__main__":
|
|||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-o', '--output',
|
'-o', '--output',
|
||||||
help="Specify CSV file to store results.")
|
help="Specify CSV file to store results.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-O', '--output-json',
|
||||||
|
help="Specify JSON file to store results. This may contain "
|
||||||
|
"recursive info.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-u', '--use',
|
'-u', '--use',
|
||||||
help="Don't parse anything, use this CSV file.")
|
help="Don't parse anything, use this CSV/JSON file.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-d', '--diff',
|
'-d', '--diff',
|
||||||
help="Specify CSV file to diff against.")
|
help="Specify CSV/JSON file to diff against.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-p', '--percent',
|
'-p', '--percent',
|
||||||
help="Specify CSV file to diff against, but only show precentage "
|
help="Specify CSV/JSON file to diff against, but only show "
|
||||||
"change, not a full diff.")
|
"percentage change, not a full diff.")
|
||||||
parser.add_argument(
|
|
||||||
'-a', '--all',
|
|
||||||
action='store_true',
|
|
||||||
help="Show all, not just the ones that changed.")
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-c', '--compare',
|
'-c', '--compare',
|
||||||
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
type=lambda x: tuple(v.strip() for v in x.split(',')),
|
||||||
help="Compare results to the row matching this by pattern.")
|
help="Compare results to the row matching this by pattern.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-a', '--all',
|
||||||
|
action='store_true',
|
||||||
|
help="Show all, not just the ones that changed.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-b', '--by',
|
'-b', '--by',
|
||||||
action='append',
|
action='append',
|
||||||
@@ -1499,7 +1637,7 @@ if __name__ == "__main__":
|
|||||||
def __call__(self, parser, namespace, value, option):
|
def __call__(self, parser, namespace, value, option):
|
||||||
if namespace.sort is None:
|
if namespace.sort is None:
|
||||||
namespace.sort = []
|
namespace.sort = []
|
||||||
namespace.sort.append((value, True if option == '-S' else False))
|
namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-s', '--sort',
|
'-s', '--sort',
|
||||||
nargs='?',
|
nargs='?',
|
||||||
@@ -1510,6 +1648,34 @@ if __name__ == "__main__":
|
|||||||
nargs='?',
|
nargs='?',
|
||||||
action=AppendSort,
|
action=AppendSort,
|
||||||
help="Sort by this field, but backwards.")
|
help="Sort by this field, but backwards.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-z', '--depth',
|
||||||
|
nargs='?',
|
||||||
|
type=lambda x: int(x, 0),
|
||||||
|
const=0,
|
||||||
|
help="Depth of function calls to show. 0 shows all calls unless "
|
||||||
|
"we find a cycle. Defaults to 0.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-g', '--propagate',
|
||||||
|
type=lambda x: int(x, 0),
|
||||||
|
help="Depth to propagate samples up the call-stack. 0 propagates "
|
||||||
|
"up to the entry point, 1 does no propagation. Defaults to 0.")
|
||||||
|
class AppendHot(argparse.Action):
|
||||||
|
def __call__(self, parser, namespace, value, option):
|
||||||
|
if namespace.hot is None:
|
||||||
|
namespace.hot = []
|
||||||
|
namespace.hot.append((value, option in {'-R', '--reverse-hot'}))
|
||||||
|
parser.add_argument(
|
||||||
|
'-r', '--hot',
|
||||||
|
nargs='?',
|
||||||
|
action=AppendHot,
|
||||||
|
help="Show only the hot path for each function call. Can "
|
||||||
|
"optionally provide fields like sort.")
|
||||||
|
parser.add_argument(
|
||||||
|
'-R', '--reverse-hot',
|
||||||
|
nargs='?',
|
||||||
|
action=AppendHot,
|
||||||
|
help="Like -r/--hot, but backwards.")
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--no-header',
|
'--no-header',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
@@ -1548,23 +1714,6 @@ if __name__ == "__main__":
|
|||||||
'--caches',
|
'--caches',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help="Show cache accesses and cache misses.")
|
help="Show cache accesses and cache misses.")
|
||||||
parser.add_argument(
|
|
||||||
'-g', '--propagate',
|
|
||||||
type=lambda x: int(x, 0),
|
|
||||||
help="Depth to propagate samples up the call-stack. 0 propagates "
|
|
||||||
"up to the entry point, 1 does no propagation. Defaults to 0.")
|
|
||||||
parser.add_argument(
|
|
||||||
'-z', '--depth',
|
|
||||||
nargs='?',
|
|
||||||
type=lambda x: int(x, 0),
|
|
||||||
const=0,
|
|
||||||
help="Depth of function calls to show. 0 shows all calls unless "
|
|
||||||
"we find a cycle. Defaults to 0.")
|
|
||||||
parser.add_argument(
|
|
||||||
'-t', '--hot',
|
|
||||||
nargs='?',
|
|
||||||
action='append',
|
|
||||||
help="Show only the hot path for each function call.")
|
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'-A', '--annotate',
|
'-A', '--annotate',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
@@ -1618,7 +1767,7 @@ if __name__ == "__main__":
|
|||||||
nargs=nargs,
|
nargs=nargs,
|
||||||
help="Command to run.")
|
help="Command to run.")
|
||||||
record_parser.add_argument(
|
record_parser.add_argument(
|
||||||
'-R', '--record',
|
'--record',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help="Run a command and aggregate perf measurements.")
|
help="Run a command and aggregate perf measurements.")
|
||||||
record_parser.add_argument(
|
record_parser.add_argument(
|
||||||
|
|||||||
@@ -19,6 +19,7 @@ import bisect
|
|||||||
import collections as co
|
import collections as co
|
||||||
import csv
|
import csv
|
||||||
import functools as ft
|
import functools as ft
|
||||||
|
import io
|
||||||
import itertools as it
|
import itertools as it
|
||||||
import math as mt
|
import math as mt
|
||||||
import multiprocessing as mp
|
import multiprocessing as mp
|
||||||
@@ -137,25 +138,26 @@ class RInt(co.namedtuple('RInt', 'x')):
|
|||||||
|
|
||||||
# perf results
|
# perf results
|
||||||
class PerfBdResult(co.namedtuple('PerfBdResult', [
|
class PerfBdResult(co.namedtuple('PerfBdResult', [
|
||||||
'file', 'function', 'line',
|
'i', 'file', 'function', 'line',
|
||||||
'readed', 'proged', 'erased',
|
'readed', 'proged', 'erased',
|
||||||
'children'])):
|
'children'])):
|
||||||
_by = ['file', 'function', 'line']
|
_by = ['i', 'file', 'function', 'line']
|
||||||
_fields = ['readed', 'proged', 'erased']
|
_fields = ['readed', 'proged', 'erased']
|
||||||
_sort = ['erased', 'proged', 'readed']
|
_sort = ['erased', 'proged', 'readed']
|
||||||
_types = {'readed': RInt, 'proged': RInt, 'erased': RInt}
|
_types = {'readed': RInt, 'proged': RInt, 'erased': RInt}
|
||||||
|
_i = 'i'
|
||||||
_children = 'children'
|
_children = 'children'
|
||||||
|
|
||||||
__slots__ = ()
|
__slots__ = ()
|
||||||
def __new__(cls, file='', function='', line=0,
|
def __new__(cls, i=None, file='', function='', line=0,
|
||||||
readed=0, proged=0, erased=0,
|
readed=0, proged=0, erased=0,
|
||||||
children=None):
|
children=None):
|
||||||
return super().__new__(cls, file, function, int(RInt(line)),
|
return super().__new__(cls, i, file, function, int(RInt(line)),
|
||||||
RInt(readed), RInt(proged), RInt(erased),
|
RInt(readed), RInt(proged), RInt(erased),
|
||||||
children if children is not None else [])
|
children if children is not None else [])
|
||||||
|
|
||||||
def __add__(self, other):
|
def __add__(self, other):
|
||||||
return PerfBdResult(self.file, self.function, self.line,
|
return PerfBdResult(self.i, self.file, self.function, self.line,
|
||||||
self.readed + other.readed,
|
self.readed + other.readed,
|
||||||
self.proged + other.proged,
|
self.proged + other.proged,
|
||||||
self.erased + other.erased,
|
self.erased + other.erased,
|
||||||
@@ -716,7 +718,7 @@ def collect_job(path, start, stop, syms, lines, *,
|
|||||||
def to_results(results):
|
def to_results(results):
|
||||||
results_ = []
|
results_ = []
|
||||||
for name, (r, p, e, children) in results.items():
|
for name, (r, p, e, children) in results.items():
|
||||||
results_.append(PerfBdResult(*name,
|
results_.append(PerfBdResult(None, *name,
|
||||||
r, p, e,
|
r, p, e,
|
||||||
children=to_results(children)))
|
children=to_results(children)))
|
||||||
return results_
|
return results_
|
||||||
@@ -727,7 +729,7 @@ def starapply(args):
|
|||||||
f, args, kwargs = args
|
f, args, kwargs = args
|
||||||
return f(*args, **kwargs)
|
return f(*args, **kwargs)
|
||||||
|
|
||||||
def collect(elf_path, trace_paths, *,
|
def collect_perfbd(elf_path, trace_paths, *,
|
||||||
jobs=None,
|
jobs=None,
|
||||||
**args):
|
**args):
|
||||||
# automatic job detection?
|
# automatic job detection?
|
||||||
@@ -781,7 +783,31 @@ def collect(elf_path, trace_paths, *,
|
|||||||
return results
|
return results
|
||||||
|
|
||||||
|
|
||||||
def fold(Result, results, by=None, defines=[]):
|
# common folding/tabling/read/write code
|
||||||
|
|
||||||
|
class Rev(co.namedtuple('Rev', 'x')):
|
||||||
|
__slots__ = ()
|
||||||
|
# yes we need all of these because we're a namedtuple
|
||||||
|
def __lt__(self, other):
|
||||||
|
return self.x > other.x
|
||||||
|
def __gt__(self, other):
|
||||||
|
return self.x < other.x
|
||||||
|
def __le__(self, other):
|
||||||
|
return self.x >= other.x
|
||||||
|
def __ge__(self, other):
|
||||||
|
return self.x <= other.x
|
||||||
|
|
||||||
|
def fold(Result, results, *,
|
||||||
|
by=None,
|
||||||
|
defines=[],
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
# stop when depth hits zero
|
||||||
|
if depth == 0:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# organize by by
|
||||||
if by is None:
|
if by is None:
|
||||||
by = Result._by
|
by = Result._by
|
||||||
|
|
||||||
@@ -795,7 +821,7 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
if defines:
|
if defines:
|
||||||
results_ = []
|
results_ = []
|
||||||
for r in results:
|
for r in results:
|
||||||
if all(getattr(r, k) in vs for k, vs in defines):
|
if all(str(getattr(r, k)) in vs for k, vs in defines):
|
||||||
results_.append(r)
|
results_.append(r)
|
||||||
results = results_
|
results = results_
|
||||||
|
|
||||||
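The defines filter now stringifies field values before matching, presumably because define values arrive from the command line as strings while fields like the new i index (or line) are integers. A tiny sketch of the difference:

# a define like i=1 given on the command line arrives as the string '1'
defines = [('i', {'1'})]
r = {'i': 1, 'function': 'lfs_mount'}

print(all(r[k] in vs for k, vs in defines))       # False, 1 != '1'
print(all(str(r[k]) in vs for k, vs in defines))  # True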
@@ -812,8 +838,80 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
for name, rs in folding.items():
|
for name, rs in folding.items():
|
||||||
folded.append(sum(rs[1:], start=rs[0]))
|
folded.append(sum(rs[1:], start=rs[0]))
|
||||||
|
|
||||||
|
# sort, note that python's sort is stable
|
||||||
|
folded.sort(key=lambda r: (
|
||||||
|
# sort by explicit sort fields
|
||||||
|
tuple((Rev
|
||||||
|
if reverse ^ (not k or k in Result._fields)
|
||||||
|
else lambda x: x)(
|
||||||
|
tuple((getattr(r, k_),)
|
||||||
|
if getattr(r, k_) is not None
|
||||||
|
else ()
|
||||||
|
for k_ in ([k] if k else Result._sort)))
|
||||||
|
for k, reverse in (sort or [])),
|
||||||
|
# sort by result
|
||||||
|
r))
|
||||||
|
|
||||||
|
# recurse if we have recursive results
|
||||||
|
if hasattr(Result, '_children'):
|
||||||
|
folded = [r._replace(**{
|
||||||
|
Result._children: fold(
|
||||||
|
Result, getattr(r, Result._children),
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
sort=sort,
|
||||||
|
depth=depth-1)})
|
||||||
|
for r in folded]
|
||||||
|
|
||||||
return folded
|
return folded
|
||||||
|
|
||||||
|
def hotify(Result, results, *,
|
||||||
|
fields=None,
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
hot=None,
|
||||||
|
**_):
|
||||||
|
# hotify only makes sense for recursive results
|
||||||
|
assert hasattr(Result, '_i')
|
||||||
|
assert hasattr(Result, '_children')
|
||||||
|
|
||||||
|
if fields is None:
|
||||||
|
fields = Result._fields
|
||||||
|
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
hot_ = []
|
||||||
|
def recurse(results_, depth_):
|
||||||
|
nonlocal hot_
|
||||||
|
if not results_:
|
||||||
|
return
|
||||||
|
|
||||||
|
# find the hottest result
|
||||||
|
r = min(results_, key=lambda r:
|
||||||
|
tuple((Rev
|
||||||
|
if reverse ^ (not k or k in Result._fields)
|
||||||
|
else lambda x: x)(
|
||||||
|
tuple((getattr(r, k_),)
|
||||||
|
if getattr(r, k_) is not None
|
||||||
|
else ()
|
||||||
|
for k_ in ([k] if k else Result._sort)))
|
||||||
|
for k, reverse in it.chain(hot, [(None, False)])))
|
||||||
|
|
||||||
|
hot_.append(r._replace(**{
|
||||||
|
Result._i: RInt(len(hot_)),
|
||||||
|
Result._children: []}))
|
||||||
|
|
||||||
|
# recurse?
|
||||||
|
if depth_ > 1:
|
||||||
|
recurse(getattr(r, Result._children),
|
||||||
|
depth_-1)
|
||||||
|
|
||||||
|
recurse(getattr(r, Result._children), depth-1)
|
||||||
|
results_.append(r._replace(**{
|
||||||
|
Result._children: hot_}))
|
||||||
|
|
||||||
|
return results_
|
||||||
|
|
||||||
def table(Result, results, diff_results=None, *,
|
def table(Result, results, diff_results=None, *,
|
||||||
by=None,
|
by=None,
|
||||||
fields=None,
|
fields=None,
|
||||||
@@ -839,124 +937,32 @@ def table(Result, results, diff_results=None, *,
|
|||||||
fields = Result._fields
|
fields = Result._fields
|
||||||
types = Result._types
|
types = Result._types
|
||||||
|
|
||||||
# fold again
|
# fold again, otherwise results risk being hidden
|
||||||
results = fold(Result, results, by=by)
|
results = fold(Result, results,
|
||||||
|
by=by,
|
||||||
|
depth=depth)
|
||||||
if diff_results is not None:
|
if diff_results is not None:
|
||||||
diff_results = fold(Result, diff_results, by=by)
|
diff_results = fold(Result, diff_results,
|
||||||
|
by=by,
|
||||||
# reduce children to hot paths? only used by some scripts
|
depth=depth)
|
||||||
if hot:
|
|
||||||
# subclass to reintroduce __dict__
|
|
||||||
Result_ = Result
|
|
||||||
class HotResult(Result_):
|
|
||||||
_i = '_hot_i'
|
|
||||||
_children = '_hot_children'
|
|
||||||
|
|
||||||
def __new__(cls, r, i=None, children=None, notes=None):
|
|
||||||
self = HotResult._make(r)
|
|
||||||
self._hot_i = i
|
|
||||||
self._hot_children = children if children is not None else []
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __add__(self, other):
|
|
||||||
return HotResult(
|
|
||||||
Result_.__add__(self, other),
|
|
||||||
self._hot_i if other._hot_i is None
|
|
||||||
else other._hot_i if self._hot_i is None
|
|
||||||
else min(self._hot_i, other._hot_i),
|
|
||||||
self._hot_children + other._hot_children)
|
|
||||||
|
|
||||||
results_ = []
|
|
||||||
for r in results:
|
|
||||||
hot_ = []
|
|
||||||
def recurse(results_, depth_):
|
|
||||||
nonlocal hot_
|
|
||||||
if not results_:
|
|
||||||
return
|
|
||||||
|
|
||||||
# find the hottest result
|
|
||||||
r = max(results_,
|
|
||||||
key=lambda r: tuple(
|
|
||||||
tuple((getattr(r, k),)
|
|
||||||
if getattr(r, k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])
|
|
||||||
if k in fields)
|
|
||||||
for k in it.chain(hot, [None])))
|
|
||||||
hot_.append(HotResult(r, i=len(hot_)))
|
|
||||||
|
|
||||||
# recurse?
|
|
||||||
if depth_ > 1:
|
|
||||||
recurse(getattr(r, Result._children),
|
|
||||||
depth_-1)
|
|
||||||
|
|
||||||
recurse(getattr(r, Result._children), depth-1)
|
|
||||||
results_.append(HotResult(r, children=hot_))
|
|
||||||
|
|
||||||
Result = HotResult
|
|
||||||
results = results_
|
|
||||||
|
|
||||||
# organize by name
|
# organize by name
|
||||||
table = {
|
table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in results}
|
for r in results}
|
||||||
diff_table = {
|
diff_table = {
|
||||||
','.join(str(getattr(r, k) or '') for k in by): r
|
','.join(str(getattr(r, k)
|
||||||
|
if getattr(r, k) is not None
|
||||||
|
else '')
|
||||||
|
for k in by): r
|
||||||
for r in diff_results or []}
|
for r in diff_results or []}
|
||||||
names = [name
|
|
||||||
for name in table.keys() | diff_table.keys()
|
|
||||||
if diff_results is None
|
|
||||||
or all_
|
|
||||||
or any(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(name), k, None),
|
|
||||||
getattr(diff_table.get(name), k, None))
|
|
||||||
for k in fields)]
|
|
||||||
|
|
||||||
# find compare entry if there is one
|
# find compare entry if there is one
|
||||||
if compare:
|
if compare:
|
||||||
compare_result = table.get(','.join(str(k) for k in compare))
|
compare_r = table.get(','.join(str(k) for k in compare))
|
||||||
|
|
||||||
# sort again, now with diff info, note that python's sort is stable
|
|
||||||
names.sort()
|
|
||||||
if compare:
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: (
|
|
||||||
# move compare entry to the top, note this can be
|
|
||||||
# overridden by explicitly sorting by fields
|
|
||||||
table.get(n) == compare_result,
|
|
||||||
# sort by ratio if comparing
|
|
||||||
tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(compare_result, k, None))
|
|
||||||
for k in fields)),
|
|
||||||
reverse=True)
|
|
||||||
if diff or percent:
|
|
||||||
names.sort(
|
|
||||||
# sort by ratio if diffing
|
|
||||||
key=lambda n: tuple(
|
|
||||||
types[k].ratio(
|
|
||||||
getattr(table.get(n), k, None),
|
|
||||||
getattr(diff_table.get(n), k, None))
|
|
||||||
for k in fields),
|
|
||||||
reverse=True)
|
|
||||||
if sort:
|
|
||||||
for k, reverse in reversed(sort):
|
|
||||||
names.sort(
|
|
||||||
key=lambda n: tuple(
|
|
||||||
(getattr(table[n], k),)
|
|
||||||
if getattr(table.get(n), k, None) is not None
|
|
||||||
else ()
|
|
||||||
for k in (
|
|
||||||
[k] if k else [
|
|
||||||
k for k in Result._sort
|
|
||||||
if k in fields])),
|
|
||||||
reverse=reverse ^ (not k or k in Result._fields))
|
|
||||||
|
|
||||||
|
|
||||||
# build up our lines
|
# build up our lines
|
||||||
lines = []
|
lines = []
|
||||||
@@ -983,11 +989,16 @@ def table(Result, results, diff_results=None, *,
|
|||||||
header.append('d'+k)
|
header.append('d'+k)
|
||||||
lines.append(header)
|
lines.append(header)
|
||||||
|
|
||||||
|
# delete these to try to catch typos below, we need to rebuild
|
||||||
|
# these tables at each recursive layer
|
||||||
|
del table
|
+del diff_table

 # entry helper
 def table_entry(name, r, diff_r=None):
 entry = [name]
 # normal entry?
-if ((compare is None or r == compare_result)
+if ((compare is None or r == compare_r)
 and not percent
 and not diff):
 for k in fields:
@@ -1008,7 +1019,7 @@ def table(Result, results, diff_results=None, *,
 else ['%+.1f%%' % (100*t)])(
 types[k].ratio(
 getattr(r, k, None),
-getattr(compare_result, k, None)))))
+getattr(compare_r, k, None)))))
 # percent entry?
 elif not diff:
 for k in fields:
@@ -1054,71 +1065,94 @@ def table(Result, results, diff_results=None, *,

 return entry

-# recursive entry helper, only used by some scripts
+# recursive entry helper
-def recurse(results_, depth_,
+def table_recurse(results_, diff_results_,
+depth_,
 prefixes=('', '', '', '')):
 # build the children table at each layer
-results_ = fold(Result, results_, by=by)
 table_ = {
-','.join(str(getattr(r, k) or '') for k in by): r
+','.join(str(getattr(r, k)
+if getattr(r, k) is not None
+else '')
+for k in by): r
 for r in results_}
-names_ = list(table_.keys())
+diff_table_ = {
+','.join(str(getattr(r, k)
+if getattr(r, k) is not None
+else '')
+for k in by): r
+for r in diff_results_ or []}
+names_ = [n
+for n in table_.keys() | diff_table_.keys()
+if diff_results_ is None
+or all_
+or any(
+types[k].ratio(
+getattr(table_.get(n), k, None),
+getattr(diff_table_.get(n), k, None))
+for k in fields)]

-# sort the children layer
+# sort again, now with diff info, note that python's sort is stable
-names_.sort()
+names_.sort(key=lambda n: (
-if hasattr(Result, '_i'):
+# sort by explicit sort fields
-names_.sort(key=lambda n: getattr(table_[n], Result._i))
+tuple((Rev
-if sort:
+if reverse ^ (not k or k in Result._fields)
-for k, reverse in reversed(sort):
+else lambda x: x)(
-names_.sort(
+tuple((getattr(table_[n], k_),)
-key=lambda n: tuple(
+if getattr(table_.get(n), k_, None) is not None
-(getattr(table_[n], k),)
+else ()
-if getattr(table_.get(n), k, None)
+for k_ in ([k] if k else Result._sort)))
-is not None
+for k, reverse in (sort or [])),
-else ()
+# sort by ratio if diffing
-for k in (
+Rev(tuple(types[k].ratio(
-[k] if k else [
+getattr(table_.get(n), k, None),
-k for k in Result._sort
+getattr(diff_table_.get(n), k, None))
-if k in fields])),
+for k in fields))
-reverse=reverse ^ (not k or k in Result._fields))
+if diff or percent
+else (),
+# move compare entry to the top, note this can be
+# overridden by explicitly sorting by fields
+(table_.get(n) != compare_r,
+# sort by ratio if comparing
+Rev(tuple(
+types[k].ratio(
+getattr(table_.get(n), k, None),
+getattr(compare_r, k, None))
+for k in fields)))
+if compare
+else (),
+# sort by result
+(table_[n],) if n in table_ else (),
+# and finally by name (diffs may be missing results)
+n))

-for i, name in enumerate(names_):
+for i, n in enumerate(names_):
-r = table_[name]
+# find comparable results
-is_last = (i == len(names_)-1)
+r = table_.get(n)
+diff_r = diff_table_.get(n)

+# build line
+line = table_entry(n, r, diff_r)
+
-line = table_entry(name, r)
-line = [x if isinstance(x, tuple) else (x, []) for x in line]
 # add prefixes
-line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
+line = [x if isinstance(x, tuple) else (x, []) for x in line]
+line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
 lines.append(line)

 # recurse?
-if depth_ > 1:
+if n in table_ and depth_ > 1:
-recurse(getattr(r, Result._children),
+table_recurse(
+getattr(r, Result._children),
+getattr(diff_r, Result._children, None) or [],
 depth_-1,
-(prefixes[2+is_last] + "|-> ",
+(prefixes[2+(i==len(names_)-1)] + "|-> ",
-prefixes[2+is_last] + "'-> ",
+prefixes[2+(i==len(names_)-1)] + "'-> ",
-prefixes[2+is_last] + "| ",
+prefixes[2+(i==len(names_)-1)] + "| ",
-prefixes[2+is_last] + " "))
+prefixes[2+(i==len(names_)-1)] + " "))

-# entries
+# build entries
 if not summary:
-for name in names:
+table_recurse(results, diff_results, depth)
-r = table.get(name)
-if diff_results is None:
-diff_r = None
-else:
-diff_r = diff_table.get(name)
-lines.append(table_entry(name, r, diff_r))
-
-# recursive entries
-if name in table and depth > 1:
-recurse(getattr(table[name], Result._children),
-depth-1,
-("|-> ",
-"'-> ",
-"| ",
-" "))

 # total
 if not no_total and not (small_table and not summary):
@@ -1130,9 +1164,8 @@ def table(Result, results, diff_results=None, *,
 lines.append(table_entry('TOTAL', r, diff_r))

 # homogenize
-lines = [
+lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
-[x if isinstance(x, tuple) else (x, []) for x in line]
+for line in lines]
-for line in lines]

 # find the best widths, note that column 0 contains the names and is
 # handled a bit differently
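As a rough illustration of the sort-key trick used above (not the script's exact code), wrapping a value in a comparison-reversing wrapper lets one stable ascending sort handle both ascending and descending fields at once; the names and numbers below are made up.

    import collections as co

    class Rev(co.namedtuple('Rev', 'x')):
        # reversed comparison, so an ascending sort orders these descending
        __slots__ = ()
        def __lt__(self, other):
            return self.x > other.x

    rows = [('lfs_file_write', 160), ('lfs_mount', 320), ('lfs_format', 320)]
    # descending by the numeric field, ascending by name, in a single sort
    rows.sort(key=lambda r: (Rev(r[1]), r[0]))
    print(rows)  # largest field first, ties broken alphabetically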
@@ -1153,6 +1186,130 @@ def table(Result, results, diff_results=None, *,
 nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
 for i, x in enumerate(line[1:], 1))))

+def read_csv(path, Result, *,
+depth=1,
+**_):
+with openio(path, 'r') as f:
+# csv or json? assume json starts with [
+json = (f.buffer.peek(1)[:1] == b'[')
+
+# read csv?
+if not json:
+results = []
+reader = csv.DictReader(f, restval='')
+for r in reader:
+if not any(k in r and r[k].strip()
+for k in Result._fields):
+continue
+try:
+# note this allows by/fields to overlap
+results.append(Result(**(
+{k: r[k] for k in Result._by
+if k in r and r[k].strip()}
+| {k: r[k] for k in Result._fields
+if k in r and r[k].strip()})))
+except TypeError:
+pass
+return results
+
+# read json?
+else:
+import json
+def unjsonify(results, depth_):
+results_ = []
+for r in results:
+if not any(k in r and r[k].strip()
+for k in Result._fields):
+continue
+try:
+# note this allows by/fields to overlap
+results_.append(Result(**(
+{k: r[k] for k in Result._by
+if k in r and r[k] is not None}
+| {k: r[k] for k in Result._fields
+if k in r and r[k] is not None}
+| ({Result._children: unjsonify(
+r[Result._children],
+depth_-1)}
+if hasattr(Result, '_children')
+and Result._children in r
+and r[Result._children] is not None
+and depth_ > 1
+else {})
+| ({Result._notes: set(r[Result._notes])}
+if hasattr(Result, '_notes')
+and Result._notes in r
+and r[Result._notes] is not None
+else {}))))
+except TypeError:
+pass
+return results_
+return unjsonify(json.load(f), depth)
+
+def write_csv(path, Result, results, *,
+json=False,
+by=None,
+fields=None,
+depth=1,
+**_):
+with openio(path, 'w') as f:
+# write csv?
+if not json:
+writer = csv.DictWriter(f,
+(by if by is not None else Result._by)
++ [k for k in (fields
+if fields is not None
+else Result._fields)])
+writer.writeheader()
+for r in results:
+# note this allows by/fields to overlap
+writer.writerow(
+{k: getattr(r, k)
+for k in (by
+if by is not None
+else Result._by)
+if getattr(r, k) is not None}
+| {k: str(getattr(r, k))
+for k in (fields
+if fields is not None
+else Result._fields)
+if getattr(r, k) is not None})
+
+# write json?
+else:
+import json
+# the neat thing about json is we can include recursive results
+def jsonify(results, depth_):
+results_ = []
+for r in results:
+# note this allows by/fields to overlap
+results_.append(
+{k: getattr(r, k)
+for k in (by
+if by is not None
+else Result._by)
+if getattr(r, k) is not None}
+| {k: str(getattr(r, k))
+for k in (fields
+if fields is not None
+else Result._fields)
+if getattr(r, k) is not None}
+| ({Result._children: jsonify(
+getattr(r, Result._children),
+depth_-1)}
+if hasattr(Result, '_children')
+and getattr(r, Result._children)
+and depth_ > 1
+else {})
+| ({Result._notes: list(
+getattr(r, Result._notes))}
+if hasattr(Result, '_notes')
+and getattr(r, Result._notes)
+else {}))
+return results_
+json.dump(jsonify(results, depth), f,
+separators=(',', ':'))

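The format sniffing in read_csv only has to tell a JSON array from a CSV header, so peeking at the first byte is enough. A minimal sketch of the same idea, assuming a text-mode file backed by a buffered binary stream (file names here are illustrative):

    import csv, json

    def read_any(path):
        with open(path, 'r') as f:
            # json output starts with '[', csv starts with a header field
            if f.buffer.peek(1)[:1] == b'[':
                return json.load(f)
            return list(csv.DictReader(f))

    # read_any('lfs.perfbd.csv') or read_any('lfs.perfbd.json') both give dicts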
def annotate(Result, results, *,
|
def annotate(Result, results, *,
|
||||||
annotate=None,
|
annotate=None,
|
||||||
@@ -1272,6 +1429,8 @@ def report(paths, *,
|
|||||||
fields=None,
|
fields=None,
|
||||||
defines=[],
|
defines=[],
|
||||||
sort=None,
|
sort=None,
|
||||||
|
depth=None,
|
||||||
|
hot=None,
|
||||||
**args):
|
**args):
|
||||||
# figure out what color should be
|
# figure out what color should be
|
||||||
if args.get('color') == 'auto':
|
if args.get('color') == 'auto':
|
||||||
@@ -1282,10 +1441,10 @@ def report(paths, *,
|
|||||||
args['color'] = False
|
args['color'] = False
|
||||||
|
|
||||||
# figure out depth
|
# figure out depth
|
||||||
if args.get('depth') is None:
|
if depth is None:
|
||||||
args['depth'] = mt.inf if args.get('hot') else 1
|
depth = mt.inf if hot else 1
|
||||||
elif args.get('depth') == 0:
|
elif depth == 0:
|
||||||
args['depth'] = mt.inf
|
depth = mt.inf
|
||||||
|
|
||||||
# find sizes
|
# find sizes
|
||||||
if not args.get('use', None):
|
if not args.get('use', None):
|
||||||
@@ -1313,87 +1472,61 @@ def report(paths, *,
|
|||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
# collect info
|
# collect info
|
||||||
results = collect(elf_paths[0], trace_paths, **args)
|
results = collect_perfbd(elf_paths[0], trace_paths,
|
||||||
|
depth=depth,
|
||||||
|
**args)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
results = []
|
results = read_csv(args['use'], PerfBdResult,
|
||||||
with openio(args['use']) as f:
|
depth=depth,
|
||||||
reader = csv.DictReader(f, restval='')
|
**args)
|
||||||
for r in reader:
|
|
||||||
# filter by matching defines
|
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in PerfBdResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
results.append(PerfBdResult(
|
|
||||||
**{k: r[k] for k in PerfBdResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in PerfBdResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
results = fold(PerfBdResult, results, by=by, defines=defines)
|
results = fold(PerfBdResult, results,
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
depth=depth)
|
||||||
|
|
||||||
# sort, note that python's sort is stable
|
# hotify?
|
||||||
results.sort()
|
if hot:
|
||||||
if sort:
|
results = hotify(PerfBdResult, results,
|
||||||
for k, reverse in reversed(sort):
|
fields=fields,
|
||||||
results.sort(
|
depth=depth,
|
||||||
key=lambda r: tuple(
|
hot=hot,
|
||||||
(getattr(r, k),) if getattr(r, k) is not None else ()
|
**args)
|
||||||
for k in ([k] if k else PerfBdResult._sort)),
|
|
||||||
reverse=reverse ^ (not k or k in PerfBdResult._fields))
|
|
||||||
|
|
||||||
# write results to CSV
|
# write results to CSV/JSON
|
||||||
if args.get('output'):
|
if args.get('output'):
|
||||||
with openio(args['output'], 'w') as f:
|
write_csv(args['output'], PerfBdResult, results,
|
||||||
writer = csv.DictWriter(f,
|
by=by,
|
||||||
(by if by is not None else PerfBdResult._by)
|
fields=fields,
|
||||||
+ [k for k in (
|
depth=depth,
|
||||||
fields if fields is not None
|
**args)
|
||||||
else PerfBdResult._fields)])
|
if args.get('output_json'):
|
||||||
writer.writeheader()
|
write_csv(args['output_json'], PerfBdResult, results,
|
||||||
for r in results:
|
json=True,
|
||||||
writer.writerow(
|
by=by,
|
||||||
{k: getattr(r, k) for k in (
|
fields=fields,
|
||||||
by if by is not None else PerfBdResult._by)}
|
depth=depth,
|
||||||
| {k: getattr(r, k) for k in (
|
**args)
|
||||||
fields if fields is not None
|
|
||||||
else PerfBdResult._fields)})
|
|
||||||
|
|
||||||
# find previous results?
|
# find previous results?
|
||||||
diff_results = None
|
diff_results = None
|
||||||
if args.get('diff') or args.get('percent'):
|
if args.get('diff') or args.get('percent'):
|
||||||
diff_results = []
|
|
||||||
try:
|
try:
|
||||||
with openio(args.get('diff') or args.get('percent')) as f:
|
diff_results = read_csv(
|
||||||
reader = csv.DictReader(f, restval='')
|
args.get('diff') or args.get('percent'),
|
||||||
for r in reader:
|
PerfBdResult,
|
||||||
# filter by matching defines
|
depth=depth,
|
||||||
if not all(k in r and r[k] in vs for k, vs in defines):
|
**args)
|
||||||
continue
|
|
||||||
|
|
||||||
if not any(k in r and r[k].strip()
|
|
||||||
for k in PerfBdResult._fields):
|
|
||||||
continue
|
|
||||||
try:
|
|
||||||
diff_results.append(PerfBdResult(
|
|
||||||
**{k: r[k] for k in PerfBdResult._by
|
|
||||||
if k in r and r[k].strip()},
|
|
||||||
**{k: r[k] for k in PerfBdResult._fields
|
|
||||||
if k in r and r[k].strip()}))
|
|
||||||
except TypeError:
|
|
||||||
pass
|
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
diff_results = []
|
||||||
|
|
||||||
# fold
|
# fold
|
||||||
diff_results = fold(PerfBdResult, diff_results, by=by, defines=defines)
|
diff_results = fold(PerfBdResult, diff_results,
|
||||||
|
by=by,
|
||||||
|
defines=defines,
|
||||||
|
depth=depth)
|
||||||
|
|
||||||
# print table
|
# print table
|
||||||
if not args.get('quiet'):
|
if not args.get('quiet'):
|
||||||
@@ -1410,6 +1543,7 @@ def report(paths, *,
|
|||||||
by=by if by is not None else ['function'],
|
by=by if by is not None else ['function'],
|
||||||
fields=fields,
|
fields=fields,
|
||||||
sort=sort,
|
sort=sort,
|
||||||
|
depth=depth,
|
||||||
**args)
|
**args)
|
||||||
|
|
||||||
|
|
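The depth defaulting above means -z/--depth and -r/--hot interact: with no flags only the top level is kept, an explicit -z0 means "unlimited", and -r/--hot implies unlimited depth so the whole hot path survives. A compact restatement of that rule, purely illustrative:

    import math as mt

    def effective_depth(depth, hot):
        # no -z: everything if hot paths were requested, else one level
        if depth is None:
            return mt.inf if hot else 1
        # -z0 is shorthand for unlimited depth
        if depth == 0:
            return mt.inf
        return depth

    assert effective_depth(None, None) == 1
    assert effective_depth(0, None) == mt.inf
    assert effective_depth(3, [('cycles', False)]) == 3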
@@ -1458,24 +1592,28 @@ if __name__ == "__main__":
 parser.add_argument(
 '-o', '--output',
 help="Specify CSV file to store results.")
+parser.add_argument(
+'-O', '--output-json',
+help="Specify JSON file to store results. This may contain "
+"recursive info.")
 parser.add_argument(
 '-u', '--use',
-help="Don't parse anything, use this CSV file.")
+help="Don't parse anything, use this CSV/JSON file.")
 parser.add_argument(
 '-d', '--diff',
-help="Specify CSV file to diff against.")
+help="Specify CSV/JSON file to diff against.")
 parser.add_argument(
 '-p', '--percent',
-help="Specify CSV file to diff against, but only show precentage "
+help="Specify CSV/JSON file to diff against, but only show "
-"change, not a full diff.")
+"percentage change, not a full diff.")
-parser.add_argument(
-'-a', '--all',
-action='store_true',
-help="Show all, not just the ones that changed.")
 parser.add_argument(
 '-c', '--compare',
 type=lambda x: tuple(v.strip() for v in x.split(',')),
 help="Compare results to the row matching this by pattern.")
+parser.add_argument(
+'-a', '--all',
+action='store_true',
+help="Show all, not just the ones that changed.")
 parser.add_argument(
 '-b', '--by',
 action='append',
@@ -1501,7 +1639,7 @@ if __name__ == "__main__":
 def __call__(self, parser, namespace, value, option):
 if namespace.sort is None:
 namespace.sort = []
-namespace.sort.append((value, True if option == '-S' else False))
+namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
 parser.add_argument(
 '-s', '--sort',
 nargs='?',
@@ -1512,6 +1650,34 @@ if __name__ == "__main__":
 nargs='?',
 action=AppendSort,
 help="Sort by this field, but backwards.")
+parser.add_argument(
+'-z', '--depth',
+nargs='?',
+type=lambda x: int(x, 0),
+const=0,
+help="Depth of function calls to show. 0 shows all calls unless "
+"we find a cycle. Defaults to 0.")
+parser.add_argument(
+'-g', '--propagate',
+type=lambda x: int(x, 0),
+help="Depth to propagate samples up the call-stack. 0 propagates "
+"up to the entry point, 1 does no propagation. Defaults to 0.")
+class AppendHot(argparse.Action):
+def __call__(self, parser, namespace, value, option):
+if namespace.hot is None:
+namespace.hot = []
+namespace.hot.append((value, option in {'-R', '--reverse-hot'}))
+parser.add_argument(
+'-r', '--hot',
+nargs='?',
+action=AppendHot,
+help="Show only the hot path for each function call. Can "
+"optionally provide fields like sort.")
+parser.add_argument(
+'-R', '--reverse-hot',
+nargs='?',
+action=AppendHot,
+help="Like -r/--hot, but backwards.")
 parser.add_argument(
 '--no-header',
 action='store_true',
@@ -1542,23 +1708,6 @@ if __name__ == "__main__":
 '--everything',
 action='store_true',
 help="Include builtin and libc specific symbols.")
-parser.add_argument(
-'-g', '--propagate',
-type=lambda x: int(x, 0),
-help="Depth to propagate samples up the call-stack. 0 propagates "
-"up to the entry point, 1 does no propagation. Defaults to 0.")
-parser.add_argument(
-'-z', '--depth',
-nargs='?',
-type=lambda x: int(x, 0),
-const=0,
-help="Depth of function calls to show. 0 shows all calls unless "
-"we find a cycle. Defaults to 0.")
-parser.add_argument(
-'-t', '--hot',
-nargs='?',
-action='append',
-help="Show only the hot path for each function call.")
 parser.add_argument(
 '-A', '--annotate',
 action='store_true',
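The AppendSort/AppendHot fix works because argparse hands the Action the exact option string the user typed, so the long spelling has to be checked too, not only '-S'. A small standalone sketch of the same pattern (not the script itself):

    import argparse

    class AppendSort(argparse.Action):
        def __call__(self, parser, namespace, value, option):
            if namespace.sort is None:
                namespace.sort = []
            # reverse only when the reversed spelling was used, long or short
            namespace.sort.append((value, option in {'-S', '--reverse-sort'}))

    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--sort', nargs='?', action=AppendSort)
    parser.add_argument('-S', '--reverse-sort', nargs='?', action=AppendSort)
    print(parser.parse_args(['--reverse-sort', 'limit']).sort)  # [('limit', True)]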
683 scripts/stack.py
@@ -16,8 +16,9 @@ if __name__ == "__main__":

 import collections as co
 import csv
-import itertools as it
 import functools as ft
+import io
+import itertools as it
 import math as mt
 import os
 import re
@@ -130,28 +131,29 @@ class RInt(co.namedtuple('RInt', 'x')):
 def __mod__(self, other):
 return self.__class__(self.x % other.x)

-# size results
+# stack size results
 class StackResult(co.namedtuple('StackResult', [
-'file', 'function',
+'i', 'file', 'function',
 'frame', 'limit',
 'children', 'notes'])):
-_by = ['file', 'function']
+_by = ['i', 'file', 'function']
 _fields = ['frame', 'limit']
 _sort = ['limit', 'frame']
 _types = {'frame': RInt, 'limit': RInt}
+_i = 'i'
 _children = 'children'
 _notes = 'notes'

 __slots__ = ()
-def __new__(cls, file='', function='', frame=0, limit=0,
+def __new__(cls, i=None, file='', function='', frame=0, limit=0,
 children=None, notes=None):
-return super().__new__(cls, file, function,
+return super().__new__(cls, i, file, function,
 RInt(frame), RInt(limit),
 children if children is not None else [],
 notes if notes is not None else set())

 def __add__(self, other):
-return StackResult(self.file, self.function,
+return StackResult(self.i, self.file, self.function,
 self.frame + other.frame,
 max(self.limit, other.limit),
 self.children + other.children,
@@ -319,7 +321,7 @@ def collect_callgraph(ci_path,

 return cg_

-def collect(ci_paths, *,
+def collect_stack(ci_paths, *,
 everything=False,
 depth=1,
 **args):
@@ -415,7 +417,7 @@ def collect(ci_paths, *,
 limit_ = limitof(node_, seen | {node.name})
 children_, notes_, dirty_ = childrenof(
 node_, depth-1, seen | {node.name})
-children.append(StackResult(file_, name_, frame_, limit_,
+children.append(StackResult(None, file_, name_, frame_, limit_,
 children=children_,
 notes=notes_))
 dirty = dirty or dirty_
@@ -438,14 +440,38 @@ def collect(ci_paths, *,
 frame = frameof(node)
 limit = limitof(node)
 children, notes, _ = childrenof(node, depth-1)
-results.append(StackResult(file, name, frame, limit,
+results.append(StackResult(None, file, name, frame, limit,
 children=children,
 notes=notes))

 return results

-def fold(Result, results, by=None, defines=[]):
+# common folding/tabling/read/write code
+
+class Rev(co.namedtuple('Rev', 'x')):
+__slots__ = ()
+# yes we need all of these because we're a namedtuple
+def __lt__(self, other):
+return self.x > other.x
+def __gt__(self, other):
+return self.x < other.x
+def __le__(self, other):
+return self.x >= other.x
+def __ge__(self, other):
+return self.x <= other.x
+
+def fold(Result, results, *,
+by=None,
+defines=[],
+sort=None,
+depth=1,
+**_):
+# stop when depth hits zero
+if depth == 0:
+return []
+
+# organize by by
 if by is None:
 by = Result._by

@@ -459,7 +485,7 @@ def fold(Result, results, by=None, defines=[]):
 if defines:
 results_ = []
 for r in results:
-if all(getattr(r, k) in vs for k, vs in defines):
+if all(str(getattr(r, k)) in vs for k, vs in defines):
 results_.append(r)
 results = results_

@@ -476,8 +502,80 @@ def fold(Result, results, by=None, defines=[]):
 for name, rs in folding.items():
 folded.append(sum(rs[1:], start=rs[0]))

+# sort, note that python's sort is stable
+folded.sort(key=lambda r: (
+# sort by explicit sort fields
+tuple((Rev
+if reverse ^ (not k or k in Result._fields)
+else lambda x: x)(
+tuple((getattr(r, k_),)
+if getattr(r, k_) is not None
+else ()
+for k_ in ([k] if k else Result._sort)))
+for k, reverse in (sort or [])),
+# sort by result
+r))
+
+# recurse if we have recursive results
+if hasattr(Result, '_children'):
+folded = [r._replace(**{
+Result._children: fold(
+Result, getattr(r, Result._children),
+by=by,
+defines=defines,
+sort=sort,
+depth=depth-1)})
+for r in folded]
+
 return folded

+def hotify(Result, results, *,
+fields=None,
+sort=None,
+depth=1,
+hot=None,
+**_):
+# hotify only makes sense for recursive results
+assert hasattr(Result, '_i')
+assert hasattr(Result, '_children')
+
+if fields is None:
+fields = Result._fields
+
+results_ = []
+for r in results:
+hot_ = []
+def recurse(results_, depth_):
+nonlocal hot_
+if not results_:
+return
+
+# find the hottest result
+r = min(results_, key=lambda r:
+tuple((Rev
+if reverse ^ (not k or k in Result._fields)
+else lambda x: x)(
+tuple((getattr(r, k_),)
+if getattr(r, k_) is not None
+else ()
+for k_ in ([k] if k else Result._sort)))
+for k, reverse in it.chain(hot, [(None, False)])))
+
+hot_.append(r._replace(**{
+Result._i: RInt(len(hot_)),
+Result._children: []}))
+
+# recurse?
+if depth_ > 1:
+recurse(getattr(r, Result._children),
+depth_-1)
+
+recurse(getattr(r, Result._children), depth-1)
+results_.append(r._replace(**{
+Result._children: hot_}))
+
+return results_
+
 def table(Result, results, diff_results=None, *,
 by=None,
 fields=None,
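hotify flattens each function's call tree into just its hottest chain: at every level it keeps the child that sorts first, numbers it with the _i field so the table can preserve the order, and drops the rest. A toy version of that walk, assuming plain dicts instead of Result tuples and made-up names:

    def hot_path(node, key):
        # walk the children, always following the "hottest" child
        path = []
        children = node.get('children', [])
        while children:
            hottest = min(children, key=key)
            path.append({k: v for k, v in hottest.items() if k != 'children'})
            children = hottest.get('children', [])
        return path

    tree = {'function': 'lfs_mount', 'children': [
        {'function': 'lfs_dir_fetch', 'limit': 96, 'children': []},
        {'function': 'lfs_bd_read', 'limit': 160, 'children': []}]}
    # sort key picks the largest limit first, mirroring -r/--hot
    print(hot_path(tree, key=lambda r: -r['limit']))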
@@ -503,124 +601,32 @@ def table(Result, results, diff_results=None, *,
 fields = Result._fields
 types = Result._types

-# fold again
+# fold again, otherwise results risk being hidden
-results = fold(Result, results, by=by)
+results = fold(Result, results,
+by=by,
+depth=depth)
 if diff_results is not None:
-diff_results = fold(Result, diff_results, by=by)
+diff_results = fold(Result, diff_results,
+by=by,
-# reduce children to hot paths? only used by some scripts
+depth=depth)
-if hot:
-# subclass to reintroduce __dict__
-Result_ = Result
-class HotResult(Result_):
-_i = '_hot_i'
-_children = '_hot_children'
-
-def __new__(cls, r, i=None, children=None, notes=None):
-self = HotResult._make(r)
-self._hot_i = i
-self._hot_children = children if children is not None else []
-return self
-
-def __add__(self, other):
-return HotResult(
-Result_.__add__(self, other),
-self._hot_i if other._hot_i is None
-else other._hot_i if self._hot_i is None
-else min(self._hot_i, other._hot_i),
-self._hot_children + other._hot_children)
-
-results_ = []
-for r in results:
-hot_ = []
-def recurse(results_, depth_):
-nonlocal hot_
-if not results_:
-return
-
-# find the hottest result
-r = max(results_,
-key=lambda r: tuple(
-tuple((getattr(r, k),)
-if getattr(r, k, None) is not None
-else ()
-for k in (
-[k] if k else [
-k for k in Result._sort
-if k in fields])
-if k in fields)
-for k in it.chain(hot, [None])))
-hot_.append(HotResult(r, i=len(hot_)))
-
-# recurse?
-if depth_ > 1:
-recurse(getattr(r, Result._children),
-depth_-1)
-
-recurse(getattr(r, Result._children), depth-1)
-results_.append(HotResult(r, children=hot_))
-
-Result = HotResult
-results = results_
-
 # organize by name
 table = {
-','.join(str(getattr(r, k) or '') for k in by): r
+','.join(str(getattr(r, k)
+if getattr(r, k) is not None
+else '')
+for k in by): r
 for r in results}
 diff_table = {
-','.join(str(getattr(r, k) or '') for k in by): r
+','.join(str(getattr(r, k)
+if getattr(r, k) is not None
+else '')
+for k in by): r
 for r in diff_results or []}
-names = [name
-for name in table.keys() | diff_table.keys()
-if diff_results is None
-or all_
-or any(
-types[k].ratio(
-getattr(table.get(name), k, None),
-getattr(diff_table.get(name), k, None))
-for k in fields)]

 # find compare entry if there is one
 if compare:
-compare_result = table.get(','.join(str(k) for k in compare))
+compare_r = table.get(','.join(str(k) for k in compare))

-# sort again, now with diff info, note that python's sort is stable
-names.sort()
-if compare:
-names.sort(
-key=lambda n: (
-# move compare entry to the top, note this can be
-# overridden by explicitly sorting by fields
-table.get(n) == compare_result,
-# sort by ratio if comparing
-tuple(
-types[k].ratio(
-getattr(table.get(n), k, None),
-getattr(compare_result, k, None))
-for k in fields)),
-reverse=True)
-if diff or percent:
-names.sort(
-# sort by ratio if diffing
-key=lambda n: tuple(
-types[k].ratio(
-getattr(table.get(n), k, None),
-getattr(diff_table.get(n), k, None))
-for k in fields),
-reverse=True)
-if sort:
-for k, reverse in reversed(sort):
-names.sort(
-key=lambda n: tuple(
-(getattr(table[n], k),)
-if getattr(table.get(n), k, None) is not None
-else ()
-for k in (
-[k] if k else [
-k for k in Result._sort
-if k in fields])),
-reverse=reverse ^ (not k or k in Result._fields))
-

 # build up our lines
 lines = []
@@ -647,11 +653,16 @@ def table(Result, results, diff_results=None, *,
 header.append('d'+k)
 lines.append(header)

+# delete these to try to catch typos below, we need to rebuild
+# these tables at each recursive layer
+del table
+del diff_table
+
 # entry helper
 def table_entry(name, r, diff_r=None):
 entry = [name]
 # normal entry?
-if ((compare is None or r == compare_result)
+if ((compare is None or r == compare_r)
 and not percent
 and not diff):
 for k in fields:
@@ -672,7 +683,7 @@ def table(Result, results, diff_results=None, *,
 else ['%+.1f%%' % (100*t)])(
 types[k].ratio(
 getattr(r, k, None),
-getattr(compare_result, k, None)))))
+getattr(compare_r, k, None)))))
 # percent entry?
 elif not diff:
 for k in fields:
@@ -718,71 +729,94 @@ def table(Result, results, diff_results=None, *,

 return entry

-# recursive entry helper, only used by some scripts
+# recursive entry helper
-def recurse(results_, depth_,
+def table_recurse(results_, diff_results_,
+depth_,
 prefixes=('', '', '', '')):
 # build the children table at each layer
-results_ = fold(Result, results_, by=by)
 table_ = {
-','.join(str(getattr(r, k) or '') for k in by): r
+','.join(str(getattr(r, k)
+if getattr(r, k) is not None
+else '')
+for k in by): r
 for r in results_}
-names_ = list(table_.keys())
+diff_table_ = {
+','.join(str(getattr(r, k)
+if getattr(r, k) is not None
+else '')
+for k in by): r
+for r in diff_results_ or []}
+names_ = [n
+for n in table_.keys() | diff_table_.keys()
+if diff_results_ is None
+or all_
+or any(
+types[k].ratio(
+getattr(table_.get(n), k, None),
+getattr(diff_table_.get(n), k, None))
+for k in fields)]

-# sort the children layer
+# sort again, now with diff info, note that python's sort is stable
-names_.sort()
+names_.sort(key=lambda n: (
-if hasattr(Result, '_i'):
+# sort by explicit sort fields
-names_.sort(key=lambda n: getattr(table_[n], Result._i))
+tuple((Rev
-if sort:
+if reverse ^ (not k or k in Result._fields)
-for k, reverse in reversed(sort):
+else lambda x: x)(
-names_.sort(
+tuple((getattr(table_[n], k_),)
-key=lambda n: tuple(
+if getattr(table_.get(n), k_, None) is not None
-(getattr(table_[n], k),)
+else ()
-if getattr(table_.get(n), k, None)
+for k_ in ([k] if k else Result._sort)))
-is not None
+for k, reverse in (sort or [])),
-else ()
+# sort by ratio if diffing
-for k in (
+Rev(tuple(types[k].ratio(
-[k] if k else [
+getattr(table_.get(n), k, None),
-k for k in Result._sort
+getattr(diff_table_.get(n), k, None))
-if k in fields])),
+for k in fields))
-reverse=reverse ^ (not k or k in Result._fields))
+if diff or percent
+else (),
+# move compare entry to the top, note this can be
+# overridden by explicitly sorting by fields
+(table_.get(n) != compare_r,
+# sort by ratio if comparing
+Rev(tuple(
+types[k].ratio(
+getattr(table_.get(n), k, None),
+getattr(compare_r, k, None))
+for k in fields)))
+if compare
+else (),
+# sort by result
+(table_[n],) if n in table_ else (),
+# and finally by name (diffs may be missing results)
+n))

-for i, name in enumerate(names_):
+for i, n in enumerate(names_):
-r = table_[name]
+# find comparable results
-is_last = (i == len(names_)-1)
+r = table_.get(n)
+diff_r = diff_table_.get(n)

+# build line
+line = table_entry(n, r, diff_r)
+
-line = table_entry(name, r)
-line = [x if isinstance(x, tuple) else (x, []) for x in line]
 # add prefixes
-line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
+line = [x if isinstance(x, tuple) else (x, []) for x in line]
+line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
 lines.append(line)

 # recurse?
-if depth_ > 1:
+if n in table_ and depth_ > 1:
-recurse(getattr(r, Result._children),
+table_recurse(
+getattr(r, Result._children),
+getattr(diff_r, Result._children, None) or [],
 depth_-1,
-(prefixes[2+is_last] + "|-> ",
+(prefixes[2+(i==len(names_)-1)] + "|-> ",
-prefixes[2+is_last] + "'-> ",
+(prefixes[2+(i==len(names_)-1)] + "'-> ",
-prefixes[2+is_last] + "| ",
+prefixes[2+(i==len(names_)-1)] + "| ",
-prefixes[2+is_last] + " "))
+prefixes[2+(i==len(names_)-1)] + " "))

-# entries
+# build entries
 if not summary:
-for name in names:
+table_recurse(results, diff_results, depth)
-r = table.get(name)
-if diff_results is None:
-diff_r = None
-else:
-diff_r = diff_table.get(name)
-lines.append(table_entry(name, r, diff_r))
-
-# recursive entries
-if name in table and depth > 1:
-recurse(getattr(table[name], Result._children),
-depth-1,
-("|-> ",
-"'-> ",
-"| ",
-" "))

 # total
 if not no_total and not (small_table and not summary):
@@ -794,9 +828,8 @@ def table(Result, results, diff_results=None, *,
 lines.append(table_entry('TOTAL', r, diff_r))

 # homogenize
-lines = [
+lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
-[x if isinstance(x, tuple) else (x, []) for x in line]
+for line in lines]
-for line in lines]

 # find the best widths, note that column 0 contains the names and is
 # handled a bit differently
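With table_recurse the rendered table keeps the tree shape even when diffing, threading the "|-> "/"'-> " prefixes down through each layer. A toy sketch of that prefix threading, not the script's exact code (names and spacing are illustrative):

    def render(nodes, prefixes=('', '', '', '')):
        # prefixes = (label, last label, continuation, last continuation)
        lines = []
        for i, (name, children) in enumerate(nodes):
            last = (i == len(nodes) - 1)
            lines.append(prefixes[0+last] + name)
            lines += render(children,
                (prefixes[2+last] + "|-> ", prefixes[2+last] + "'-> ",
                 prefixes[2+last] + "|   ", prefixes[2+last] + "    "))
        return lines

    tree = [("lfs_format", [("lfs_dir_commit", []), ("lfs_bd_sync", [])])]
    print('\n'.join(render(tree)))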
@@ -817,18 +850,144 @@ def table(Result, results, diff_results=None, *,
|
|||||||
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
|
||||||
for i, x in enumerate(line[1:], 1))))
|
for i, x in enumerate(line[1:], 1))))
|
||||||
|
|
||||||
|
def read_csv(path, Result, *,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'r') as f:
|
||||||
|
# csv or json? assume json starts with [
|
||||||
|
json = (f.buffer.peek(1)[:1] == b'[')
|
||||||
|
|
||||||
|
# read csv?
|
||||||
|
if not json:
|
||||||
|
results = []
|
||||||
|
reader = csv.DictReader(f, restval='')
|
||||||
|
for r in reader:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k].strip()}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k].strip()})))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results
|
||||||
|
|
||||||
|
# read json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
def unjsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
if not any(k in r and r[k].strip()
|
||||||
|
for k in Result._fields):
|
||||||
|
continue
|
||||||
|
try:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(Result(**(
|
||||||
|
{k: r[k] for k in Result._by
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| {k: r[k] for k in Result._fields
|
||||||
|
if k in r and r[k] is not None}
|
||||||
|
| ({Result._children: unjsonify(
|
||||||
|
r[Result._children],
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and Result._children in r
|
||||||
|
and r[Result._children] is not None
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: set(r[Result._notes])}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and Result._notes in r
|
||||||
|
and r[Result._notes] is not None
|
||||||
|
else {}))))
|
||||||
|
except TypeError:
|
||||||
|
pass
|
||||||
|
return results_
|
||||||
|
return unjsonify(json.load(f), depth)
|
||||||
|
|
||||||
|
def write_csv(path, Result, results, *,
|
||||||
|
json=False,
|
||||||
|
by=None,
|
||||||
|
fields=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
with openio(path, 'w') as f:
|
||||||
|
# write csv?
|
||||||
|
if not json:
|
||||||
|
writer = csv.DictWriter(f,
|
||||||
|
(by if by is not None else Result._by)
|
||||||
|
+ [k for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)])
|
||||||
|
writer.writeheader()
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
writer.writerow(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None})
|
||||||
|
|
||||||
|
# write json?
|
||||||
|
else:
|
||||||
|
import json
|
||||||
|
# the neat thing about json is we can include recursive results
|
||||||
|
def jsonify(results, depth_):
|
||||||
|
results_ = []
|
||||||
|
for r in results:
|
||||||
|
# note this allows by/fields to overlap
|
||||||
|
results_.append(
|
||||||
|
{k: getattr(r, k)
|
||||||
|
for k in (by
|
||||||
|
if by is not None
|
||||||
|
else Result._by)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| {k: str(getattr(r, k))
|
||||||
|
for k in (fields
|
||||||
|
if fields is not None
|
||||||
|
else Result._fields)
|
||||||
|
if getattr(r, k) is not None}
|
||||||
|
| ({Result._children: jsonify(
|
||||||
|
getattr(r, Result._children),
|
||||||
|
depth_-1)}
|
||||||
|
if hasattr(Result, '_children')
|
||||||
|
and getattr(r, Result._children)
|
||||||
|
and depth_ > 1
|
||||||
|
else {})
|
||||||
|
| ({Result._notes: list(
|
||||||
|
getattr(r, Result._notes))}
|
||||||
|
if hasattr(Result, '_notes')
|
||||||
|
and getattr(r, Result._notes)
|
||||||
|
else {}))
|
||||||
|
return results_
|
||||||
|
json.dump(jsonify(results, depth), f,
|
||||||
|
separators=(',', ':'))
|
||||||
|
|
||||||
|
|
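Since jsonify nests the children field, the -O/--output-json output can be fed straight back in with -u/-d/-p and still carry the call tree. Roughly, using StackResult's fields with made-up values:

    [{"function": "lfs_format", "frame": "80", "limit": "464",
      "children": [{"function": "lfs_dir_commit", "frame": "64", "limit": "384"}]}]

Note the numeric fields are serialized with str() on the write side, so the read side can re-parse them through the Result field types.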
 def main(ci_paths,
 by=None,
 fields=None,
 defines=[],
 sort=None,
+depth=None,
+hot=None,
 **args):
 # figure out depth
-if args.get('depth') is None:
+if depth is None:
-args['depth'] = mt.inf if args.get('hot') else 1
+depth = mt.inf if hot else 1
-elif args.get('depth') == 0:
+elif depth == 0:
-args['depth'] = mt.inf
+depth = mt.inf

 # find sizes
 if not args.get('use', None):
@@ -839,87 +998,61 @@ def main(ci_paths,
 sys.exit(1)

 # collect info
-results = collect(ci_paths, **args)
+results = collect_stack(ci_paths,
+depth=depth,
+**args)

 else:
-results = []
+results = read_csv(args['use'], StackResult,
-with openio(args['use']) as f:
+depth=depth,
-reader = csv.DictReader(f, restval='')
+**args)
-for r in reader:
-# filter by matching defines
-if not all(k in r and r[k] in vs for k, vs in defines):
-continue
-
-if not any(k in r and r[k].strip()
-for k in StackResult._fields):
-continue
-try:
-results.append(StackResult(
-**{k: r[k] for k in StackResult._by
-if k in r and r[k].strip()},
-**{k: r[k] for k in StackResult._fields
-if k in r and r[k].strip()}))
-except TypeError:
-pass

 # fold
-results = fold(StackResult, results, by=by, defines=defines)
+results = fold(StackResult, results,
+by=by,
+defines=defines,
+depth=depth)

-# sort, note that python's sort is stable
+# hotify?
-results.sort()
+if hot:
-if sort:
+results = hotify(StackResult, results,
-for k, reverse in reversed(sort):
+fields=fields,
-results.sort(
+depth=depth,
-key=lambda r: tuple(
+hot=hot,
-(getattr(r, k),) if getattr(r, k) is not None else ()
+**args)
-for k in ([k] if k else StackResult._sort)),
-reverse=reverse ^ (not k or k in StackResult._fields))

-# write results to CSV
+# write results to CSV/JSON
 if args.get('output'):
-with openio(args['output'], 'w') as f:
+write_csv(args['output'], StackResult, results,
-writer = csv.DictWriter(f,
+by=by,
-(by if by is not None else StackResult._by)
+fields=fields,
-+ [k for k in (
+depth=depth,
-fields if fields is not None
+**args)
-else StackResult._fields)])
+if args.get('output_json'):
-writer.writeheader()
+write_csv(args['output_json'], StackResult, results,
-for r in results:
+json=True,
-writer.writerow(
+by=by,
-{k: getattr(r, k) for k in (
+fields=fields,
-by if by is not None else StackResult._by)}
+depth=depth,
-| {k: getattr(r, k) for k in (
+**args)
-fields if fields is not None
-else StackResult._fields)})

 # find previous results?
 diff_results = None
 if args.get('diff') or args.get('percent'):
-diff_results = []
 try:
-with openio(args.get('diff') or args.get('percent')) as f:
+diff_results = read_csv(
-reader = csv.DictReader(f, restval='')
+args.get('diff') or args.get('percent'),
-for r in reader:
+StackResult,
-# filter by matching defines
+depth=depth,
-if not all(k in r and r[k] in vs for k, vs in defines):
+**args)
-continue
-
-if not any(k in r and r[k].strip()
-for k in StackResult._fields):
-continue
-try:
-diff_results.append(StackResult(
-**{k: r[k] for k in StackResult._by
-if k in r and r[k].strip()},
-**{k: r[k] for k in StackResult._fields
-if k in r and r[k].strip()}))
-except TypeError:
-raise
 except FileNotFoundError:
-pass
+diff_results = []

 # fold
-diff_results = fold(StackResult, diff_results, by=by, defines=defines)
+diff_results = fold(StackResult, diff_results,
+by=by,
+defines=defines,
+depth=depth)

 # print table
 if not args.get('quiet'):
@@ -927,6 +1060,7 @@ def main(ci_paths,
 by=by if by is not None else ['function'],
 fields=fields,
 sort=sort,
+depth=depth,
 **args)

 # error on recursion
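Taken together, a typical flow with the new flags might look like this (the paths here are illustrative, the flags are the ones added above):

    ./scripts/stack.py lfs.ci lfs_util.ci -O stack.json -z0
    ./scripts/stack.py lfs.ci lfs_util.ci -d stack.json -z0 -r
    ./scripts/stack.py -u stack.json -b function -s limit

The first call stores recursive results as JSON, the second diffs a new build against them while showing only hot paths, and the third re-renders the stored results without re-parsing any callgraph files.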
@@ -956,24 +1090,28 @@ if __name__ == "__main__":
 parser.add_argument(
 '-o', '--output',
 help="Specify CSV file to store results.")
+parser.add_argument(
+'-O', '--output-json',
+help="Specify JSON file to store results. This may contain "
+"recursive info.")
 parser.add_argument(
 '-u', '--use',
-help="Don't parse anything, use this CSV file.")
+help="Don't parse anything, use this CSV/JSON file.")
 parser.add_argument(
 '-d', '--diff',
-help="Specify CSV file to diff against.")
+help="Specify CSV/JSON file to diff against.")
 parser.add_argument(
 '-p', '--percent',
-help="Specify CSV file to diff against, but only show precentage "
+help="Specify CSV/JSON file to diff against, but only show "
-"change, not a full diff.")
+"percentage change, not a full diff.")
-parser.add_argument(
-'-a', '--all',
-action='store_true',
-help="Show all, not just the ones that changed.")
 parser.add_argument(
 '-c', '--compare',
 type=lambda x: tuple(v.strip() for v in x.split(',')),
 help="Compare results to the row matching this by pattern.")
+parser.add_argument(
+'-a', '--all',
+action='store_true',
+help="Show all, not just the ones that changed.")
 parser.add_argument(
 '-b', '--by',
 action='append',
@@ -999,7 +1137,7 @@ if __name__ == "__main__":
 def __call__(self, parser, namespace, value, option):
 if namespace.sort is None:
 namespace.sort = []
-namespace.sort.append((value, True if option == '-S' else False))
+namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
 parser.add_argument(
 '-s', '--sort',
 nargs='?',
@@ -1010,6 +1148,29 @@ if __name__ == "__main__":
 nargs='?',
 action=AppendSort,
 help="Sort by this field, but backwards.")
+parser.add_argument(
+'-z', '--depth',
+nargs='?',
+type=lambda x: int(x, 0),
+const=0,
+help="Depth of function calls to show. 0 shows all calls unless "
+"we find a cycle. Defaults to 0.")
+class AppendHot(argparse.Action):
+def __call__(self, parser, namespace, value, option):
+if namespace.hot is None:
+namespace.hot = []
+namespace.hot.append((value, option in {'-R', '--reverse-hot'}))
+parser.add_argument(
+'-r', '--hot',
+nargs='?',
+action=AppendHot,
+help="Show only the hot path for each function call. Can "
+"optionally provide fields like sort.")
+parser.add_argument(
+'-R', '--reverse-hot',
+nargs='?',
+action=AppendHot,
+help="Like -r/--hot, but backwards.")
 parser.add_argument(
 '--no-header',
 action='store_true',
@@ -1034,18 +1195,6 @@ if __name__ == "__main__":
 '--everything',
 action='store_true',
 help="Include builtin and libc specific symbols.")
-parser.add_argument(
-'-z', '--depth',
-nargs='?',
-type=lambda x: int(x, 0),
-const=0,
-help="Depth of function calls to show. 0 shows all calls unless "
-"we find a cycle. Defaults to 0.")
-parser.add_argument(
-'-t', '--hot',
-nargs='?',
-action='append',
-help="Show only the hot path for each function call.")
 parser.add_argument(
 '-e', '--error-on-recursion',
 action='store_true',
@@ -16,6 +16,7 @@ if __name__ == "__main__":
 import collections as co
 import csv
 import itertools as it
+import io
 import functools as ft
 import math as mt
 import os
@@ -132,10 +133,10 @@ class RInt(co.namedtuple('RInt', 'x')):

 # struct size results
 class StructResult(co.namedtuple('StructResult', [
-'file', 'struct',
+'i', 'file', 'struct',
 'size', 'align',
-'i', 'children'])):
+'children'])):
-_by = ['file', 'struct']
+_by = ['i', 'file', 'struct']
 _fields = ['size', 'align']
 _sort = ['size', 'align']
 _types = {'size': RInt, 'align': RInt}
@@ -143,20 +144,16 @@ class StructResult(co.namedtuple('StructResult', [
 _children = 'children'

 __slots__ = ()
-def __new__(cls, file='', struct='', size=0, align=0,
+def __new__(cls, i=None, file='', struct='', size=0, align=0,
-i=None, children=None):
+children=None):
-return super().__new__(cls, file, struct,
+return super().__new__(cls, i, file, struct,
 RInt(size), RInt(align),
-i,
 children if children is not None else [])

 def __add__(self, other):
-return StructResult(self.file, self.struct,
+return StructResult(self.i, self.file, self.struct,
 self.size + other.size,
 max(self.align, other.align),
-self.i if other.i is None
-else other.i if self.i is None
-else min(self.i, other.i),
 self.children + other.children)


@@ -317,8 +314,9 @@ def collect_dwarf_info(obj_path, tags=None, *,

 return DwarfInfo(info)

-def collect(obj_paths, *,
+def collect_structs(obj_paths, *,
 everything=False,
+depth=1,
 **args):
 results = []
 for obj_path in obj_paths:
@@ -420,7 +418,10 @@ def collect(obj_paths, *,
 return align

 # recursive+cached children finder
-def childrenof(entry):
+def childrenof(entry, depth):
+# stop here?
+if depth < 1:
+return []
 # cached?
 if not hasattr(childrenof, 'cache'):
 childrenof.cache = {}
@@ -441,10 +442,9 @@ def collect(obj_paths, *,
 name_ = child.name
 size_ = sizeof(child)
 align_ = alignof(child)
-children_ = childrenof(child)
+children_ = childrenof(child, depth-1)
 children.append(StructResult(
-file, name_, size_, align_,
+child.off, file, name_, size_, align_,
-i=child.off,
 children=children_))
 # indirect type?
 elif entry.tag in {
@@ -456,7 +456,7 @@ def collect(obj_paths, *,
 'DW_TAG_volatile_type',
 'DW_TAG_restrict_type'}:
 type = int(entry['DW_AT_type'].strip('<>'), 0)
-children = childrenof(info[type])
+children = childrenof(info[type], depth)
 else:
 assert False, "Unknown dwarf entry? %r" % entry.tag

@@ -491,18 +491,18 @@ def collect(obj_paths, *,
 align = alignof(entry)

 # find children, recursing if necessary
-children = childrenof(entry)
+children = childrenof(entry, depth-1)

 # typdefs exist in a separate namespace, so we need to track
 # these separately
 if entry.tag == 'DW_TAG_typedef':
 typedefs[entry.off] = StructResult(
-file, name, size, align,
+None, file, name, size, align,
 children=children)
 typedefed.add(int(entry['DW_AT_type'].strip('<>'), 0))
 else:
 types[entry.off] = StructResult(
-file, name, size, align,
+None, file, name, size, align,
 children=children)

 # let typedefs take priority
@@ -514,7 +514,31 @@ def collect(obj_paths, *,
 return results


||||||
def fold(Result, results, by=None, defines=[]):
|
# common folding/tabling/read/write code
|
||||||
|
|
||||||
|
class Rev(co.namedtuple('Rev', 'x')):
|
||||||
|
__slots__ = ()
|
||||||
|
# yes we need all of these because we're a namedtuple
|
||||||
|
def __lt__(self, other):
|
||||||
|
return self.x > other.x
|
||||||
|
def __gt__(self, other):
|
||||||
|
return self.x < other.x
|
||||||
|
def __le__(self, other):
|
||||||
|
return self.x >= other.x
|
||||||
|
def __ge__(self, other):
|
||||||
|
return self.x <= other.x
|
||||||
|
|
||||||
|
def fold(Result, results, *,
|
||||||
|
by=None,
|
||||||
|
defines=[],
|
||||||
|
sort=None,
|
||||||
|
depth=1,
|
||||||
|
**_):
|
||||||
|
# stop when depth hits zero
|
||||||
|
if depth == 0:
|
||||||
|
return []
|
||||||
|
|
||||||
|
# organize by by
|
||||||
if by is None:
|
if by is None:
|
||||||
by = Result._by
|
by = Result._by
|
||||||
|
|
||||||
@@ -528,7 +552,7 @@ def fold(Result, results, by=None, defines=[]):
|
|||||||
if defines:
|
if defines:
|
||||||
results_ = []
|
results_ = []
|
||||||
for r in results:
|
for r in results:
|
||||||
if all(getattr(r, k) in vs for k, vs in defines):
|
if all(str(getattr(r, k)) in vs for k, vs in defines):
|
||||||
results_.append(r)
|
results_.append(r)
|
||||||
results = results_
|
results = results_
|
||||||
|
|
||||||
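Note: fold then groups the filtered results by their "by" fields and merges each group with sum(), which bottoms out in Result.__add__. A standalone sketch of just that grouping step, assuming results are namedtuple-like objects (the real fold above also sorts and recurses into children):

    import collections as co

    def fold_by(results, by):
        # group results that share the same "by" key, preserving order
        folding = co.OrderedDict()
        for r in results:
            name = tuple(getattr(r, k) for k in by)
            folding.setdefault(name, []).append(r)
        # merge each group via Result.__add__ (start= needs python >= 3.8)
        return [sum(rs[1:], start=rs[0]) for rs in folding.values()]
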
@@ -545,8 +569,80 @@ def fold(Result, results, by=None, defines=[]):
     for name, rs in folding.items():
         folded.append(sum(rs[1:], start=rs[0]))

+    # sort, note that python's sort is stable
+    folded.sort(key=lambda r: (
+        # sort by explicit sort fields
+        tuple((Rev
+                if reverse ^ (not k or k in Result._fields)
+                else lambda x: x)(
+                tuple((getattr(r, k_),)
+                        if getattr(r, k_) is not None
+                        else ()
+                    for k_ in ([k] if k else Result._sort)))
+            for k, reverse in (sort or [])),
+        # sort by result
+        r))
+
+    # recurse if we have recursive results
+    if hasattr(Result, '_children'):
+        folded = [r._replace(**{
+                Result._children: fold(
+                    Result, getattr(r, Result._children),
+                    by=by,
+                    defines=defines,
+                    sort=sort,
+                    depth=depth-1)})
+            for r in folded]
+
     return folded

+def hotify(Result, results, *,
+        fields=None,
+        sort=None,
+        depth=1,
+        hot=None,
+        **_):
+    # hotify only makes sense for recursive results
+    assert hasattr(Result, '_i')
+    assert hasattr(Result, '_children')
+
+    if fields is None:
+        fields = Result._fields
+
+    results_ = []
+    for r in results:
+        hot_ = []
+        def recurse(results_, depth_):
+            nonlocal hot_
+            if not results_:
+                return
+
+            # find the hottest result
+            r = min(results_, key=lambda r:
+                tuple((Rev
+                        if reverse ^ (not k or k in Result._fields)
+                        else lambda x: x)(
+                        tuple((getattr(r, k_),)
+                                if getattr(r, k_) is not None
+                                else ()
+                            for k_ in ([k] if k else Result._sort)))
+                    for k, reverse in it.chain(hot, [(None, False)])))
+
+            hot_.append(r._replace(**{
+                Result._i: RInt(len(hot_)),
+                Result._children: []}))
+
+            # recurse?
+            if depth_ > 1:
+                recurse(getattr(r, Result._children),
+                    depth_-1)
+
+        recurse(getattr(r, Result._children), depth-1)
+        results_.append(r._replace(**{
+            Result._children: hot_}))
+
+    return results_
+
 def table(Result, results, diff_results=None, *,
         by=None,
         fields=None,
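
Note: Rev exists only to flip comparisons inside a single stable sort key, so ascending and descending fields can be mixed in one sorting pass. A standalone example (the row data is made up):

    import collections as co

    class Rev(co.namedtuple('Rev', 'x')):
        __slots__ = ()
        def __lt__(self, other): return self.x > other.x
        def __gt__(self, other): return self.x < other.x
        def __le__(self, other): return self.x >= other.x
        def __ge__(self, other): return self.x <= other.x

    rows = [('lfs.c', 1), ('lfs_util.c', 3), ('bd.c', 3)]
    # descending by the numeric field, ascending by name, in one stable sort
    rows.sort(key=lambda r: (Rev(r[1]), r[0]))
    assert rows == [('bd.c', 3), ('lfs_util.c', 3), ('lfs.c', 1)]
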
@@ -572,124 +668,32 @@ def table(Result, results, diff_results=None, *,
         fields = Result._fields
     types = Result._types

-    # fold again
-    results = fold(Result, results, by=by)
+    # fold again, otherwise results risk being hidden
+    results = fold(Result, results,
+        by=by,
+        depth=depth)
     if diff_results is not None:
-        diff_results = fold(Result, diff_results, by=by)
+        diff_results = fold(Result, diff_results,
+            by=by,
+            depth=depth)
-    # reduce children to hot paths? only used by some scripts
-    if hot:
-        # subclass to reintroduce __dict__
-        Result_ = Result
-        class HotResult(Result_):
-            _i = '_hot_i'
-            _children = '_hot_children'
-
-            def __new__(cls, r, i=None, children=None, notes=None):
-                self = HotResult._make(r)
-                self._hot_i = i
-                self._hot_children = children if children is not None else []
-                return self
-
-            def __add__(self, other):
-                return HotResult(
-                    Result_.__add__(self, other),
-                    self._hot_i if other._hot_i is None
-                        else other._hot_i if self._hot_i is None
-                        else min(self._hot_i, other._hot_i),
-                    self._hot_children + other._hot_children)
-
-        results_ = []
-        for r in results:
-            hot_ = []
-            def recurse(results_, depth_):
-                nonlocal hot_
-                if not results_:
-                    return
-
-                # find the hottest result
-                r = max(results_,
-                    key=lambda r: tuple(
-                        tuple((getattr(r, k),)
-                            if getattr(r, k, None) is not None
-                            else ()
-                            for k in (
-                                [k] if k else [
-                                    k for k in Result._sort
-                                    if k in fields])
-                            if k in fields)
-                        for k in it.chain(hot, [None])))
-                hot_.append(HotResult(r, i=len(hot_)))
-
-                # recurse?
-                if depth_ > 1:
-                    recurse(getattr(r, Result._children),
-                        depth_-1)
-
-            recurse(getattr(r, Result._children), depth-1)
-            results_.append(HotResult(r, children=hot_))
-
-        Result = HotResult
-        results = results_
-
     # organize by name
     table = {
-        ','.join(str(getattr(r, k) or '') for k in by): r
+        ','.join(str(getattr(r, k)
+                if getattr(r, k) is not None
+                else '')
+            for k in by): r
         for r in results}
     diff_table = {
-        ','.join(str(getattr(r, k) or '') for k in by): r
+        ','.join(str(getattr(r, k)
+                if getattr(r, k) is not None
+                else '')
+            for k in by): r
         for r in diff_results or []}
-    names = [name
-        for name in table.keys() | diff_table.keys()
-        if diff_results is None
-            or all_
-            or any(
-                types[k].ratio(
-                    getattr(table.get(name), k, None),
-                    getattr(diff_table.get(name), k, None))
-                for k in fields)]

     # find compare entry if there is one
     if compare:
-        compare_result = table.get(','.join(str(k) for k in compare))
+        compare_r = table.get(','.join(str(k) for k in compare))

-    # sort again, now with diff info, note that python's sort is stable
-    names.sort()
-    if compare:
-        names.sort(
-            key=lambda n: (
-                # move compare entry to the top, note this can be
-                # overridden by explicitly sorting by fields
-                table.get(n) == compare_result,
-                # sort by ratio if comparing
-                tuple(
-                    types[k].ratio(
-                        getattr(table.get(n), k, None),
-                        getattr(compare_result, k, None))
-                    for k in fields)),
-            reverse=True)
-    if diff or percent:
-        names.sort(
-            # sort by ratio if diffing
-            key=lambda n: tuple(
-                types[k].ratio(
-                    getattr(table.get(n), k, None),
-                    getattr(diff_table.get(n), k, None))
-                for k in fields),
-            reverse=True)
-    if sort:
-        for k, reverse in reversed(sort):
-            names.sort(
-                key=lambda n: tuple(
-                    (getattr(table[n], k),)
-                    if getattr(table.get(n), k, None) is not None
-                    else ()
-                    for k in (
-                        [k] if k else [
-                            k for k in Result._sort
-                            if k in fields])),
-                reverse=reverse ^ (not k or k in Result._fields))
-
-
     # build up our lines
     lines = []
@@ -716,11 +720,16 @@ def table(Result, results, diff_results=None, *,
             header.append('d'+k)
     lines.append(header)

+    # delete these to try to catch typos below, we need to rebuild
+    # these tables at each recursive layer
+    del table
+    del diff_table
+
     # entry helper
     def table_entry(name, r, diff_r=None):
         entry = [name]
         # normal entry?
-        if ((compare is None or r == compare_result)
+        if ((compare is None or r == compare_r)
                 and not percent
                 and not diff):
             for k in fields:
@@ -741,7 +750,7 @@ def table(Result, results, diff_results=None, *,
                         else ['%+.1f%%' % (100*t)])(
                         types[k].ratio(
                             getattr(r, k, None),
-                            getattr(compare_result, k, None)))))
+                            getattr(compare_r, k, None)))))
         # percent entry?
         elif not diff:
             for k in fields:
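
Note: the '%+.1f%%' % (100*t) above renders a ratio as a signed percentage. A rough sketch with one plausible ratio definition (the real types[k].ratio handles None and infinite cases more carefully, so this is only an approximation):

    def ratio(new, old):
        # relative change of new vs old
        if not old:
            return 0.0 if not new else float('inf')
        return new/old - 1

    print('%+.1f%%' % (100*ratio(1024, 1000)))  # prints +2.4%
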
@@ -787,71 +796,94 @@ def table(Result, results, diff_results=None, *,

         return entry

-    # recursive entry helper, only used by some scripts
-    def recurse(results_, depth_,
+    # recursive entry helper
+    def table_recurse(results_, diff_results_,
+            depth_,
             prefixes=('', '', '', '')):
         # build the children table at each layer
-        results_ = fold(Result, results_, by=by)
         table_ = {
-            ','.join(str(getattr(r, k) or '') for k in by): r
+            ','.join(str(getattr(r, k)
+                    if getattr(r, k) is not None
+                    else '')
+                for k in by): r
             for r in results_}
-        names_ = list(table_.keys())
+        diff_table_ = {
+            ','.join(str(getattr(r, k)
+                    if getattr(r, k) is not None
+                    else '')
+                for k in by): r
+            for r in diff_results_ or []}
+        names_ = [n
+            for n in table_.keys() | diff_table_.keys()
+            if diff_results_ is None
+                or all_
+                or any(
+                    types[k].ratio(
+                        getattr(table_.get(n), k, None),
+                        getattr(diff_table_.get(n), k, None))
+                    for k in fields)]

-        # sort the children layer
-        names_.sort()
-        if hasattr(Result, '_i'):
-            names_.sort(key=lambda n: getattr(table_[n], Result._i))
-        if sort:
-            for k, reverse in reversed(sort):
-                names_.sort(
-                    key=lambda n: tuple(
-                        (getattr(table_[n], k),)
-                        if getattr(table_.get(n), k, None)
-                            is not None
-                        else ()
-                        for k in (
-                            [k] if k else [
-                                k for k in Result._sort
-                                if k in fields])),
-                    reverse=reverse ^ (not k or k in Result._fields))
+        # sort again, now with diff info, note that python's sort is stable
+        names_.sort(key=lambda n: (
+            # sort by explicit sort fields
+            tuple((Rev
+                    if reverse ^ (not k or k in Result._fields)
+                    else lambda x: x)(
+                    tuple((getattr(table_[n], k_),)
+                            if getattr(table_.get(n), k_, None) is not None
+                            else ()
+                        for k_ in ([k] if k else Result._sort)))
+                for k, reverse in (sort or [])),
+            # sort by ratio if diffing
+            Rev(tuple(types[k].ratio(
+                    getattr(table_.get(n), k, None),
+                    getattr(diff_table_.get(n), k, None))
+                for k in fields))
+                if diff or percent
+                else (),
+            # move compare entry to the top, note this can be
+            # overridden by explicitly sorting by fields
+            (table_.get(n) != compare_r,
+                # sort by ratio if comparing
+                Rev(tuple(
+                    types[k].ratio(
+                        getattr(table_.get(n), k, None),
+                        getattr(compare_r, k, None))
+                    for k in fields)))
+                if compare
+                else (),
+            # sort by result
+            (table_[n],) if n in table_ else (),
+            # and finally by name (diffs may be missing results)
+            n))

-        for i, name in enumerate(names_):
-            r = table_[name]
-            is_last = (i == len(names_)-1)
+        for i, n in enumerate(names_):
+            # find comparable results
+            r = table_.get(n)
+            diff_r = diff_table_.get(n)

-            line = table_entry(name, r)
-            line = [x if isinstance(x, tuple) else (x, []) for x in line]
+            # build line
+            line = table_entry(n, r, diff_r)
+
             # add prefixes
-            line[0] = (prefixes[0+is_last] + line[0][0], line[0][1])
+            line = [x if isinstance(x, tuple) else (x, []) for x in line]
+            line[0] = (prefixes[0+(i==len(names_)-1)] + line[0][0], line[0][1])
             lines.append(line)

             # recurse?
-            if depth_ > 1:
-                recurse(getattr(r, Result._children),
+            if n in table_ and depth_ > 1:
+                table_recurse(
+                    getattr(r, Result._children),
+                    getattr(diff_r, Result._children, None) or [],
                     depth_-1,
-                    (prefixes[2+is_last] + "|-> ",
-                    prefixes[2+is_last] + "'-> ",
-                    prefixes[2+is_last] + "| ",
-                    prefixes[2+is_last] + " "))
+                    (prefixes[2+(i==len(names_)-1)] + "|-> ",
+                    prefixes[2+(i==len(names_)-1)] + "'-> ",
+                    prefixes[2+(i==len(names_)-1)] + "| ",
+                    prefixes[2+(i==len(names_)-1)] + " "))

-    # entries
+    # build entries
     if not summary:
-        for name in names:
-            r = table.get(name)
-            if diff_results is None:
-                diff_r = None
-            else:
-                diff_r = diff_table.get(name)
-            lines.append(table_entry(name, r, diff_r))
-
-            # recursive entries
-            if name in table and depth > 1:
-                recurse(getattr(table[name], Result._children),
-                    depth-1,
-                    ("|-> ",
-                    "'-> ",
-                    "| ",
-                    " "))
+        table_recurse(results, diff_results, depth)

     # total
     if not no_total and not (small_table and not summary):
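
Note: the four prefixes passed to table_recurse are (entry, last entry, continuation, last continuation); each level of recursion extends them to draw the tree. A standalone sketch of the same scheme, using a made-up nested dict instead of real results:

    def render(tree, prefixes=('', '', '', '')):
        names = sorted(tree.keys())
        for i, name in enumerate(names):
            last = (i == len(names)-1)
            print(prefixes[0+last] + name)
            render(tree[name],
                (prefixes[2+last] + "|-> ",
                 prefixes[2+last] + "'-> ",
                 prefixes[2+last] + "|   ",
                 prefixes[2+last] + "    "))

    render({'lfs_file_write': {'lfs_bd_prog': {}, 'lfs_file_flush': {}}})
    # lfs_file_write
    # |-> lfs_bd_prog
    # '-> lfs_file_flush
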
@@ -863,9 +895,8 @@ def table(Result, results, diff_results=None, *,
         lines.append(table_entry('TOTAL', r, diff_r))

     # homogenize
-    lines = [
-        [x if isinstance(x, tuple) else (x, []) for x in line]
-        for line in lines]
+    lines = [[x if isinstance(x, tuple) else (x, []) for x in line]
+        for line in lines]

     # find the best widths, note that column 0 contains the names and is
     # handled a bit differently
@@ -886,18 +917,144 @@ def table(Result, results, diff_results=None, *,
                 nwidths[i], ' (%s)' % ', '.join(x[1]) if x[1] else '')
             for i, x in enumerate(line[1:], 1))))

+def read_csv(path, Result, *,
+        depth=1,
+        **_):
+    with openio(path, 'r') as f:
+        # csv or json? assume json starts with [
+        json = (f.buffer.peek(1)[:1] == b'[')
+
+        # read csv?
+        if not json:
+            results = []
+            reader = csv.DictReader(f, restval='')
+            for r in reader:
+                if not any(k in r and r[k].strip()
+                        for k in Result._fields):
+                    continue
+                try:
+                    # note this allows by/fields to overlap
+                    results.append(Result(**(
+                        {k: r[k] for k in Result._by
+                            if k in r and r[k].strip()}
+                        | {k: r[k] for k in Result._fields
+                            if k in r and r[k].strip()})))
+                except TypeError:
+                    pass
+            return results
+
+        # read json?
+        else:
+            import json
+            def unjsonify(results, depth_):
+                results_ = []
+                for r in results:
+                    if not any(k in r and r[k].strip()
+                            for k in Result._fields):
+                        continue
+                    try:
+                        # note this allows by/fields to overlap
+                        results_.append(Result(**(
+                            {k: r[k] for k in Result._by
+                                if k in r and r[k] is not None}
+                            | {k: r[k] for k in Result._fields
+                                if k in r and r[k] is not None}
+                            | ({Result._children: unjsonify(
+                                    r[Result._children],
+                                    depth_-1)}
+                                if hasattr(Result, '_children')
+                                    and Result._children in r
+                                    and r[Result._children] is not None
+                                    and depth_ > 1
+                                else {})
+                            | ({Result._notes: set(r[Result._notes])}
+                                if hasattr(Result, '_notes')
+                                    and Result._notes in r
+                                    and r[Result._notes] is not None
+                                else {}))))
+                    except TypeError:
+                        pass
+                return results_
+            return unjsonify(json.load(f), depth)
+
+def write_csv(path, Result, results, *,
+        json=False,
+        by=None,
+        fields=None,
+        depth=1,
+        **_):
+    with openio(path, 'w') as f:
+        # write csv?
+        if not json:
+            writer = csv.DictWriter(f,
+                (by if by is not None else Result._by)
+                + [k for k in (fields
+                    if fields is not None
+                    else Result._fields)])
+            writer.writeheader()
+            for r in results:
+                # note this allows by/fields to overlap
+                writer.writerow(
+                    {k: getattr(r, k)
+                        for k in (by
+                            if by is not None
+                            else Result._by)
+                        if getattr(r, k) is not None}
+                    | {k: str(getattr(r, k))
+                        for k in (fields
+                            if fields is not None
+                            else Result._fields)
+                        if getattr(r, k) is not None})
+
+        # write json?
+        else:
+            import json
+            # the neat thing about json is we can include recursive results
+            def jsonify(results, depth_):
+                results_ = []
+                for r in results:
+                    # note this allows by/fields to overlap
+                    results_.append(
+                        {k: getattr(r, k)
+                            for k in (by
+                                if by is not None
+                                else Result._by)
+                            if getattr(r, k) is not None}
+                        | {k: str(getattr(r, k))
+                            for k in (fields
+                                if fields is not None
+                                else Result._fields)
+                            if getattr(r, k) is not None}
+                        | ({Result._children: jsonify(
+                                getattr(r, Result._children),
+                                depth_-1)}
+                            if hasattr(Result, '_children')
+                                and getattr(r, Result._children)
+                                and depth_ > 1
+                            else {})
+                        | ({Result._notes: list(
+                                getattr(r, Result._notes))}
+                            if hasattr(Result, '_notes')
+                                and getattr(r, Result._notes)
+                            else {}))
+                return results_
+            json.dump(jsonify(results, depth), f,
+                separators=(',', ':'))
+
+
 def main(obj_paths, *,
         by=None,
         fields=None,
         defines=[],
         sort=None,
+        depth=None,
+        hot=None,
         **args):
     # figure out depth
-    if args.get('depth') is None:
-        args['depth'] = mt.inf if args.get('hot') else 1
-    elif args.get('depth') == 0:
-        args['depth'] = mt.inf
+    if depth is None:
+        depth = mt.inf if hot else 1
+    elif depth == 0:
+        depth = mt.inf

     # find sizes
     if not args.get('use', None):
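
Note: read_csv above decides between csv and json by peeking at the first byte of the stream and assuming json iff it is '[', so json with leading whitespace would be read as csv. A standalone sketch of that sniffing (read_results and its path argument are made up for illustration):

    import csv
    import json

    def read_results(path):
        with open(path, 'r') as f:
            # peek may return more than one byte, hence the [:1]
            if f.buffer.peek(1)[:1] == b'[':
                return json.load(f)
            else:
                return list(csv.DictReader(f, restval=''))
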
@@ -908,89 +1065,61 @@ def main(obj_paths, *,
             sys.exit(1)

         # collect info
-        results = collect(obj_paths, **args)
+        results = collect_structs(obj_paths,
+            depth=depth,
+            **args)

     else:
-        results = []
-        with openio(args['use']) as f:
-            reader = csv.DictReader(f, restval='')
-            for r in reader:
-                # filter by matching defines
-                if not all(k in r and r[k] in vs for k, vs in defines):
-                    continue
-
-                if not any(k in r and r[k].strip()
-                        for k in StructResult._fields):
-                    continue
-                try:
-                    results.append(StructResult(
-                        **{k: r[k] for k in StructResult._by
-                            if k in r and r[k].strip()},
-                        **{k: r[k]
-                            for k in StructResult._fields
-                            if k in r and r[k].strip()}))
-                except TypeError:
-                    pass
+        results = read_csv(args['use'], StructResult,
+            depth=depth,
+            **args)

     # fold
-    results = fold(StructResult, results, by=by, defines=defines)
+    results = fold(StructResult, results,
+        by=by,
+        defines=defines,
+        depth=depth)

-    # sort, note that python's sort is stable
-    results.sort()
-    if sort:
-        for k, reverse in reversed(sort):
-            results.sort(
-                key=lambda r: tuple(
-                    (getattr(r, k),) if getattr(r, k) is not None else ()
-                    for k in ([k] if k else StructResult._sort)),
-                reverse=reverse ^ (not k or k in StructResult._fields))
+    # hotify?
+    if hot:
+        results = hotify(StructResult, results,
+            fields=fields,
+            depth=depth,
+            hot=hot,
+            **args)

-    # write results to CSV
+    # write results to CSV/JSON
     if args.get('output'):
-        with openio(args['output'], 'w') as f:
-            writer = csv.DictWriter(f,
-                (by if by is not None else StructResult._by)
-                + [k for k in (
-                    fields if fields is not None
-                    else StructResult._fields)])
-            writer.writeheader()
-            for r in results:
-                writer.writerow(
-                    {k: getattr(r, k) for k in (
-                        by if by is not None else StructResult._by)}
-                    | {k: getattr(r, k) for k in (
-                        fields if fields is not None
-                        else StructResult._fields)})
+        write_csv(args['output'], StructResult, results,
+            by=by,
+            fields=fields,
+            depth=depth,
+            **args)
+    if args.get('output_json'):
+        write_csv(args['output_json'], StructResult, results,
+            json=True,
+            by=by,
+            fields=fields,
+            depth=depth,
+            **args)

     # find previous results?
     diff_results = None
     if args.get('diff') or args.get('percent'):
-        diff_results = []
         try:
-            with openio(args.get('diff') or args.get('percent')) as f:
-                reader = csv.DictReader(f, restval='')
-                for r in reader:
-                    # filter by matching defines
-                    if not all(k in r and r[k] in vs for k, vs in defines):
-                        continue
-
-                    if not any(k in r and r[k].strip()
-                            for k in StructResult._fields):
-                        continue
-                    try:
-                        diff_results.append(StructResult(
-                            **{k: r[k] for k in StructResult._by
-                                if k in r and r[k].strip()},
-                            **{k: r[k]
-                                for k in StructResult._fields
-                                if k in r and r[k].strip()}))
-                    except TypeError:
-                        pass
+            diff_results = read_csv(
+                args.get('diff') or args.get('percent'),
+                StructResult,
+                depth=depth,
+                **args)
         except FileNotFoundError:
-            pass
+            diff_results = []

         # fold
-        diff_results = fold(StructResult, diff_results, by=by, defines=defines)
+        diff_results = fold(StructResult, diff_results,
+            by=by,
+            defines=defines,
+            depth=depth)

     # print table
     if not args.get('quiet'):
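
Note: with both -o/--output and -O/--output-json given, the same results are written twice, and the json side uses compact separators and keeps children. A quick illustration of the resulting shape (the struct names and sizes here are made up):

    import json
    import sys

    results = [{'struct': 'lfs_t', 'size': 580, 'children': [
        {'struct': 'lfs_mdir_t', 'size': 40}]}]
    json.dump(results, sys.stdout, separators=(',', ':'))
    # [{"struct":"lfs_t","size":580,"children":[{"struct":"lfs_mdir_t","size":40}]}]
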
@@ -998,6 +1127,7 @@ def main(obj_paths, *,
             by=by if by is not None else ['struct'],
             fields=fields,
             sort=sort,
+            depth=depth,
             **args)


@@ -1022,24 +1152,28 @@ if __name__ == "__main__":
     parser.add_argument(
         '-o', '--output',
         help="Specify CSV file to store results.")
+    parser.add_argument(
+        '-O', '--output-json',
+        help="Specify JSON file to store results. This may contain "
+            "recursive info.")
     parser.add_argument(
         '-u', '--use',
         help="Don't parse anything, use this CSV file.")
     parser.add_argument(
         '-d', '--diff',
-        help="Specify CSV file to diff against.")
+        help="Specify CSV/JSON file to diff against.")
     parser.add_argument(
         '-p', '--percent',
-        help="Specify CSV file to diff against, but only show precentage "
-            "change, not a full diff.")
-    parser.add_argument(
-        '-a', '--all',
-        action='store_true',
-        help="Show all, not just the ones that changed.")
+        help="Specify CSV/JSON file to diff against, but only show "
+            "percentage change, not a full diff.")
     parser.add_argument(
         '-c', '--compare',
         type=lambda x: tuple(v.strip() for v in x.split(',')),
         help="Compare results to the row matching this by pattern.")
+    parser.add_argument(
+        '-a', '--all',
+        action='store_true',
+        help="Show all, not just the ones that changed.")
     parser.add_argument(
         '-b', '--by',
         action='append',
@@ -1065,7 +1199,7 @@ if __name__ == "__main__":
         def __call__(self, parser, namespace, value, option):
             if namespace.sort is None:
                 namespace.sort = []
-            namespace.sort.append((value, True if option == '-S' else False))
+            namespace.sort.append((value, option in {'-S', '--reverse-sort'}))
     parser.add_argument(
         '-s', '--sort',
         nargs='?',
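
Note: this is the -S/--reverse-sort fix shown just above: argparse hands the Action the literal option string used on the command line, so testing option == '-S' missed the long form. A standalone sketch, not tied to any particular script:

    import argparse

    class AppendSort(argparse.Action):
        def __call__(self, parser, namespace, value, option):
            if namespace.sort is None:
                namespace.sort = []
            namespace.sort.append((value, option in {'-S', '--reverse-sort'}))

    parser = argparse.ArgumentParser()
    parser.add_argument('-s', '--sort', nargs='?', action=AppendSort)
    parser.add_argument('-S', '--reverse-sort', nargs='?', action=AppendSort)
    print(parser.parse_args(['--reverse-sort', 'size']).sort)
    # [('size', True)]
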
@@ -1076,6 +1210,29 @@ if __name__ == "__main__":
         nargs='?',
         action=AppendSort,
         help="Sort by this field, but backwards.")
+    parser.add_argument(
+        '-z', '--depth',
+        nargs='?',
+        type=lambda x: int(x, 0),
+        const=0,
+        help="Depth of function calls to show. 0 shows all calls unless "
+            "we find a cycle. Defaults to 0.")
+    class AppendHot(argparse.Action):
+        def __call__(self, parser, namespace, value, option):
+            if namespace.hot is None:
+                namespace.hot = []
+            namespace.hot.append((value, option in {'-R', '--reverse-hot'}))
+    parser.add_argument(
+        '-r', '--hot',
+        nargs='?',
+        action=AppendHot,
+        help="Show only the hot path for each function call. Can "
+            "optionally provide fields like sort.")
+    parser.add_argument(
+        '-R', '--reverse-hot',
+        nargs='?',
+        action=AppendHot,
+        help="Like -r/--hot, but backwards.")
     parser.add_argument(
         '--no-header',
         action='store_true',
@@ -1100,18 +1257,6 @@ if __name__ == "__main__":
         '--everything',
         action='store_true',
         help="Include builtin and libc specific symbols.")
-    parser.add_argument(
-        '-z', '--depth',
-        nargs='?',
-        type=lambda x: int(x, 0),
-        const=0,
-        help="Depth of function calls to show. 0 shows all calls unless "
-            "we find a cycle. Defaults to 0.")
-    parser.add_argument(
-        '-t', '--hot',
-        nargs='?',
-        action='append',
-        help="Show only the hot path for each function call.")
     parser.add_argument(
         '--objdump-path',
         type=lambda x: x.split(),
@@ -685,7 +685,7 @@ def find_runner(runner, id=None, main=True, **args):
     # run under perf?
     if args.get('perf'):
         cmd[:0] = args['perf_script'] + list(filter(None, [
-            '-R',
+            '--record',
             '--perf-freq=%s' % args['perf_freq']
                 if args.get('perf_freq') else None,
             '--perf-period=%s' % args['perf_period']