#!/usr/bin/env python

from __future__ import print_function

import io
import yaml
# Try to use the C parser.
try:
    from yaml import CLoader as Loader
except ImportError:
    print("For faster parsing, you may want to install libYAML for PyYAML")
    from yaml import Loader

import html
from collections import defaultdict
import fnmatch
import functools
from multiprocessing import Lock
import os, os.path
import subprocess
try:
    # The previously builtin function `intern()` was moved
    # to the `sys` module in Python 3.
    from sys import intern
except ImportError:
    # Python 2: `intern()` is still a builtin.
    pass

import re

import optpmap


try:
    dict.iteritems
except AttributeError:
    # Python 3
    def itervalues(d):
        return iter(d.values())

    def iteritems(d):
        return iter(d.items())
else:
    # Python 2
    def itervalues(d):
        return d.itervalues()

    def iteritems(d):
        return d.iteritems()


def html_file_name(filename):
    return filename.replace('/', '_').replace('#', '_') + ".html"


def make_link(File, Line):
    return "\"{}#L{}\"".format(html_file_name(File), Line)
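
# Illustrative example of the two helpers above (the file name is made up):
# html_file_name("lib/foo.cpp") yields "lib_foo.cpp.html", and
# make_link("lib/foo.cpp", 10) yields '"lib_foo.cpp.html#L10"' -- note that
# the surrounding double quotes are part of the returned string, so the value
# can be dropped directly into an href attribute.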


class Remark(yaml.YAMLObject):
    # Work-around for http://pyyaml.org/ticket/154.
    yaml_loader = Loader

    default_demangler = 'c++filt -n'
    demangler_proc = None

    @classmethod
    def set_demangler(cls, demangler):
        cls.demangler_proc = subprocess.Popen(demangler.split(),
                                              stdin=subprocess.PIPE,
                                              stdout=subprocess.PIPE)
        cls.demangler_lock = Lock()

    @classmethod
    def demangle(cls, name):
        with cls.demangler_lock:
            cls.demangler_proc.stdin.write((name + '\n').encode('utf-8'))
            cls.demangler_proc.stdin.flush()
            return cls.demangler_proc.stdout.readline().rstrip().decode('utf-8')
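
    # Illustrative usage sketch (assumes `c++filt` is on PATH; the mangled
    # name below is an example input, not taken from this file):
    #   Remark.set_demangler(Remark.default_demangler)
    #   Remark.demangle('_Z3foov')  # -> 'foo()'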

    # Intern all strings since we have a lot of duplication across filenames
    # and remark text.
    #
    # Change Args from a list of dicts to a tuple of tuples.  This saves
    # memory in two ways.  One, a small tuple is significantly smaller than a
    # small dict.  Two, using a tuple instead of a list allows Args to be
    # directly used as part of the key (in Python only immutable types are
    # hashable).
    def _reduce_memory(self):
        self.Pass = intern(self.Pass)
        self.Name = intern(self.Name)
        try:
            # Can't intern unicode strings in Python 2.
            self.Function = intern(self.Function)
        except TypeError:
            pass

        def _reduce_memory_dict(old_dict):
            new_dict = dict()
            for (k, v) in iteritems(old_dict):
                if type(k) is str:
                    k = intern(k)

                if type(v) is str:
                    v = intern(v)
                elif type(v) is dict:
                    # This handles [{'Caller': ..., 'DebugLoc': { 'File': ... }}]
                    v = _reduce_memory_dict(v)
                new_dict[k] = v
            return tuple(new_dict.items())

        self.Args = tuple([_reduce_memory_dict(arg_dict) for arg_dict in self.Args])
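
    # Illustrative example of the transformation above (the values are made
    # up): Args of [{'Callee': '_Z3foov'}, {'String': ' inlined into '}]
    # becomes ((('Callee', '_Z3foov'),), (('String', ' inlined into '),)).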

    # The inverse operation of the dictionary-related memory optimization in
    # _reduce_memory_dict.  E.g.
    #     (('DebugLoc', (('File', ...) ... ))) -> [{'DebugLoc': {'File': ...} ....}]
    def recover_yaml_structure(self):
        def tuple_to_dict(t):
            d = dict()
            for (k, v) in t:
                if type(v) is tuple:
                    v = tuple_to_dict(v)
                d[k] = v
            return d

        self.Args = [tuple_to_dict(arg_tuple) for arg_tuple in self.Args]

    def canonicalize(self):
        if not hasattr(self, 'Hotness'):
            self.Hotness = 0
        if not hasattr(self, 'Args'):
            self.Args = []
        self._reduce_memory()

    @property
    def File(self):
        return self.DebugLoc['File']

    @property
    def Line(self):
        return int(self.DebugLoc['Line'])

    @property
    def Column(self):
        return self.DebugLoc['Column']

    @property
    def DebugLocString(self):
        return "{}:{}:{}".format(self.File, self.Line, self.Column)

    @property
    def DemangledFunctionName(self):
        return self.demangle(self.Function)

    @property
    def Link(self):
        return make_link(self.File, self.Line)

    def getArgString(self, mapping):
        mapping = dict(list(mapping))
        dl = mapping.get('DebugLoc')
        if dl:
            del mapping['DebugLoc']

        assert(len(mapping) == 1)
        (key, value) = list(mapping.items())[0]

        if key == 'Caller' or key == 'Callee' or key == 'DirectCallee':
            value = html.escape(self.demangle(value))

        if dl and key != 'Caller':
            dl_dict = dict(list(dl))
            return u"<a href={}>{}</a>".format(
                make_link(dl_dict['File'], dl_dict['Line']), value)
        else:
            return value

    # Return a cached dictionary for the arguments.  The key for each entry
    # is the argument key (e.g. 'Callee' for inlining remarks).  The value is
    # a tuple containing the value (e.g. for 'Callee' the function) and
    # optionally a DebugLoc.
    def getArgDict(self):
        if hasattr(self, 'ArgDict'):
            return self.ArgDict
        self.ArgDict = {}
        for arg in self.Args:
            if len(arg) == 2:
                if arg[0][0] == 'DebugLoc':
                    dbgidx = 0
                else:
                    assert(arg[1][0] == 'DebugLoc')
                    dbgidx = 1

                key = arg[1 - dbgidx][0]
                entry = (arg[1 - dbgidx][1], arg[dbgidx][1])
            else:
                arg = arg[0]
                key = arg[0]
                entry = (arg[1], )

            self.ArgDict[key] = entry
        return self.ArgDict
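
    # Illustrative example (values are made up; Args is already in the
    # reduced tuple-of-tuples form produced by _reduce_memory):
    #   ((('Callee', 'foo'), ('DebugLoc', dl)), (('String', ' inlined'),))
    # yields {'Callee': ('foo', dl), 'String': (' inlined',)}.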

    def getDiffPrefix(self):
        if hasattr(self, 'Added'):
            if self.Added:
                return '+'
            else:
                return '-'
        return ''

    @property
    def PassWithDiffPrefix(self):
        return self.getDiffPrefix() + self.Pass

    @property
    def message(self):
        # Args is a list of mappings (dictionaries)
        values = [self.getArgString(mapping) for mapping in self.Args]
        return "".join(values)

    @property
    def RelativeHotness(self):
        if self.max_hotness:
            return "{0:.2f}%".format(self.Hotness * 100. / self.max_hotness)
        else:
            return ''
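
    # Worked example of the formula above (the numbers are made up): with
    # Hotness == 25 and max_hotness == 1000, RelativeHotness is
    # 25 * 100 / 1000 = 2.5, rendered as "2.50%".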

    @property
    def key(self):
        return (self.__class__, self.PassWithDiffPrefix, self.Name, self.File,
                self.Line, self.Column, self.Function, self.Args)

    def __hash__(self):
        return hash(self.key)

    def __eq__(self, other):
        return self.key == other.key

    def __repr__(self):
        return str(self.key)


class Analysis(Remark):
    yaml_tag = '!Analysis'

    @property
    def color(self):
        return "white"


class AnalysisFPCommute(Analysis):
    yaml_tag = '!AnalysisFPCommute'


class AnalysisAliasing(Analysis):
    yaml_tag = '!AnalysisAliasing'


class Passed(Remark):
    yaml_tag = '!Passed'

    @property
    def color(self):
        return "green"


class Missed(Remark):
    yaml_tag = '!Missed'

    @property
    def color(self):
        return "red"


class Failure(Missed):
    yaml_tag = '!Failure'


def get_remarks(input_file, filter_=None):
    max_hotness = 0
    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))

    with io.open(input_file, encoding='utf-8') as f:
        docs = yaml.load_all(f, Loader=Loader)

        filter_e = None
        if filter_:
            filter_e = re.compile(filter_)
        for remark in docs:
            remark.canonicalize()
            # Skip remarks without a debug location and duplicates of remarks
            # we have already seen.
            if not hasattr(remark, 'DebugLoc') or remark.key in all_remarks:
                continue

            if filter_e and not filter_e.search(remark.Pass):
                continue

            all_remarks[remark.key] = remark

            file_remarks[remark.File][remark.Line].append(remark)

            # If we're reading back a diff YAML file, max_hotness is already
            # captured, and it may actually be less than the max hotness found
            # in the file.
            if hasattr(remark, 'max_hotness'):
                max_hotness = remark.max_hotness
            max_hotness = max(max_hotness, remark.Hotness)

    return max_hotness, all_remarks, file_remarks
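
# Illustrative usage sketch (the file name and filter are hypothetical):
#   max_hotness, all_remarks, file_remarks = get_remarks(
#       'foo.opt.yaml', filter_='inline')
# all_remarks maps remark keys to Remark objects; file_remarks is indexed
# first by source file and then by line.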


def gather_results(filenames, num_jobs, should_print_progress, filter_=None):
    if should_print_progress:
        print('Reading YAML files...')
    if not Remark.demangler_proc:
        Remark.set_demangler(Remark.default_demangler)
    remarks = optpmap.pmap(
        get_remarks, filenames, num_jobs, should_print_progress, filter_)
    max_hotness = max(entry[0] for entry in remarks)

    def merge_file_remarks(file_remarks_job, all_remarks, merged):
        for filename, d in iteritems(file_remarks_job):
            for line, remarks in iteritems(d):
                for remark in remarks:
                    # Bring max_hotness into the remarks so that
                    # RelativeHotness does not depend on an external global.
                    remark.max_hotness = max_hotness
                    if remark.key not in all_remarks:
                        merged[filename][line].append(remark)

    all_remarks = dict()
    file_remarks = defaultdict(functools.partial(defaultdict, list))
    for _, all_remarks_job, file_remarks_job in remarks:
        merge_file_remarks(file_remarks_job, all_remarks, file_remarks)
        all_remarks.update(all_remarks_job)

    return all_remarks, file_remarks, max_hotness != 0


def find_opt_files(*dirs_or_files):
    all = []
    for dir_or_file in dirs_or_files:
        if os.path.isfile(dir_or_file):
            all.append(dir_or_file)
        else:
            for dir, subdirs, files in os.walk(dir_or_file):
                # Exclude mounted directories and symlinks (os.walk default).
                subdirs[:] = [d for d in subdirs
                              if not os.path.ismount(os.path.join(dir, d))]
                for file in files:
                    if fnmatch.fnmatch(file, "*.opt.yaml*"):
                        all.append(os.path.join(dir, file))
    return all